Dataset columns (all string-valued; length ranges as reported):

  Column            Min length   Max length
  target            20           113k
  src_fm            11           86.3k
  src_fm_fc         21           86.4k
  src_fm_fc_co      30           86.4k
  src_fm_fc_ms      42           86.8k
  src_fm_fc_ms_ff   43           86.8k

Each record below lists these six fields, one per line, in the order given above.
@Test public void testFetchAllByYear_2015() { log.info("*** BEGIN Test ***"); Integer year = 2015; List<SeasonData> results = classUnderTest.fetchAllByYear(year); assertEquals(68, results.size()); log.info("*** END Test ***"); }
@Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchAllByYear_2016() { log.info("*** BEGIN Test ***"); Integer year = 2016; List<SeasonData> results = classUnderTest.fetchAllByYear(year); assertEquals(68, results.size()); log.info("*** END Test ***"); }
@Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchAllByYear_2017() { log.info("*** BEGIN Test ***"); Integer year = 2017; List<SeasonData> results = classUnderTest.fetchAllByYear(year); assertEquals(68, results.size()); log.info("*** END Test ***"); }
@Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYearAndTeamName_2011() { log.info("*** BEGIN Test ***"); Integer year = 2011; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Kentucky", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYearAndTeamName_2012() { log.info("*** BEGIN Test ***"); Integer year = 2012; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Kentucky", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYearAndTeamName_2013() { log.info("*** BEGIN Test ***"); Integer year = 2013; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertNull(result); teamName = "Duke"; result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Duke", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYearAndTeamName_2014() { log.info("*** BEGIN Test ***"); Integer year = 2014; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Kentucky", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYearAndTeamName_2015() { log.info("*** BEGIN Test ***"); Integer year = 2015; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Kentucky", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYear_2011() { log.info("*** BEGIN Test ***"); Integer year = 2011; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testFetchByYearAndTeamName_2016() { log.info("*** BEGIN Test ***"); Integer year = 2016; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Kentucky", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchByYearAndTeamName_2017() { log.info("*** BEGIN Test ***"); Integer year = 2017; String teamName = "Kentucky"; SeasonData result = classUnderTest.fetchByYearAndTeamName(year, teamName); assertEquals("Kentucky", result.getTeamName()); log.info("*** END Test ***"); }
@Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public SeasonData fetchByYearAndTeamName(Integer year, String teamName) { SeasonData ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? AND t.team_name = ?"; Object[] args = { year, teamName }; List<SeasonData> results = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested team/year combination (" + teamName + "/" + year + ") does not exist in the DB!"); } return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test @DisplayName("Testing fetchByYear()") public void fetchByYear() { assertAll( () -> { Integer year = 2010; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(44), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(101), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2011; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(41), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(102), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2012; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(41), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(102), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2013; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(34), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(95), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2014; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(35), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(93), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2015; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(39), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(94), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2016; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(43), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(105), tournamentAnalytics.getMaxScore()); }, () -> { Integer year = 2017; TournamentAnalytics tournamentAnalytics = classUnderTest.fetchByYear(year); assertNotNull(tournamentAnalytics); assertEquals(Integer.valueOf(39), tournamentAnalytics.getMinScore()); assertEquals(Integer.valueOf(103), tournamentAnalytics.getMaxScore()); }); }
@Override public TournamentAnalytics fetchByYear(Integer year) { TournamentAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<TournamentAnalytics> results = getJdbcTemplate().query(sql, args, new TournamentAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
TournamentAnalyticsJdbcDao extends JdbcDaoSupport implements TournamentAnalyticsDao { @Override public TournamentAnalytics fetchByYear(Integer year) { TournamentAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<TournamentAnalytics> results = getJdbcTemplate().query(sql, args, new TournamentAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
TournamentAnalyticsJdbcDao extends JdbcDaoSupport implements TournamentAnalyticsDao { @Override public TournamentAnalytics fetchByYear(Integer year) { TournamentAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<TournamentAnalytics> results = getJdbcTemplate().query(sql, args, new TournamentAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } TournamentAnalyticsJdbcDao(DataSource dataSource); }
TournamentAnalyticsJdbcDao extends JdbcDaoSupport implements TournamentAnalyticsDao { @Override public TournamentAnalytics fetchByYear(Integer year) { TournamentAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<TournamentAnalytics> results = getJdbcTemplate().query(sql, args, new TournamentAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } TournamentAnalyticsJdbcDao(DataSource dataSource); @Override TournamentAnalytics fetchByYear(Integer year); }
TournamentAnalyticsJdbcDao extends JdbcDaoSupport implements TournamentAnalyticsDao { @Override public TournamentAnalytics fetchByYear(Integer year) { TournamentAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<TournamentAnalytics> results = getJdbcTemplate().query(sql, args, new TournamentAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } TournamentAnalyticsJdbcDao(DataSource dataSource); @Override TournamentAnalytics fetchByYear(Integer year); }
@Test public void testFetchAllByYear_2011() { log.info("*** BEGIN Test ***"); Integer year = 2011; List<TournamentResult> results = classUnderTest.fetchAllByYear(year); assertEquals(67, results.size()); log.info("*** END Test ***"); }
@Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
@Test public void testFetchAllByYear_2012() { log.info("*** BEGIN Test ***"); Integer year = 2012; List<TournamentResult> results = classUnderTest.fetchAllByYear(year); assertEquals(67, results.size()); log.info("*** END Test ***"); }
@Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
@Test public void testFetchAllByYear_2013() { log.info("*** BEGIN Test ***"); Integer year = 2013; List<TournamentResult> results = classUnderTest.fetchAllByYear(year); assertEquals(67, results.size()); log.info("*** END Test ***"); }
@Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
@Test public void testFetchAllByYear_2014() { log.info("*** BEGIN Test ***"); Integer year = 2014; List<TournamentResult> results = classUnderTest.fetchAllByYear(year); assertEquals(67, results.size()); log.info("*** END Test ***"); }
@Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
@Test public void testFetchAllByYear_2015() { log.info("*** BEGIN Test ***"); Integer year = 2015; List<TournamentResult> results = classUnderTest.fetchAllByYear(year); assertEquals(67, results.size()); log.info("*** END Test ***"); }
@Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
@Test public void testFetchAllByYear_2016() { log.info("*** BEGIN Test ***"); Integer year = 2016; List<TournamentResult> results = classUnderTest.fetchAllByYear(year); assertEquals(67, results.size()); log.info("*** END Test ***"); }
@Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
TournamentResultJdbcDao extends JdbcDaoSupport implements TournamentResultDao { @Override public List<TournamentResult> fetchAllByYear(Integer year) { String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ? ORDER BY t.game_date"; Object[] args = { year }; List<TournamentResult> results = getJdbcTemplate().query(sql, new TournamentResultRowMapper(), args); return results; } TournamentResultJdbcDao(DataSource dataSource); @Override List<TournamentResult> fetchAllByYear(Integer year); }
@Test public void testComputeYearsToTrain() { String[] args = { "2010", "2011", "2012", "2013,2014" }; Integer[] expectedYearsToTrain = { 2010, 2011, 2012 }; Integer[] actualYearsToTrain = MlpNetworkTrainer.computeYearsToTrain(args); assertArrayEquals(expectedYearsToTrain, actualYearsToTrain); }
protected static Integer[] computeYearsToTrain(String[] args) { Integer[] ret = new Integer[args.length - 1]; for (int aa = 0; aa < args.length - 1; aa++) { Integer year = Integer.valueOf(args[aa]); NetworkUtils.validateYear(year); ret[aa] = year; } return ret; }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToTrain(String[] args) { Integer[] ret = new Integer[args.length - 1]; for (int aa = 0; aa < args.length - 1; aa++) { Integer year = Integer.valueOf(args[aa]); NetworkUtils.validateYear(year); ret[aa] = year; } return ret; } }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToTrain(String[] args) { Integer[] ret = new Integer[args.length - 1]; for (int aa = 0; aa < args.length - 1; aa++) { Integer year = Integer.valueOf(args[aa]); NetworkUtils.validateYear(year); ret[aa] = year; } return ret; } MlpNetworkTrainer(ApplicationContext applicationContext); }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToTrain(String[] args) { Integer[] ret = new Integer[args.length - 1]; for (int aa = 0; aa < args.length - 1; aa++) { Integer year = Integer.valueOf(args[aa]); NetworkUtils.validateYear(year); ret[aa] = year; } return ret; } MlpNetworkTrainer(ApplicationContext applicationContext); SeasonData pullSeasonData(Integer year, String teamName); List<TournamentResult> pullTournamentResults(Integer year); SeasonAnalytics pullSeasonAnalytics(Integer year); static void main(String[] args); void go(String[] args); @SuppressWarnings("unchecked") @Override void handleLearningEvent(LearningEvent event); }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToTrain(String[] args) { Integer[] ret = new Integer[args.length - 1]; for (int aa = 0; aa < args.length - 1; aa++) { Integer year = Integer.valueOf(args[aa]); NetworkUtils.validateYear(year); ret[aa] = year; } return ret; } MlpNetworkTrainer(ApplicationContext applicationContext); SeasonData pullSeasonData(Integer year, String teamName); List<TournamentResult> pullTournamentResults(Integer year); SeasonAnalytics pullSeasonAnalytics(Integer year); static void main(String[] args); void go(String[] args); @SuppressWarnings("unchecked") @Override void handleLearningEvent(LearningEvent event); static final TransferFunctionType NEURON_PROPERTY_TRANSFER_FUNCTION; }
@Test public void testFetchByYear_2012() { log.info("*** BEGIN Test ***"); Integer year = 2012; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testComputeYearsToSimulate() { String[] args = { "2010", "2011", "2012", "2013,2014" }; Integer[] expectedYearsToSimulate = { 2013, 2014 }; Integer[] actualYearsToSimulate = MlpNetworkTrainer.computeYearsToSimulate(args); assertArrayEquals(expectedYearsToSimulate, actualYearsToSimulate); }
protected static Integer[] computeYearsToSimulate(String[] args) { Integer[] ret = null; String yearsToSimulate = args[args.length - 1]; StringTokenizer strtok = new StringTokenizer(yearsToSimulate, ","); List<Integer> yts = new ArrayList<>(); while (strtok.hasMoreTokens()) { Integer year = Integer.valueOf(strtok.nextToken()); NetworkUtils.validateYear(year); yts.add(year); } ret = yts.toArray(new Integer[yts.size()]); return ret; }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToSimulate(String[] args) { Integer[] ret = null; String yearsToSimulate = args[args.length - 1]; StringTokenizer strtok = new StringTokenizer(yearsToSimulate, ","); List<Integer> yts = new ArrayList<>(); while (strtok.hasMoreTokens()) { Integer year = Integer.valueOf(strtok.nextToken()); NetworkUtils.validateYear(year); yts.add(year); } ret = yts.toArray(new Integer[yts.size()]); return ret; } }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToSimulate(String[] args) { Integer[] ret = null; String yearsToSimulate = args[args.length - 1]; StringTokenizer strtok = new StringTokenizer(yearsToSimulate, ","); List<Integer> yts = new ArrayList<>(); while (strtok.hasMoreTokens()) { Integer year = Integer.valueOf(strtok.nextToken()); NetworkUtils.validateYear(year); yts.add(year); } ret = yts.toArray(new Integer[yts.size()]); return ret; } MlpNetworkTrainer(ApplicationContext applicationContext); }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToSimulate(String[] args) { Integer[] ret = null; String yearsToSimulate = args[args.length - 1]; StringTokenizer strtok = new StringTokenizer(yearsToSimulate, ","); List<Integer> yts = new ArrayList<>(); while (strtok.hasMoreTokens()) { Integer year = Integer.valueOf(strtok.nextToken()); NetworkUtils.validateYear(year); yts.add(year); } ret = yts.toArray(new Integer[yts.size()]); return ret; } MlpNetworkTrainer(ApplicationContext applicationContext); SeasonData pullSeasonData(Integer year, String teamName); List<TournamentResult> pullTournamentResults(Integer year); SeasonAnalytics pullSeasonAnalytics(Integer year); static void main(String[] args); void go(String[] args); @SuppressWarnings("unchecked") @Override void handleLearningEvent(LearningEvent event); }
MlpNetworkTrainer implements LearningEventListener { protected static Integer[] computeYearsToSimulate(String[] args) { Integer[] ret = null; String yearsToSimulate = args[args.length - 1]; StringTokenizer strtok = new StringTokenizer(yearsToSimulate, ","); List<Integer> yts = new ArrayList<>(); while (strtok.hasMoreTokens()) { Integer year = Integer.valueOf(strtok.nextToken()); NetworkUtils.validateYear(year); yts.add(year); } ret = yts.toArray(new Integer[yts.size()]); return ret; } MlpNetworkTrainer(ApplicationContext applicationContext); SeasonData pullSeasonData(Integer year, String teamName); List<TournamentResult> pullTournamentResults(Integer year); SeasonAnalytics pullSeasonAnalytics(Integer year); static void main(String[] args); void go(String[] args); @SuppressWarnings("unchecked") @Override void handleLearningEvent(LearningEvent event); static final TransferFunctionType NEURON_PROPERTY_TRANSFER_FUNCTION; }
@Test public void testFetchByYear_2013() { log.info("*** BEGIN Test ***"); Integer year = 2013; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testFetchByYear_2014() { log.info("*** BEGIN Test ***"); Integer year = 2014; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testFetchByYear_2015() { log.info("*** BEGIN Test ***"); Integer year = 2015; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testFetchByYear_2016() { log.info("*** BEGIN Test ***"); Integer year = 2016; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testFetchByYear_2017() { log.info("*** BEGIN Test ***"); Integer year = 2017; SeasonAnalytics seasonAnalytics = classUnderTest.fetchByYear(year); assertNotNull(seasonAnalytics); log.info("*** END Test ***"); }
@Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
SeasonAnalyticsJdbcDao extends JdbcDaoSupport implements SeasonAnalyticsDao { @Override public SeasonAnalytics fetchByYear(Integer year) { SeasonAnalytics ret = null; String sql = "SELECT * FROM " + TABLE_NAME + " WHERE year = ?"; Object[] args = { year }; List<SeasonAnalytics> results = getJdbcTemplate().query(sql, args, new SeasonAnalyticsRowMapper()); if (results.size() > 0) { if (results.size() > 1) { log.warn("Expected 1 result from query, instead got " + results.size() + "!"); } ret = results.get(0); } else { log.warn("Requested year (" + year + ") does not exist in the DB!"); } return ret; } SeasonAnalyticsJdbcDao(DataSource dataSource); @Override SeasonAnalytics fetchByYear(Integer year); }
@Test public void testFetchAllByYear_2011() { log.info("*** BEGIN Test ***"); Integer year = 2011; List<SeasonData> results = classUnderTest.fetchAllByYear(year); assertEquals(68, results.size()); log.info("*** END Test ***"); }
@Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void testFetchAllByYear_2012() { log.info("*** BEGIN Test ***"); Integer year = 2012; List<SeasonData> results = classUnderTest.fetchAllByYear(year); assertEquals(68, results.size()); log.info("*** END Test ***"); }
@Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
SeasonDataJdbcDao extends JdbcDaoSupport implements SeasonDataDao { @Override public List<SeasonData> fetchAllByYear(Integer year) { List<SeasonData> ret = null; String sql = "SELECT t.* FROM " + TABLE_NAME + " t WHERE t.year = ?"; Object[] args = { year }; ret = getJdbcTemplate().query(sql, args, new SeasonDataRowMapper()); return ret; } SeasonDataJdbcDao(DataSource dataSource); @Override List<SeasonData> fetchAllByYear(Integer year); @Override SeasonData fetchByYearAndTeamName(Integer year, String teamName); }
@Test public void parseCreatedIndexResponseTest() { String path = "/responses/create-index.json"; String index = HttpResponseParser.parseCreatedIndexResponse(getEntityFromResponse(path)); assertEquals("idxtest", index); }
static String parseCreatedIndexResponse(HttpEntity entity) { return JsonHandler.readValue(entity).get(Field.INDEX); }
HttpResponseParser { static String parseCreatedIndexResponse(HttpEntity entity) { return JsonHandler.readValue(entity).get(Field.INDEX); } }
HttpResponseParser { static String parseCreatedIndexResponse(HttpEntity entity) { return JsonHandler.readValue(entity).get(Field.INDEX); } }
HttpResponseParser { static String parseCreatedIndexResponse(HttpEntity entity) { return JsonHandler.readValue(entity).get(Field.INDEX); } }
HttpResponseParser { static String parseCreatedIndexResponse(HttpEntity entity) { return JsonHandler.readValue(entity).get(Field.INDEX); } }
@Test public void testTextDate() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "Fri Aug 12 15:19:20 EST 2011"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertNull(tr.getYear()); assertNull(tr.getMonth()); assertNull(tr.getDay()); assertNull(tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(1, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testExtraDashFn() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "1978-01-"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); SerializableFunction<String, String> fn = v -> { if (StringUtils.isNotEmpty(v)) { return v.charAt(v.length() - 1) == '-' ? v.substring(0, v.length() - 1) : v; } return v; }; TemporalInterpreter interpreter = TemporalInterpreter.builder().preprocessDateFn(fn).create(); interpreter.interpretTemporal(er, tr); assertEquals(1978, tr.getYear().intValue()); assertEquals(1, tr.getMonth().intValue()); assertNull(tr.getDay()); assertEquals("1978-01", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testDateExtraZ() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "2011-10-31Z"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); SerializableFunction<String, String> fn = v -> { if (StringUtils.isNotEmpty(v) && v.matches("([12]\\d{3}-(0[1-9]|1[0-2])-(0[1-9]|[12]\\d|3[01]))Z")) { return v.substring(0, v.length() - 1); } return v; }; TemporalInterpreter interpreter = TemporalInterpreter.builder().preprocessDateFn(fn).create(); interpreter.interpretTemporal(er, tr); assertEquals(2011, tr.getYear().intValue()); assertEquals(10, tr.getMonth().intValue()); assertEquals(31, tr.getDay().intValue()); assertEquals("2011-10-31", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testRangePartYear() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "1978/91"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); TemporalInterpreter interpreter = TemporalInterpreter.builder().create(); interpreter.interpretTemporal(er, tr); assertNull(tr.getYear()); assertNull(tr.getMonth()); assertNull(tr.getDay()); assertNull(tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(1, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testTextMonthYearFn() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "Aug-2005"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); SerializableFunction<String, String> fn = v -> { if (StringUtils.isNotEmpty(v)) { String t = v.replaceAll("Aug", "").replaceAll("-", ""); return t + "-08"; } return v; }; TemporalInterpreter interpreter = TemporalInterpreter.builder() .orderings(Arrays.asList(DMY_FORMATS)) .preprocessDateFn(fn) .create(); interpreter.interpretTemporal(er, tr); assertEquals(2005, tr.getYear().intValue()); assertEquals(8, tr.getMonth().intValue()); assertNull(tr.getDay()); assertEquals("2005-08", tr.getEventDate().getGte()); assertNull(tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testNoneIsoYmRangeAnd() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "05-02-1978 & 06-03-1979"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); SerializableFunction<String, String> fn = v -> { if (StringUtils.isNotEmpty(v)) { return v.replaceAll(" & ", "/"); } return v; }; TemporalInterpreter interpreter = TemporalInterpreter.builder() .orderings(Arrays.asList(DMY_FORMATS)) .preprocessDateFn(fn) .create(); interpreter.interpretTemporal(er, tr); assertEquals(1978, tr.getYear().intValue()); assertEquals(2, tr.getMonth().intValue()); assertEquals(5, tr.getDay().intValue()); assertEquals("1978-02-05", tr.getEventDate().getGte()); assertEquals("1979-03-06", tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testNoneIsoYmRangeTo() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "05-02-1978 to 06-03-1979"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); SerializableFunction<String, String> fn = v -> { if (StringUtils.isNotEmpty(v)) { return v.replaceAll(" to ", "/"); } return v; }; TemporalInterpreter interpreter = TemporalInterpreter.builder() .orderings(Arrays.asList(DMY_FORMATS)) .preprocessDateFn(fn) .create(); interpreter.interpretTemporal(er, tr); assertEquals(1978, tr.getYear().intValue()); assertEquals(2, tr.getMonth().intValue()); assertEquals(5, tr.getDay().intValue()); assertEquals("1978-02-05", tr.getEventDate().getGte()); assertEquals("1979-03-06", tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void testNoneIsoYmRange() { Map<String, String> map = new HashMap<>(); map.put(DwcTerm.eventDate.qualifiedName(), "2004-2-1 to 3-2"); ExtendedRecord er = ExtendedRecord.newBuilder().setId("1").setCoreTerms(map).build(); TemporalRecord tr = TemporalRecord.newBuilder().setId("1").build(); SerializableFunction<String, String> fn = v -> { if (StringUtils.isNotEmpty(v)) { return v.replaceAll(" to ", "/").replaceAll(" & ", "/"); } return v; }; TemporalInterpreter interpreter = TemporalInterpreter.builder().preprocessDateFn(fn).create(); interpreter.interpretTemporal(er, tr); assertEquals("2004-02-01", tr.getEventDate().getGte()); assertEquals("2004-03-02", tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); map.put(DwcTerm.eventDate.qualifiedName(), "2004-2-1 & 3-2"); interpreter.interpretTemporal(er, tr); assertEquals("2004-02-01", tr.getEventDate().getGte()); assertEquals("2004-03-02", tr.getEventDate().getLte()); assertEquals(0, tr.getIssues().getIssueList().size()); }
public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
TemporalInterpreter implements Serializable { public void interpretTemporal(ExtendedRecord er, TemporalRecord tr) { String year = extractValue(er, DwcTerm.year); String month = extractValue(er, DwcTerm.month); String day = extractValue(er, DwcTerm.day); String eventDate = extractValue(er, DwcTerm.eventDate); String normalizedEventDate = Optional.ofNullable(preprocessDateFn).map(x -> x.apply(eventDate)).orElse(eventDate); EventRange eventRange = temporalRangeParser.parse(year, month, day, normalizedEventDate); eventRange .getFrom() .map(AtomizedLocalDate::fromTemporalAccessor) .ifPresent( ald -> { tr.setYear(ald.getYear()); tr.setMonth(ald.getMonth()); tr.setDay(ald.getDay()); }); EventDate ed = new EventDate(); eventRange.getFrom().map(TemporalAccessor::toString).ifPresent(ed::setGte); eventRange.getTo().map(TemporalAccessor::toString).ifPresent(ed::setLte); tr.setEventDate(ed); addIssueSet(tr, eventRange.getIssues()); } @Builder(buildMethodName = "create") private TemporalInterpreter( List<DateComponentOrdering> orderings, SerializableFunction<String, String> preprocessDateFn); void interpretTemporal(ExtendedRecord er, TemporalRecord tr); void interpretModified(ExtendedRecord er, TemporalRecord tr); void interpretDateIdentified(ExtendedRecord er, TemporalRecord tr); }
@Test public void getInstitutionMatchNoneIssuesTest() { assertEquals( OccurrenceIssue.INSTITUTION_MATCH_NONE, GrscicollInterpreter.getInstitutionMatchNoneIssue(null)); assertEquals( OccurrenceIssue.AMBIGUOUS_INSTITUTION, GrscicollInterpreter.getInstitutionMatchNoneIssue(Status.AMBIGUOUS)); assertEquals( OccurrenceIssue.AMBIGUOUS_INSTITUTION, GrscicollInterpreter.getInstitutionMatchNoneIssue(Status.AMBIGUOUS_MACHINE_TAGS)); assertEquals( OccurrenceIssue.POSSIBLY_ON_LOAN, GrscicollInterpreter.getInstitutionMatchNoneIssue(Status.AMBIGUOUS_OWNER)); }
@VisibleForTesting static OccurrenceIssue getInstitutionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_INSTITUTION; } if (status == Status.AMBIGUOUS_OWNER) { return OccurrenceIssue.POSSIBLY_ON_LOAN; } return OccurrenceIssue.INSTITUTION_MATCH_NONE; }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getInstitutionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_INSTITUTION; } if (status == Status.AMBIGUOUS_OWNER) { return OccurrenceIssue.POSSIBLY_ON_LOAN; } return OccurrenceIssue.INSTITUTION_MATCH_NONE; } }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getInstitutionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_INSTITUTION; } if (status == Status.AMBIGUOUS_OWNER) { return OccurrenceIssue.POSSIBLY_ON_LOAN; } return OccurrenceIssue.INSTITUTION_MATCH_NONE; } }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getInstitutionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_INSTITUTION; } if (status == Status.AMBIGUOUS_OWNER) { return OccurrenceIssue.POSSIBLY_ON_LOAN; } return OccurrenceIssue.INSTITUTION_MATCH_NONE; } static BiConsumer<ExtendedRecord, GrscicollRecord> grscicollInterpreter( KeyValueStore<GrscicollLookupRequest, GrscicollLookupResponse> kvStore, MetadataRecord mdr); }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getInstitutionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_INSTITUTION; } if (status == Status.AMBIGUOUS_OWNER) { return OccurrenceIssue.POSSIBLY_ON_LOAN; } return OccurrenceIssue.INSTITUTION_MATCH_NONE; } static BiConsumer<ExtendedRecord, GrscicollRecord> grscicollInterpreter( KeyValueStore<GrscicollLookupRequest, GrscicollLookupResponse> kvStore, MetadataRecord mdr); }
@Test public void getCollectionMatchNoneIssuesTest() { assertEquals( OccurrenceIssue.COLLECTION_MATCH_NONE, GrscicollInterpreter.getCollectionMatchNoneIssue(null)); assertEquals( OccurrenceIssue.AMBIGUOUS_COLLECTION, GrscicollInterpreter.getCollectionMatchNoneIssue(Status.AMBIGUOUS)); assertEquals( OccurrenceIssue.AMBIGUOUS_COLLECTION, GrscicollInterpreter.getCollectionMatchNoneIssue(Status.AMBIGUOUS_MACHINE_TAGS)); assertEquals( OccurrenceIssue.INSTITUTION_COLLECTION_MISMATCH, GrscicollInterpreter.getCollectionMatchNoneIssue(Status.AMBIGUOUS_INSTITUTION_MISMATCH)); }
@VisibleForTesting static OccurrenceIssue getCollectionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_COLLECTION; } if (status == Status.AMBIGUOUS_INSTITUTION_MISMATCH) { return OccurrenceIssue.INSTITUTION_COLLECTION_MISMATCH; } return OccurrenceIssue.COLLECTION_MATCH_NONE; }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getCollectionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_COLLECTION; } if (status == Status.AMBIGUOUS_INSTITUTION_MISMATCH) { return OccurrenceIssue.INSTITUTION_COLLECTION_MISMATCH; } return OccurrenceIssue.COLLECTION_MATCH_NONE; } }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getCollectionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_COLLECTION; } if (status == Status.AMBIGUOUS_INSTITUTION_MISMATCH) { return OccurrenceIssue.INSTITUTION_COLLECTION_MISMATCH; } return OccurrenceIssue.COLLECTION_MATCH_NONE; } }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getCollectionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_COLLECTION; } if (status == Status.AMBIGUOUS_INSTITUTION_MISMATCH) { return OccurrenceIssue.INSTITUTION_COLLECTION_MISMATCH; } return OccurrenceIssue.COLLECTION_MATCH_NONE; } static BiConsumer<ExtendedRecord, GrscicollRecord> grscicollInterpreter( KeyValueStore<GrscicollLookupRequest, GrscicollLookupResponse> kvStore, MetadataRecord mdr); }
GrscicollInterpreter { @VisibleForTesting static OccurrenceIssue getCollectionMatchNoneIssue(Status status) { if (status == Status.AMBIGUOUS || status == Status.AMBIGUOUS_MACHINE_TAGS) { return OccurrenceIssue.AMBIGUOUS_COLLECTION; } if (status == Status.AMBIGUOUS_INSTITUTION_MISMATCH) { return OccurrenceIssue.INSTITUTION_COLLECTION_MISMATCH; } return OccurrenceIssue.COLLECTION_MATCH_NONE; } static BiConsumer<ExtendedRecord, GrscicollRecord> grscicollInterpreter( KeyValueStore<GrscicollLookupRequest, GrscicollLookupResponse> kvStore, MetadataRecord mdr); }
@Test(expected = IllegalArgumentException.class) public void createClientFromEmptyHostsTest() { EsClient.from(EsConfig.from()); }
public static EsClient from(EsConfig config) { return new EsClient(config); }
EsClient implements AutoCloseable { public static EsClient from(EsConfig config) { return new EsClient(config); } }
EsClient implements AutoCloseable { public static EsClient from(EsConfig config) { return new EsClient(config); } private EsClient(EsConfig config); }
EsClient implements AutoCloseable { public static EsClient from(EsConfig config) { return new EsClient(config); } private EsClient(EsConfig config); static EsClient from(EsConfig config); Response performGetRequest(String endpoint); Response performPutRequest(String endpoint, Map<String, String> params, HttpEntity body); Response performPostRequest(String endpoint, Map<String, String> params, HttpEntity body); Response performDeleteRequest(String endpoint); @Override @SneakyThrows void close(); }
EsClient implements AutoCloseable { public static EsClient from(EsConfig config) { return new EsClient(config); } private EsClient(EsConfig config); static EsClient from(EsConfig config); Response performGetRequest(String endpoint); Response performPutRequest(String endpoint, Map<String, String> params, HttpEntity body); Response performPostRequest(String endpoint, Map<String, String> params, HttpEntity body); Response performDeleteRequest(String endpoint); @Override @SneakyThrows void close(); }
@Test public void checkFuzzyPositiveTest() { SpeciesMatchRequest matchRequest = SpeciesMatchRequest.builder() .withKingdom("") .withPhylum("") .withClazz("") .withOrder("") .withFamily("") .withGenus("something") .build(); NameUsageMatch usageMatch = new NameUsageMatch(); Diagnostics diagnostics = new Diagnostics(); diagnostics.setMatchType(MatchType.FUZZY); usageMatch.setDiagnostics(diagnostics); boolean result = TaxonomyInterpreter.checkFuzzy(usageMatch, matchRequest); Assert.assertTrue(result); }
@VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } static BiConsumer<ExtendedRecord, TaxonRecord> taxonomyInterpreter( KeyValueStore<SpeciesMatchRequest, NameUsageMatch> kvStore); }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } static BiConsumer<ExtendedRecord, TaxonRecord> taxonomyInterpreter( KeyValueStore<SpeciesMatchRequest, NameUsageMatch> kvStore); }
@Test public void checkFuzzyNegativeTest() { SpeciesMatchRequest matchRequest = SpeciesMatchRequest.builder() .withKingdom("") .withPhylum("") .withClazz("") .withOrder("") .withFamily("something") .withGenus("something") .build(); NameUsageMatch usageMatch = new NameUsageMatch(); Diagnostics diagnostics = new Diagnostics(); diagnostics.setMatchType(MatchType.FUZZY); usageMatch.setDiagnostics(diagnostics); boolean result = TaxonomyInterpreter.checkFuzzy(usageMatch, matchRequest); Assert.assertFalse(result); }
@VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } static BiConsumer<ExtendedRecord, TaxonRecord> taxonomyInterpreter( KeyValueStore<SpeciesMatchRequest, NameUsageMatch> kvStore); }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } static BiConsumer<ExtendedRecord, TaxonRecord> taxonomyInterpreter( KeyValueStore<SpeciesMatchRequest, NameUsageMatch> kvStore); }
@Test public void checkFuzzyHighrankTest() { SpeciesMatchRequest matchRequest = SpeciesMatchRequest.builder() .withKingdom("") .withPhylum("") .withClazz("") .withOrder("") .withFamily("") .withGenus("something") .build(); NameUsageMatch usageMatch = new NameUsageMatch(); Diagnostics diagnostics = new Diagnostics(); diagnostics.setMatchType(MatchType.HIGHERRANK); usageMatch.setDiagnostics(diagnostics); boolean result = TaxonomyInterpreter.checkFuzzy(usageMatch, matchRequest); Assert.assertFalse(result); }
@VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } static BiConsumer<ExtendedRecord, TaxonRecord> taxonomyInterpreter( KeyValueStore<SpeciesMatchRequest, NameUsageMatch> kvStore); }
TaxonomyInterpreter { @VisibleForTesting protected static boolean checkFuzzy(NameUsageMatch usageMatch, SpeciesMatchRequest matchRequest) { boolean isFuzzy = MatchType.FUZZY == usageMatch.getDiagnostics().getMatchType(); boolean isEmptyTaxa = Strings.isNullOrEmpty(matchRequest.getKingdom()) && Strings.isNullOrEmpty(matchRequest.getPhylum()) && Strings.isNullOrEmpty(matchRequest.getClazz()) && Strings.isNullOrEmpty(matchRequest.getOrder()) && Strings.isNullOrEmpty(matchRequest.getFamily()); return isFuzzy && isEmptyTaxa; } static BiConsumer<ExtendedRecord, TaxonRecord> taxonomyInterpreter( KeyValueStore<SpeciesMatchRequest, NameUsageMatch> kvStore); }
@Test public void taxonRecordUsageTest() { String expected = "{" + "\"id\":\"777\"," + "\"all\":[\"T1\",\"Name\"]," + "\"verbatim\":{\"core\":{\"http://rs.tdwg.org/dwc/terms/taxonID\":\"T1\"," + "\"http://rs.tdwg.org/dwc/terms/scientificName\":\"Name\"}," + "\"extensions\":{}}," + "\"gbifClassification\":{\"taxonID\":\"T1\"," + "\"verbatimScientificName\":\"Name\"," + "\"usage\":{\"key\":1," + "\"name\":\"n\"," + "\"rank\":\"ABERRATION\"}," + "\"classification\":[{\"key\":1," + "\"name\":\"Name\"," + "\"rank\":\"CHEMOFORM\"}," + "{\"key\":2," + "\"name\":\"Name2\"," + "\"rank\":\"ABERRATION\"}]" + "," + "\"acceptedUsage\":{\"key\":2," + "\"name\":\"Name2\"," + "\"rank\":\"ABERRATION\"}," + "\"chemoformKey\":1," + "\"chemoform\":\"Name\"," + "\"aberrationKey\":2," + "\"aberration\":\"Name2\"," + "\"classificationPath\":\"_1\"," + "\"taxonKey\":[1,2]}," + "\"created\":\"1970-01-01T00:00\"}"; List<RankedName> rankedNameList = new ArrayList<>(); RankedName name = RankedName.newBuilder().setKey(1).setName("Name").setRank(Rank.CHEMOFORM).build(); RankedName name2 = RankedName.newBuilder().setKey(2).setName("Name2").setRank(Rank.ABERRATION).build(); rankedNameList.add(name); rankedNameList.add(name2); TaxonRecord taxonRecord = TaxonRecord.newBuilder() .setId("777") .setCreated(0L) .setUsage( RankedName.newBuilder().setKey(1).setName("n").setRank(Rank.ABERRATION).build()) .setClassification(rankedNameList) .setAcceptedUsage(name2) .build(); ExtendedRecord extendedRecord = ExtendedRecord.newBuilder() .setId("777") .setCoreTerms( new ImmutableMap.Builder<String, String>() .put(DwcTerm.taxonID.qualifiedName(), "T1") .put(DwcTerm.scientificName.qualifiedName(), "Name") .build()) .build(); String result = GbifJsonConverter.toStringPartialJson(extendedRecord, taxonRecord); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void extendedRecordSkipIssuesWithIdTest() { String expected = "{" + "\"id\":\"777\"," + "\"all\":[]," + "\"verbatim\":{\"core\":{}," + "\"extensions\":{}}}"; ExtendedRecord record = ExtendedRecord.newBuilder().setId("777").build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void temporalRecordSkipIssuesWithIdTest() { String expected = "{\"id\":\"777\",\"created\":\"1970-01-01T00:00\"}"; TemporalRecord record = TemporalRecord.newBuilder().setId("777").setCreated(0L).build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void locationRecordSkipIssuesWithIdTest() { String expected = "{\"id\":\"777\"}"; LocationRecord record = LocationRecord.newBuilder().setId("777").build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void locationFeaturesRecordSkipIssuesWithIdTest() { String expected = "{" + "\"id\":\"777\"," + "\"locationFeatureLayers\":[{\"key\":\"{awdawd}\"," + "\"value\":\"\\\"{\\\"wad\\\":\\\"adw\\\"}\\\"\"}]," + "\"created\":\"1970-01-01T00:00\"}"; LocationFeatureRecord record = LocationFeatureRecord.newBuilder() .setId("777") .setCreated(0L) .setItems(Collections.singletonMap("{awdawd}", "\"{\"wad\":\"adw\"}\"")) .build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void measurementOrFactRecordSkipIssuesWithIdTest() { String expected = "{\"id\":\"777\"," + "\"measurementOrFactItems\":[]," + "\"created\":\"1970-01-01T00:00\"}"; MeasurementOrFactRecord record = MeasurementOrFactRecord.newBuilder().setId("777").setCreated(0L).build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void amplificationRecordSkipIssuesWithIdEmptyTest() { String expected = "{\"id\":\"777\"," + "\"amplificationItems\":[]," + "\"created\":\"1970-01-01T00:00\"}"; AmplificationRecord record = AmplificationRecord.newBuilder().setId("777").setCreated(0L).build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test(expected = NullPointerException.class) public void createConfigNullHostsTest() { EsConfig.from((String[]) null); }
public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } private EsConfig(@NonNull String[] hostsAddresses); }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } private EsConfig(@NonNull String[] hostsAddresses); static EsConfig from(String... hostsAddresses); List<URL> getHosts(); String[] getRawHosts(); }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } private EsConfig(@NonNull String[] hostsAddresses); static EsConfig from(String... hostsAddresses); List<URL> getHosts(); String[] getRawHosts(); }
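For context on the EsConfig factory exercised by the null- and invalid-host tests, here is a minimal usage sketch. The package name and the host addresses are assumptions for illustration; only the from(String...), getHosts() and getRawHosts() members listed in the signatures above are used.

import java.net.URL;
import java.util.List;
import org.gbif.pipelines.estools.client.EsConfig; // assumed package of the class under test

public class EsConfigUsageSketch {
  public static void main(String[] args) {
    // Host addresses must be full URLs; a bare string such as "wrong url" is expected to
    // fail with "... is not a valid url" (see createConfigInvalidHostsTest further down).
    EsConfig config = EsConfig.from("http://localhost:9200", "http://localhost:9201");

    String[] raw = config.getRawHosts();   // the strings exactly as supplied
    List<URL> hosts = config.getHosts();   // the same addresses parsed into java.net.URL

    System.out.println(raw.length + " raw hosts, " + hosts.size() + " parsed hosts");
  }
}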
@Test public void multimediaRecordSkipIssuesWithIdTest() { String expected = "{\"id\":\"777\"}"; MultimediaRecord record = MultimediaRecord.newBuilder().setId("777").build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void multimediaRecordSkipIssuesWithIdEmptyTest() { String expected = "{\"id\":\"777\"," + "\"multimediaItems\":[{}]," + "\"mediaTypes\":[]," + "\"mediaLicenses\":[]}"; MultimediaRecord record = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems(Collections.singletonList(Multimedia.newBuilder().build())) .build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void amplificationRecordSkipIssuesWithIdTest() { String expected = "{\"id\":\"777\"," + "\"amplificationItems\":[{\"name\":\"n\"," + "\"identity\":3," + "\"appliedScientificName\":" + "\"sn\"," + "\"matchType\":\"mt\"," + "\"bitScore\":1," + "\"expectValue\":2," + "\"querySequence\":\"qs\"," + "\"subjectSequence\":" + "\"ss\"," + "\"qstart\":5," + "\"qend\":4," + "\"sstart\":8," + "\"send\":6," + "\"distanceToBestMatch\":\"dm\"," + "\"sequenceLength\":7}]}"; AmplificationRecord record = AmplificationRecord.newBuilder() .setId("777") .setAmplificationItems( Arrays.asList( Amplification.newBuilder() .setBlastResult( BlastResult.newBuilder() .setAppliedScientificName("sn") .setBitScore(1) .setDistanceToBestMatch("dm") .setExpectValue(2) .setIdentity(3) .setMatchType("mt") .setName("n") .setQend(4) .setQstart(5) .setQuerySequence("qs") .setSend(6) .setSequenceLength(7) .setSstart(8) .setSubjectSequence("ss") .build()) .build(), Amplification.newBuilder().build())) .build(); String result = GbifJsonConverter.toStringPartialJson(record); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test public void multimediaRecordTest() { String expected = "{\"id\":\"777\"," + "\"multimediaItems\":[{\"type\":\"StillImage\"," + "\"format\":\"image/jpeg\"," + "\"license\":\"somelicense\"}," + "{\"type\":\"MovingImage\"," + "\"format\":\"video/mp4\"," + "\"license\":\"somelicense\"}]," + "\"mediaTypes\":[\"StillImage\",\"MovingImage\"]," + "\"mediaLicenses\":[\"somelicense\"]}"; Multimedia stillImage = new Multimedia(); stillImage.setType(MediaType.StillImage.name()); stillImage.setFormat("image/jpeg"); stillImage.setLicense("somelicense"); Multimedia movingImage = new Multimedia(); movingImage.setType(MediaType.MovingImage.name()); movingImage.setFormat("video/mp4"); movingImage.setLicense("somelicense"); MultimediaRecord multimediaRecord = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems(Arrays.asList(stillImage, movingImage)) .build(); String result = GbifJsonConverter.toStringPartialJson(multimediaRecord); assertEquals(expected, result); assertTrue(JsonValidationUtils.isValid(result)); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
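As a small aside to the converter tests above: since toPartialJson builds a Jackson ObjectNode, the string returned by toStringPartialJson can always be re-parsed with a plain ObjectMapper. This is a hedged sketch (package names assumed), not part of the tested API.

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.gbif.pipelines.core.converters.GbifJsonConverter; // assumed package
import org.gbif.pipelines.io.avro.MultimediaRecord;          // assumed package

public class GbifJsonConverterUsageSketch {
  public static void main(String[] args) throws Exception {
    MultimediaRecord multimedia = MultimediaRecord.newBuilder().setId("777").build();

    // Same call the tests above make; per multimediaRecordSkipIssuesWithIdTest the
    // result for an id-only record is {"id":"777"}.
    String json = GbifJsonConverter.toStringPartialJson(multimedia);

    // Re-parsing with Jackson is an alternative check to JsonValidationUtils.isValid.
    JsonNode reparsed = new ObjectMapper().readTree(json);
    System.out.println(reparsed.get("id").asText()); // 777
  }
}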
@Test public void grscicollRecordTest() { String expected = "{\"id\":\"1\"," + "\"institutionKey\":\"cb0098db-6ff6-4a5d-ad29-51348d114e41\"," + "\"collectionKey\":\"5c692584-d517-48e8-93a8-a916ba131d9b\"" + "}"; Institution institution = Institution.newBuilder() .setCode("I1") .setKey("cb0098db-6ff6-4a5d-ad29-51348d114e41") .setName("Institution1") .build(); InstitutionMatch institutionMatch = InstitutionMatch.newBuilder() .setInstitution(institution) .setMatchType(MatchType.EXACT.name()) .build(); Collection collection = Collection.newBuilder() .setKey("5c692584-d517-48e8-93a8-a916ba131d9b") .setCode("C1") .setName("Collection1") .setInstitutionKey("cb0098db-6ff6-4a5d-ad29-51348d114e41") .build(); CollectionMatch collectionMatch = CollectionMatch.newBuilder().setCollection(collection).setMatchType("FUZZY").build(); GrscicollRecord record = GrscicollRecord.newBuilder() .setId("1") .setInstitutionMatch(institutionMatch) .setCollectionMatch(collectionMatch) .build(); String result = GbifJsonConverter.toStringPartialJson(record); Assert.assertEquals(expected, result); }
public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
GbifJsonConverter { public static String toStringPartialJson(SpecificRecordBase... records) { return toPartialJson(records).toString(); } static ObjectNode toJson(SpecificRecordBase... records); static ObjectNode toPartialJson(SpecificRecordBase... records); static String toStringJson(SpecificRecordBase... records); static String toStringPartialJson(SpecificRecordBase... records); ObjectNode toJson(); @Override String toString(); }
@Test(expected = NullPointerException.class) public void nullMergeTest() { MultimediaConverter.merge(null, null, null); }
public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
@Test public void emptyMergeTest() { MultimediaRecord mr = MultimediaRecord.newBuilder().setId("777").build(); ImageRecord ir = ImageRecord.newBuilder().setId("777").build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("777").build(); MultimediaRecord result = MultimediaRecord.newBuilder().setId("777").build(); MultimediaRecord record = MultimediaConverter.merge(mr, ir, ar); Assert.assertEquals(result, record); }
public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
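Before the non-trivial merge tests that follow, a hedged usage sketch of MultimediaConverter.merge: all three arguments are @NonNull, so id-only records stand in for "absent" inputs. Package names and the example identifier are assumptions; the StillImage expectation follows duplicateTest further down.

import java.util.Collections;
import org.gbif.pipelines.core.converters.MultimediaConverter; // assumed package
import org.gbif.pipelines.io.avro.AudubonRecord;               // assumed package for the Avro records
import org.gbif.pipelines.io.avro.Image;
import org.gbif.pipelines.io.avro.ImageRecord;
import org.gbif.pipelines.io.avro.MultimediaRecord;

public class MultimediaConverterUsageSketch {
  public static void main(String[] args) {
    // merge(...) rejects nulls, so empty (id-only) records are passed instead.
    MultimediaRecord mr = MultimediaRecord.newBuilder().setId("777").build();
    AudubonRecord ar = AudubonRecord.newBuilder().setId("777").build();
    ImageRecord ir = ImageRecord.newBuilder()
        .setId("777")
        .setImageItems(Collections.singletonList(
            Image.newBuilder().setIdentifier("http://example.org/image.jpg").build())) // hypothetical URL
        .build();

    // Image items are folded into the merged multimedia list; judging by duplicateTest,
    // items that originate from an ImageRecord carry the type "StillImage".
    MultimediaRecord merged = MultimediaConverter.merge(mr, ir, ar);
    System.out.println(merged.getMultimediaItems()); // expected: one StillImage item
  }
}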
@Test public void multimediaRecordTest() { MultimediaRecord mr = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems( Collections.singletonList( Multimedia.newBuilder() .setIdentifier("http: .setReferences("http: .build())) .build(); ImageRecord ir = ImageRecord.newBuilder().setId("777").build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("777").build(); MultimediaRecord result = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems( Collections.singletonList( Multimedia.newBuilder() .setIdentifier("http: .setReferences("http: .build())) .build(); MultimediaRecord record = MultimediaConverter.merge(mr, ir, ar); Assert.assertEquals(result, record); }
public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
@Test public void duplicateTest() { MultimediaRecord mr = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems( Collections.singletonList( Multimedia.newBuilder() .setIdentifier("http: .setReferences("http: .build())) .build(); ImageRecord ir = ImageRecord.newBuilder() .setId("777") .setImageItems( Collections.singletonList( Image.newBuilder() .setIdentifier("http: .setReferences("http: .build())) .build(); AudubonRecord ar = AudubonRecord.newBuilder().setId("777").build(); MultimediaRecord result = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems( Collections.singletonList( Multimedia.newBuilder() .setIdentifier("http: .setReferences("http: .setType("StillImage") .build())) .build(); MultimediaRecord record = MultimediaConverter.merge(mr, ir, ar); Assert.assertEquals(result, record); }
public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
@Test public void mergeTest() { MultimediaRecord mr = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems( Arrays.asList( Multimedia.newBuilder() .setIdentifier("http: .setReferences("http: .setCreated("2010-10-10") .setLicense("license1") .build(), Multimedia.newBuilder().setIdentifier("http: .setIssues(IssueRecord.newBuilder().setIssueList(Arrays.asList("ONE", "THREE")).build()) .build(); ImageRecord ir = ImageRecord.newBuilder() .setId("777") .setImageItems( Collections.singletonList( Image.newBuilder() .setIdentifier("http: .setReferences("http: .setCreated("2010-11-11") .setLicense("license2") .build())) .setIssues(IssueRecord.newBuilder().setIssueList(Arrays.asList("TWO", "THREE")).build()) .build(); AudubonRecord ar = AudubonRecord.newBuilder() .setId("777") .setAudubonItems( Collections.singletonList( Audubon.newBuilder() .setAccessUri("http: .setCreateDate("2010-09-09") .setRights("license3") .build())) .build(); MultimediaRecord result = MultimediaRecord.newBuilder() .setId("777") .setMultimediaItems( Arrays.asList( Multimedia.newBuilder() .setIdentifier("http: .setReferences("http: .setCreated("2010-10-10") .setLicense("license1") .build(), Multimedia.newBuilder() .setType(MediaType.StillImage.name()) .setIdentifier("http: .setReferences("http: .setCreated("2010-11-11") .setLicense("license2") .build(), Multimedia.newBuilder() .setIdentifier("http: .setCreated("2010-09-09") .setLicense("license3") .build())) .setIssues( IssueRecord.newBuilder().setIssueList(Arrays.asList("ONE", "TWO", "THREE")).build()) .build(); MultimediaRecord record = MultimediaConverter.merge(mr, ir, ar); Assert.assertEquals(result, record); }
public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
MultimediaConverter { public static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar) { MultimediaRecord record = MultimediaRecord.newBuilder().setId(mr.getId()).setCreated(mr.getCreated()).build(); boolean isMrEmpty = mr.getMultimediaItems() == null && mr.getIssues().getIssueList().isEmpty(); boolean isIrEmpty = ir.getImageItems() == null && ir.getIssues().getIssueList().isEmpty(); boolean isArEmpty = ar.getAudubonItems() == null && ar.getIssues().getIssueList().isEmpty(); if (isMrEmpty && isIrEmpty && isArEmpty) { return record; } Set<String> issues = new HashSet<>(); issues.addAll(mr.getIssues().getIssueList()); issues.addAll(ir.getIssues().getIssueList()); issues.addAll(ar.getIssues().getIssueList()); Map<String, Multimedia> multimediaMap = new HashMap<>(); putAllAudubonRecord(multimediaMap, ar); putAllImageRecord(multimediaMap, ir); putAllMultimediaRecord(multimediaMap, mr); if (!multimediaMap.isEmpty()) { record.setMultimediaItems(new ArrayList<>(multimediaMap.values())); } if (!issues.isEmpty()) { record.getIssues().getIssueList().addAll(issues); } return record; } static MultimediaRecord merge( @NonNull MultimediaRecord mr, @NonNull ImageRecord ir, @NonNull AudubonRecord ar); }
@Test(expected = IllegalArgumentException.class) public void createConfigInvalidHostsTest() { EsConfig.from("wrong url"); thrown.expectMessage(CoreMatchers.containsString("is not a valid url")); }
public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } private EsConfig(@NonNull String[] hostsAddresses); }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } private EsConfig(@NonNull String[] hostsAddresses); static EsConfig from(String... hostsAddresses); List<URL> getHosts(); String[] getRawHosts(); }
EsConfig { public static EsConfig from(String... hostsAddresses) { return new EsConfig(hostsAddresses); } private EsConfig(@NonNull String[] hostsAddresses); static EsConfig from(String... hostsAddresses); List<URL> getHosts(); String[] getRawHosts(); }
@Test public void extendedRecordMapperTest() { Map<String, String> coreTerms = new HashMap<>(); coreTerms.put(DwcTerm.verbatimDepth.simpleName(), "1.0"); coreTerms.put(DwcTerm.collectionCode.simpleName(), "C1"); coreTerms.put(DwcTerm.institutionCode.simpleName(), "I1"); coreTerms.put(DwcTerm.catalogNumber.simpleName(), "CN1"); coreTerms.put(DwcTerm.class_.simpleName(), "classs"); coreTerms.put(DcTerm.format.simpleName(), "format"); coreTerms.put(DwcTerm.order.simpleName(), "order"); coreTerms.put(DwcTerm.group.simpleName(), "group"); coreTerms.put(DcTerm.date.simpleName(), "26/06/2019"); coreTerms.put( DwcTerm.basisOfRecord.simpleName(), BasisOfRecord.HUMAN_OBSERVATION.name().toLowerCase()); coreTerms.put(DwcTerm.lifeStage.simpleName(), "adultss"); coreTerms.put(DwcTerm.sampleSizeUnit.simpleName(), "unit"); coreTerms.put(DwcTerm.sampleSizeValue.simpleName(), "value"); coreTerms.put(DwcTerm.organismQuantity.simpleName(), "quantity"); coreTerms.put(DwcTerm.organismQuantityType.simpleName(), "type"); coreTerms.put(DwcTerm.recordedBy.simpleName(), "recordedBy"); coreTerms.put(DwcTerm.identifiedBy.simpleName(), "identifiedBy"); coreTerms.put(GbifTerm.identifiedByID.simpleName(), "13123|21312"); coreTerms.put(GbifTerm.recordedByID.simpleName(), "53453|5785"); coreTerms.put(DwcTerm.occurrenceStatus.simpleName(), OccurrenceStatus.ABSENT.name()); coreTerms.put(DwcTerm.individualCount.simpleName(), "0"); ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId("1").setCoreTerms(coreTerms).build(); MetadataRecord metadataRecord = MetadataRecord.newBuilder() .setId("1") .setLicense(License.CC_BY_4_0.name()) .setHostingOrganizationKey("hostOrgKey") .build(); List<AgentIdentifier> agentIds = Collections.singletonList( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.OTHER.name()) .setValue("13123") .build()); BasicRecord basicRecord = BasicRecord.newBuilder() .setId("1") .setCreated(1L) .setLicense(License.CC0_1_0.name()) .setIdentifiedByIds(agentIds) .setRecordedByIds(agentIds) .setIndividualCount(0) .setBasisOfRecord(BasisOfRecord.HUMAN_OBSERVATION.name()) .setOccurrenceStatus(OccurrenceStatus.ABSENT.name()) .build(); List<RankedName> classification = new ArrayList<>(); classification.add(RankedName.newBuilder().setName("CLASS").setRank(Rank.CLASS).build()); classification.add(RankedName.newBuilder().setName("ORDER").setRank(Rank.ORDER).build()); TaxonRecord taxonRecord = TaxonRecord.newBuilder() .setCreated( 2L) .setClassification(classification) .build(); TemporalRecord temporalRecord = TemporalRecord.newBuilder() .setId("1") .setDateIdentified("2019-11-12T13:24:56.963591") .setModified("2019-04-15T17:17") .build(); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord( basicRecord, metadataRecord, taxonRecord, temporalRecord, extendedRecord); Assert.assertEquals("1.0", hdfsRecord.getVerbatimdepth()); Assert.assertEquals("C1", hdfsRecord.getCollectioncode()); Assert.assertEquals("C1", hdfsRecord.getVCollectioncode()); Assert.assertEquals("I1", hdfsRecord.getInstitutioncode()); Assert.assertEquals("I1", hdfsRecord.getVInstitutioncode()); Assert.assertEquals("CN1", hdfsRecord.getCatalognumber()); Assert.assertEquals("CN1", hdfsRecord.getVCatalognumber()); Assert.assertEquals("1", hdfsRecord.getIdentifier()); Assert.assertEquals("1", hdfsRecord.getVIdentifier()); Assert.assertEquals("quantity", hdfsRecord.getVOrganismquantity()); Assert.assertEquals("type", hdfsRecord.getVOrganismquantitytype()); Assert.assertEquals("unit", hdfsRecord.getVSamplesizeunit()); 
Assert.assertEquals("value", hdfsRecord.getVSamplesizevalue()); Assert.assertEquals("recordedBy", hdfsRecord.getVRecordedby()); Assert.assertEquals("identifiedBy", hdfsRecord.getIdentifiedby()); Assert.assertEquals("13123|21312", hdfsRecord.getVIdentifiedbyid()); Assert.assertEquals("53453|5785", hdfsRecord.getVRecordedbyid()); Assert.assertEquals(OccurrenceStatus.ABSENT.name(), hdfsRecord.getVOccurrencestatus()); Assert.assertEquals("0", hdfsRecord.getVIndividualcount()); Assert.assertEquals("CLASS", hdfsRecord.getClass$()); Assert.assertEquals("classs", hdfsRecord.getVClass()); Assert.assertEquals("format", hdfsRecord.getFormat()); Assert.assertEquals("format", hdfsRecord.getVFormat()); Assert.assertEquals("ORDER", hdfsRecord.getOrder()); Assert.assertEquals("order", hdfsRecord.getVOrder()); Assert.assertEquals("group", hdfsRecord.getGroup()); Assert.assertEquals("group", hdfsRecord.getVGroup()); Assert.assertEquals("26/06/2019", hdfsRecord.getDate()); Assert.assertEquals("26/06/2019", hdfsRecord.getVDate()); Assert.assertNotNull(hdfsRecord.getDateidentified()); Assert.assertNotNull(hdfsRecord.getModified()); Assert.assertEquals(BasisOfRecord.HUMAN_OBSERVATION.name(), hdfsRecord.getBasisofrecord()); Assert.assertEquals( BasisOfRecord.HUMAN_OBSERVATION.name().toLowerCase(), hdfsRecord.getVBasisofrecord()); Assert.assertNull(hdfsRecord.getLifestage()); Assert.assertEquals("adultss", hdfsRecord.getVLifestage()); Assert.assertEquals(taxonRecord.getCreated(), hdfsRecord.getLastparsed()); Assert.assertEquals(taxonRecord.getCreated(), hdfsRecord.getLastinterpreted()); Assert.assertEquals(License.CC0_1_0.name(), hdfsRecord.getLicense()); Assert.assertEquals(Collections.singletonList("13123"), hdfsRecord.getRecordedbyid()); Assert.assertEquals(Collections.singletonList("13123"), hdfsRecord.getIdentifiedbyid()); Assert.assertEquals(OccurrenceStatus.ABSENT.name(), hdfsRecord.getOccurrencestatus()); Assert.assertEquals(Integer.valueOf(0), hdfsRecord.getIndividualcount()); Assert.assertEquals( metadataRecord.getHostingOrganizationKey(), hdfsRecord.getHostingorganizationkey()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> extendedRecordMapper() { return (hr, sr) -> { ExtendedRecord er = (ExtendedRecord) sr; er.getCoreTerms() .forEach( (k, v) -> Optional.ofNullable(TERM_FACTORY.findTerm(k)) .ifPresent( term -> { if (TermUtils.verbatimTerms().contains(term)) { Optional.ofNullable(verbatimSchemaField(term)) .ifPresent( field -> { String verbatimField = "V" + field.name().substring(2, 3).toUpperCase() + field.name().substring(3); setHdfsRecordField(hr, field, verbatimField, v); }); } if (!TermUtils.isInterpretedSourceTerm(term)) { Optional.ofNullable(interpretedSchemaField(term)) .ifPresent( field -> { if (Objects.isNull(hr.get(field.name()))) { String interpretedFieldname = field.name(); if (DcTerm.abstract_ == term) { interpretedFieldname = "abstract$"; } else if (DwcTerm.class_ == term) { interpretedFieldname = "class$"; } else if (DwcTerm.group == term) { interpretedFieldname = "group"; } else if (DwcTerm.order == term) { interpretedFieldname = "order"; } else if (DcTerm.date == term) { interpretedFieldname = "date"; } else if (DcTerm.format == term) { interpretedFieldname = "format"; } setHdfsRecordField(hr, field, interpretedFieldname, v); } }); } })); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> extendedRecordMapper() { return (hr, sr) -> { ExtendedRecord er = (ExtendedRecord) sr; er.getCoreTerms() .forEach( (k, v) -> Optional.ofNullable(TERM_FACTORY.findTerm(k)) .ifPresent( term -> { if (TermUtils.verbatimTerms().contains(term)) { Optional.ofNullable(verbatimSchemaField(term)) .ifPresent( field -> { String verbatimField = "V" + field.name().substring(2, 3).toUpperCase() + field.name().substring(3); setHdfsRecordField(hr, field, verbatimField, v); }); } if (!TermUtils.isInterpretedSourceTerm(term)) { Optional.ofNullable(interpretedSchemaField(term)) .ifPresent( field -> { if (Objects.isNull(hr.get(field.name()))) { String interpretedFieldname = field.name(); if (DcTerm.abstract_ == term) { interpretedFieldname = "abstract$"; } else if (DwcTerm.class_ == term) { interpretedFieldname = "class$"; } else if (DwcTerm.group == term) { interpretedFieldname = "group"; } else if (DwcTerm.order == term) { interpretedFieldname = "order"; } else if (DcTerm.date == term) { interpretedFieldname = "date"; } else if (DcTerm.format == term) { interpretedFieldname = "format"; } setHdfsRecordField(hr, field, interpretedFieldname, v); } }); } })); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> extendedRecordMapper() { return (hr, sr) -> { ExtendedRecord er = (ExtendedRecord) sr; er.getCoreTerms() .forEach( (k, v) -> Optional.ofNullable(TERM_FACTORY.findTerm(k)) .ifPresent( term -> { if (TermUtils.verbatimTerms().contains(term)) { Optional.ofNullable(verbatimSchemaField(term)) .ifPresent( field -> { String verbatimField = "V" + field.name().substring(2, 3).toUpperCase() + field.name().substring(3); setHdfsRecordField(hr, field, verbatimField, v); }); } if (!TermUtils.isInterpretedSourceTerm(term)) { Optional.ofNullable(interpretedSchemaField(term)) .ifPresent( field -> { if (Objects.isNull(hr.get(field.name()))) { String interpretedFieldname = field.name(); if (DcTerm.abstract_ == term) { interpretedFieldname = "abstract$"; } else if (DwcTerm.class_ == term) { interpretedFieldname = "class$"; } else if (DwcTerm.group == term) { interpretedFieldname = "group"; } else if (DwcTerm.order == term) { interpretedFieldname = "order"; } else if (DcTerm.date == term) { interpretedFieldname = "date"; } else if (DcTerm.format == term) { interpretedFieldname = "format"; } setHdfsRecordField(hr, field, interpretedFieldname, v); } }); } })); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> extendedRecordMapper() { return (hr, sr) -> { ExtendedRecord er = (ExtendedRecord) sr; er.getCoreTerms() .forEach( (k, v) -> Optional.ofNullable(TERM_FACTORY.findTerm(k)) .ifPresent( term -> { if (TermUtils.verbatimTerms().contains(term)) { Optional.ofNullable(verbatimSchemaField(term)) .ifPresent( field -> { String verbatimField = "V" + field.name().substring(2, 3).toUpperCase() + field.name().substring(3); setHdfsRecordField(hr, field, verbatimField, v); }); } if (!TermUtils.isInterpretedSourceTerm(term)) { Optional.ofNullable(interpretedSchemaField(term)) .ifPresent( field -> { if (Objects.isNull(hr.get(field.name()))) { String interpretedFieldname = field.name(); if (DcTerm.abstract_ == term) { interpretedFieldname = "abstract$"; } else if (DwcTerm.class_ == term) { interpretedFieldname = "class$"; } else if (DwcTerm.group == term) { interpretedFieldname = "group"; } else if (DwcTerm.order == term) { interpretedFieldname = "order"; } else if (DcTerm.date == term) { interpretedFieldname = "date"; } else if (DcTerm.format == term) { interpretedFieldname = "format"; } setHdfsRecordField(hr, field, interpretedFieldname, v); } }); } })); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> extendedRecordMapper() { return (hr, sr) -> { ExtendedRecord er = (ExtendedRecord) sr; er.getCoreTerms() .forEach( (k, v) -> Optional.ofNullable(TERM_FACTORY.findTerm(k)) .ifPresent( term -> { if (TermUtils.verbatimTerms().contains(term)) { Optional.ofNullable(verbatimSchemaField(term)) .ifPresent( field -> { String verbatimField = "V" + field.name().substring(2, 3).toUpperCase() + field.name().substring(3); setHdfsRecordField(hr, field, verbatimField, v); }); } if (!TermUtils.isInterpretedSourceTerm(term)) { Optional.ofNullable(interpretedSchemaField(term)) .ifPresent( field -> { if (Objects.isNull(hr.get(field.name()))) { String interpretedFieldname = field.name(); if (DcTerm.abstract_ == term) { interpretedFieldname = "abstract$"; } else if (DwcTerm.class_ == term) { interpretedFieldname = "class$"; } else if (DwcTerm.group == term) { interpretedFieldname = "group"; } else if (DwcTerm.order == term) { interpretedFieldname = "order"; } else if (DcTerm.date == term) { interpretedFieldname = "date"; } else if (DcTerm.format == term) { interpretedFieldname = "format"; } setHdfsRecordField(hr, field, interpretedFieldname, v); } }); } })); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
@Test public void multimediaMapperTest() { MultimediaRecord multimediaRecord = new MultimediaRecord(); multimediaRecord.setId("1"); Multimedia multimedia = new Multimedia(); multimedia.setType(MediaType.StillImage.name()); multimedia.setLicense(License.CC_BY_4_0.name()); multimedia.setSource("image.jpg"); multimediaRecord.setMultimediaItems(Collections.singletonList(multimedia)); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(multimediaRecord); List<Multimedia> media = MediaSerDeserUtils.fromJson(hdfsRecord.getExtMultimedia()); Assert.assertEquals(media.get(0), multimedia); Assert.assertTrue(hdfsRecord.getMediatype().contains(MediaType.StillImage.name())); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> multimediaMapper() { return (hr, sr) -> { MultimediaRecord mr = (MultimediaRecord) sr; List<String> mediaTypes = mr.getMultimediaItems().stream() .filter(i -> !Strings.isNullOrEmpty(i.getType())) .map(Multimedia::getType) .map(TextNode::valueOf) .map(TextNode::asText) .collect(Collectors.toList()); hr.setExtMultimedia(MediaSerDeserUtils.toJson(mr.getMultimediaItems())); setCreatedIfGreater(hr, mr.getCreated()); hr.setMediatype(mediaTypes); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> multimediaMapper() { return (hr, sr) -> { MultimediaRecord mr = (MultimediaRecord) sr; List<String> mediaTypes = mr.getMultimediaItems().stream() .filter(i -> !Strings.isNullOrEmpty(i.getType())) .map(Multimedia::getType) .map(TextNode::valueOf) .map(TextNode::asText) .collect(Collectors.toList()); hr.setExtMultimedia(MediaSerDeserUtils.toJson(mr.getMultimediaItems())); setCreatedIfGreater(hr, mr.getCreated()); hr.setMediatype(mediaTypes); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> multimediaMapper() { return (hr, sr) -> { MultimediaRecord mr = (MultimediaRecord) sr; List<String> mediaTypes = mr.getMultimediaItems().stream() .filter(i -> !Strings.isNullOrEmpty(i.getType())) .map(Multimedia::getType) .map(TextNode::valueOf) .map(TextNode::asText) .collect(Collectors.toList()); hr.setExtMultimedia(MediaSerDeserUtils.toJson(mr.getMultimediaItems())); setCreatedIfGreater(hr, mr.getCreated()); hr.setMediatype(mediaTypes); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> multimediaMapper() { return (hr, sr) -> { MultimediaRecord mr = (MultimediaRecord) sr; List<String> mediaTypes = mr.getMultimediaItems().stream() .filter(i -> !Strings.isNullOrEmpty(i.getType())) .map(Multimedia::getType) .map(TextNode::valueOf) .map(TextNode::asText) .collect(Collectors.toList()); hr.setExtMultimedia(MediaSerDeserUtils.toJson(mr.getMultimediaItems())); setCreatedIfGreater(hr, mr.getCreated()); hr.setMediatype(mediaTypes); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> multimediaMapper() { return (hr, sr) -> { MultimediaRecord mr = (MultimediaRecord) sr; List<String> mediaTypes = mr.getMultimediaItems().stream() .filter(i -> !Strings.isNullOrEmpty(i.getType())) .map(Multimedia::getType) .map(TextNode::valueOf) .map(TextNode::asText) .collect(Collectors.toList()); hr.setExtMultimedia(MediaSerDeserUtils.toJson(mr.getMultimediaItems())); setCreatedIfGreater(hr, mr.getCreated()); hr.setMediatype(mediaTypes); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
@Test public void basicRecordMapperTest() { long now = new Date().getTime(); BasicRecord basicRecord = new BasicRecord(); basicRecord.setBasisOfRecord(BasisOfRecord.HUMAN_OBSERVATION.name()); basicRecord.setSex(Sex.HERMAPHRODITE.name()); basicRecord.setIndividualCount(99); basicRecord.setLifeStage(LifeStage.GAMETE.name()); basicRecord.setTypeStatus(TypeStatus.ALLOTYPE.name()); basicRecord.setTypifiedName("noName"); basicRecord.setEstablishmentMeans(EstablishmentMeans.INVASIVE.name()); basicRecord.setCreated(now); basicRecord.setGbifId(1L); basicRecord.setOrganismQuantity(2d); basicRecord.setOrganismQuantityType("type"); basicRecord.setSampleSizeUnit("unit"); basicRecord.setSampleSizeValue(2d); basicRecord.setRelativeOrganismQuantity(2d); basicRecord.setLicense(License.UNSPECIFIED.name()); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(basicRecord); Assert.assertEquals(BasisOfRecord.HUMAN_OBSERVATION.name(), hdfsRecord.getBasisofrecord()); Assert.assertEquals(Sex.HERMAPHRODITE.name(), hdfsRecord.getSex()); Assert.assertEquals(Integer.valueOf(99), hdfsRecord.getIndividualcount()); Assert.assertEquals(LifeStage.GAMETE.name(), hdfsRecord.getLifestage()); Assert.assertEquals(TypeStatus.ALLOTYPE.name(), hdfsRecord.getTypestatus()); Assert.assertEquals("noName", hdfsRecord.getTypifiedname()); Assert.assertEquals(EstablishmentMeans.INVASIVE.name(), hdfsRecord.getEstablishmentmeans()); Assert.assertEquals(Double.valueOf(2d), hdfsRecord.getOrganismquantity()); Assert.assertEquals("type", hdfsRecord.getOrganismquantitytype()); Assert.assertEquals("unit", hdfsRecord.getSamplesizeunit()); Assert.assertEquals(Double.valueOf(2d), hdfsRecord.getSamplesizevalue()); Assert.assertEquals(Double.valueOf(2d), hdfsRecord.getRelativeorganismquantity()); Assert.assertNull(hdfsRecord.getLicense()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> basicRecordMapper() { return (hr, sr) -> { BasicRecord br = (BasicRecord) sr; if (Objects.nonNull(br.getGbifId())) { hr.setGbifid(br.getGbifId()); } hr.setBasisofrecord(br.getBasisOfRecord()); hr.setEstablishmentmeans(br.getEstablishmentMeans()); hr.setIndividualcount(br.getIndividualCount()); hr.setLifestage(br.getLifeStage()); hr.setReferences(br.getReferences()); hr.setSex(br.getSex()); hr.setTypestatus(br.getTypeStatus()); hr.setTypifiedname(br.getTypifiedName()); hr.setOrganismquantity(br.getOrganismQuantity()); hr.setOrganismquantitytype(br.getOrganismQuantityType()); hr.setSamplesizeunit(br.getSampleSizeUnit()); hr.setSamplesizevalue(br.getSampleSizeValue()); hr.setRelativeorganismquantity(br.getRelativeOrganismQuantity()); hr.setOccurrencestatus(br.getOccurrenceStatus()); Optional.ofNullable(br.getRecordedByIds()) .ifPresent( uis -> hr.setRecordedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); Optional.ofNullable(br.getIdentifiedByIds()) .ifPresent( uis -> hr.setIdentifiedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); if (br.getLicense() != null && !License.UNSUPPORTED.name().equals(br.getLicense()) && !License.UNSPECIFIED.name().equals(br.getLicense())) { hr.setLicense(br.getLicense()); } setCreatedIfGreater(hr, br.getCreated()); addIssues(br.getIssues(), hr); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> basicRecordMapper() { return (hr, sr) -> { BasicRecord br = (BasicRecord) sr; if (Objects.nonNull(br.getGbifId())) { hr.setGbifid(br.getGbifId()); } hr.setBasisofrecord(br.getBasisOfRecord()); hr.setEstablishmentmeans(br.getEstablishmentMeans()); hr.setIndividualcount(br.getIndividualCount()); hr.setLifestage(br.getLifeStage()); hr.setReferences(br.getReferences()); hr.setSex(br.getSex()); hr.setTypestatus(br.getTypeStatus()); hr.setTypifiedname(br.getTypifiedName()); hr.setOrganismquantity(br.getOrganismQuantity()); hr.setOrganismquantitytype(br.getOrganismQuantityType()); hr.setSamplesizeunit(br.getSampleSizeUnit()); hr.setSamplesizevalue(br.getSampleSizeValue()); hr.setRelativeorganismquantity(br.getRelativeOrganismQuantity()); hr.setOccurrencestatus(br.getOccurrenceStatus()); Optional.ofNullable(br.getRecordedByIds()) .ifPresent( uis -> hr.setRecordedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); Optional.ofNullable(br.getIdentifiedByIds()) .ifPresent( uis -> hr.setIdentifiedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); if (br.getLicense() != null && !License.UNSUPPORTED.name().equals(br.getLicense()) && !License.UNSPECIFIED.name().equals(br.getLicense())) { hr.setLicense(br.getLicense()); } setCreatedIfGreater(hr, br.getCreated()); addIssues(br.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> basicRecordMapper() { return (hr, sr) -> { BasicRecord br = (BasicRecord) sr; if (Objects.nonNull(br.getGbifId())) { hr.setGbifid(br.getGbifId()); } hr.setBasisofrecord(br.getBasisOfRecord()); hr.setEstablishmentmeans(br.getEstablishmentMeans()); hr.setIndividualcount(br.getIndividualCount()); hr.setLifestage(br.getLifeStage()); hr.setReferences(br.getReferences()); hr.setSex(br.getSex()); hr.setTypestatus(br.getTypeStatus()); hr.setTypifiedname(br.getTypifiedName()); hr.setOrganismquantity(br.getOrganismQuantity()); hr.setOrganismquantitytype(br.getOrganismQuantityType()); hr.setSamplesizeunit(br.getSampleSizeUnit()); hr.setSamplesizevalue(br.getSampleSizeValue()); hr.setRelativeorganismquantity(br.getRelativeOrganismQuantity()); hr.setOccurrencestatus(br.getOccurrenceStatus()); Optional.ofNullable(br.getRecordedByIds()) .ifPresent( uis -> hr.setRecordedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); Optional.ofNullable(br.getIdentifiedByIds()) .ifPresent( uis -> hr.setIdentifiedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); if (br.getLicense() != null && !License.UNSUPPORTED.name().equals(br.getLicense()) && !License.UNSPECIFIED.name().equals(br.getLicense())) { hr.setLicense(br.getLicense()); } setCreatedIfGreater(hr, br.getCreated()); addIssues(br.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> basicRecordMapper() { return (hr, sr) -> { BasicRecord br = (BasicRecord) sr; if (Objects.nonNull(br.getGbifId())) { hr.setGbifid(br.getGbifId()); } hr.setBasisofrecord(br.getBasisOfRecord()); hr.setEstablishmentmeans(br.getEstablishmentMeans()); hr.setIndividualcount(br.getIndividualCount()); hr.setLifestage(br.getLifeStage()); hr.setReferences(br.getReferences()); hr.setSex(br.getSex()); hr.setTypestatus(br.getTypeStatus()); hr.setTypifiedname(br.getTypifiedName()); hr.setOrganismquantity(br.getOrganismQuantity()); hr.setOrganismquantitytype(br.getOrganismQuantityType()); hr.setSamplesizeunit(br.getSampleSizeUnit()); hr.setSamplesizevalue(br.getSampleSizeValue()); hr.setRelativeorganismquantity(br.getRelativeOrganismQuantity()); hr.setOccurrencestatus(br.getOccurrenceStatus()); Optional.ofNullable(br.getRecordedByIds()) .ifPresent( uis -> hr.setRecordedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); Optional.ofNullable(br.getIdentifiedByIds()) .ifPresent( uis -> hr.setIdentifiedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); if (br.getLicense() != null && !License.UNSUPPORTED.name().equals(br.getLicense()) && !License.UNSPECIFIED.name().equals(br.getLicense())) { hr.setLicense(br.getLicense()); } setCreatedIfGreater(hr, br.getCreated()); addIssues(br.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> basicRecordMapper() { return (hr, sr) -> { BasicRecord br = (BasicRecord) sr; if (Objects.nonNull(br.getGbifId())) { hr.setGbifid(br.getGbifId()); } hr.setBasisofrecord(br.getBasisOfRecord()); hr.setEstablishmentmeans(br.getEstablishmentMeans()); hr.setIndividualcount(br.getIndividualCount()); hr.setLifestage(br.getLifeStage()); hr.setReferences(br.getReferences()); hr.setSex(br.getSex()); hr.setTypestatus(br.getTypeStatus()); hr.setTypifiedname(br.getTypifiedName()); hr.setOrganismquantity(br.getOrganismQuantity()); hr.setOrganismquantitytype(br.getOrganismQuantityType()); hr.setSamplesizeunit(br.getSampleSizeUnit()); hr.setSamplesizevalue(br.getSampleSizeValue()); hr.setRelativeorganismquantity(br.getRelativeOrganismQuantity()); hr.setOccurrencestatus(br.getOccurrenceStatus()); Optional.ofNullable(br.getRecordedByIds()) .ifPresent( uis -> hr.setRecordedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); Optional.ofNullable(br.getIdentifiedByIds()) .ifPresent( uis -> hr.setIdentifiedbyid( uis.stream().map(AgentIdentifier::getValue).collect(Collectors.toList()))); if (br.getLicense() != null && !License.UNSUPPORTED.name().equals(br.getLicense()) && !License.UNSPECIFIED.name().equals(br.getLicense())) { hr.setLicense(br.getLicense()); } setCreatedIfGreater(hr, br.getCreated()); addIssues(br.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
@Test public void taxonMapperTest() { List<RankedName> classification = new ArrayList<>(); classification.add( RankedName.newBuilder().setKey(2).setRank(Rank.KINGDOM).setName("Archaea").build()); classification.add( RankedName.newBuilder().setKey(79).setRank(Rank.PHYLUM).setName("Crenarchaeota").build()); classification.add( RankedName.newBuilder() .setKey(8016360) .setRank(Rank.ORDER) .setName("Acidilobales") .build()); classification.add( RankedName.newBuilder().setKey(292).setRank(Rank.CLASS).setName("Thermoprotei").build()); classification.add( RankedName.newBuilder() .setKey(7785) .setRank(Rank.FAMILY) .setName("Caldisphaeraceae") .build()); classification.add( RankedName.newBuilder() .setKey(1000002) .setRank(Rank.GENUS) .setName("Caldisphaera") .build()); classification.add( RankedName.newBuilder() .setKey(1000003) .setRank(Rank.SPECIES) .setName("Caldisphaera lagunensis") .build()); ParsedName parsedName = ParsedName.newBuilder() .setType(NameType.SCIENTIFIC) .setAbbreviated(Boolean.FALSE) .setBasionymAuthorship( Authorship.newBuilder() .setYear("2003") .setAuthors(Collections.singletonList("Itoh & al.")) .setExAuthors(Collections.emptyList()) .setEmpty(Boolean.FALSE) .build()) .setAutonym(Boolean.FALSE) .setBinomial(Boolean.TRUE) .setGenus("Caldisphaera") .setSpecificEpithet("lagunensis") .setNotho(NamePart.SPECIFIC) .setState(State.COMPLETE) .build(); TaxonRecord taxonRecord = new TaxonRecord(); RankedName rankedName = RankedName.newBuilder() .setKey(2492483) .setRank(Rank.SPECIES) .setName("Caldisphaera lagunensis Itoh & al., 2003") .build(); taxonRecord.setUsage(rankedName); taxonRecord.setUsage(rankedName); taxonRecord.setAcceptedUsage(rankedName); taxonRecord.setSynonym(Boolean.FALSE); taxonRecord.setClassification(classification); taxonRecord.setUsageParsedName(parsedName); taxonRecord.setNomenclature(Nomenclature.newBuilder().setSource("nothing").build()); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(taxonRecord); Assert.assertEquals("Archaea", hdfsRecord.getKingdom()); Assert.assertEquals(Integer.valueOf(2), hdfsRecord.getKingdomkey()); Assert.assertEquals("Crenarchaeota", hdfsRecord.getPhylum()); Assert.assertEquals(Integer.valueOf(79), hdfsRecord.getPhylumkey()); Assert.assertEquals("Acidilobales", hdfsRecord.getOrder()); Assert.assertEquals(Integer.valueOf(8016360), hdfsRecord.getOrderkey()); Assert.assertEquals("Thermoprotei", hdfsRecord.getClass$()); Assert.assertEquals(Integer.valueOf(292), hdfsRecord.getClasskey()); Assert.assertEquals("Caldisphaeraceae", hdfsRecord.getFamily()); Assert.assertEquals(Integer.valueOf(7785), hdfsRecord.getFamilykey()); Assert.assertEquals("Caldisphaera", hdfsRecord.getGenus()); Assert.assertEquals(Integer.valueOf(1000002), hdfsRecord.getGenuskey()); Assert.assertEquals("Caldisphaera lagunensis", hdfsRecord.getSpecies()); Assert.assertEquals(Integer.valueOf(1000003), hdfsRecord.getSpecieskey()); Assert.assertEquals("2492483", hdfsRecord.getAcceptednameusageid()); Assert.assertEquals( "Caldisphaera lagunensis Itoh & al., 2003", hdfsRecord.getAcceptedscientificname()); Assert.assertEquals(Integer.valueOf(2492483), hdfsRecord.getAcceptedtaxonkey()); Assert.assertEquals("Caldisphaera", hdfsRecord.getGenericname()); Assert.assertEquals("lagunensis", hdfsRecord.getSpecificepithet()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> taxonMapper() { return (hr, sr) -> { TaxonRecord tr = (TaxonRecord) sr; Optional.ofNullable(tr.getUsage()).ifPresent(x -> hr.setTaxonkey(x.getKey())); if (Objects.nonNull(tr.getClassification())) { tr.getClassification() .forEach( rankedName -> { switch (rankedName.getRank()) { case KINGDOM: hr.setKingdom(rankedName.getName()); hr.setKingdomkey(rankedName.getKey()); break; case PHYLUM: hr.setPhylum(rankedName.getName()); hr.setPhylumkey(rankedName.getKey()); break; case CLASS: hr.setClass$(rankedName.getName()); hr.setClasskey(rankedName.getKey()); break; case ORDER: hr.setOrder(rankedName.getName()); hr.setOrderkey(rankedName.getKey()); break; case FAMILY: hr.setFamily(rankedName.getName()); hr.setFamilykey(rankedName.getKey()); break; case GENUS: hr.setGenus(rankedName.getName()); hr.setGenuskey(rankedName.getKey()); break; case SUBGENUS: hr.setSubgenus(rankedName.getName()); hr.setSubgenuskey(rankedName.getKey()); break; case SPECIES: hr.setSpecies(rankedName.getName()); hr.setSpecieskey(rankedName.getKey()); break; default: break; } }); } if (Objects.nonNull(tr.getAcceptedUsage())) { hr.setAcceptedscientificname(tr.getAcceptedUsage().getName()); hr.setAcceptednameusageid(tr.getAcceptedUsage().getKey().toString()); if (Objects.nonNull(tr.getAcceptedUsage().getKey())) { hr.setAcceptedtaxonkey(tr.getAcceptedUsage().getKey()); } Optional.ofNullable(tr.getAcceptedUsage().getRank()) .ifPresent(r -> hr.setTaxonrank(r.name())); } else if (Objects.nonNull(tr.getUsage()) && tr.getUsage().getKey() != 0) { hr.setAcceptedtaxonkey(tr.getUsage().getKey()); hr.setAcceptedscientificname(tr.getUsage().getName()); hr.setAcceptednameusageid(tr.getUsage().getKey().toString()); } if (Objects.nonNull(tr.getUsage())) { hr.setTaxonkey(tr.getUsage().getKey()); hr.setScientificname(tr.getUsage().getName()); Optional.ofNullable(tr.getUsage().getRank()).ifPresent(r -> hr.setTaxonrank(r.name())); } if (Objects.nonNull(tr.getUsageParsedName())) { hr.setGenericname( Objects.nonNull(tr.getUsageParsedName().getGenus()) ? tr.getUsageParsedName().getGenus() : tr.getUsageParsedName().getUninomial()); hr.setSpecificepithet(tr.getUsageParsedName().getSpecificEpithet()); hr.setInfraspecificepithet(tr.getUsageParsedName().getInfraspecificEpithet()); } Optional.ofNullable(tr.getDiagnostics()) .map(Diagnostic::getStatus) .ifPresent(d -> hr.setTaxonomicstatus(d.name())); setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> taxonMapper() { return (hr, sr) -> { TaxonRecord tr = (TaxonRecord) sr; Optional.ofNullable(tr.getUsage()).ifPresent(x -> hr.setTaxonkey(x.getKey())); if (Objects.nonNull(tr.getClassification())) { tr.getClassification() .forEach( rankedName -> { switch (rankedName.getRank()) { case KINGDOM: hr.setKingdom(rankedName.getName()); hr.setKingdomkey(rankedName.getKey()); break; case PHYLUM: hr.setPhylum(rankedName.getName()); hr.setPhylumkey(rankedName.getKey()); break; case CLASS: hr.setClass$(rankedName.getName()); hr.setClasskey(rankedName.getKey()); break; case ORDER: hr.setOrder(rankedName.getName()); hr.setOrderkey(rankedName.getKey()); break; case FAMILY: hr.setFamily(rankedName.getName()); hr.setFamilykey(rankedName.getKey()); break; case GENUS: hr.setGenus(rankedName.getName()); hr.setGenuskey(rankedName.getKey()); break; case SUBGENUS: hr.setSubgenus(rankedName.getName()); hr.setSubgenuskey(rankedName.getKey()); break; case SPECIES: hr.setSpecies(rankedName.getName()); hr.setSpecieskey(rankedName.getKey()); break; default: break; } }); } if (Objects.nonNull(tr.getAcceptedUsage())) { hr.setAcceptedscientificname(tr.getAcceptedUsage().getName()); hr.setAcceptednameusageid(tr.getAcceptedUsage().getKey().toString()); if (Objects.nonNull(tr.getAcceptedUsage().getKey())) { hr.setAcceptedtaxonkey(tr.getAcceptedUsage().getKey()); } Optional.ofNullable(tr.getAcceptedUsage().getRank()) .ifPresent(r -> hr.setTaxonrank(r.name())); } else if (Objects.nonNull(tr.getUsage()) && tr.getUsage().getKey() != 0) { hr.setAcceptedtaxonkey(tr.getUsage().getKey()); hr.setAcceptedscientificname(tr.getUsage().getName()); hr.setAcceptednameusageid(tr.getUsage().getKey().toString()); } if (Objects.nonNull(tr.getUsage())) { hr.setTaxonkey(tr.getUsage().getKey()); hr.setScientificname(tr.getUsage().getName()); Optional.ofNullable(tr.getUsage().getRank()).ifPresent(r -> hr.setTaxonrank(r.name())); } if (Objects.nonNull(tr.getUsageParsedName())) { hr.setGenericname( Objects.nonNull(tr.getUsageParsedName().getGenus()) ? tr.getUsageParsedName().getGenus() : tr.getUsageParsedName().getUninomial()); hr.setSpecificepithet(tr.getUsageParsedName().getSpecificEpithet()); hr.setInfraspecificepithet(tr.getUsageParsedName().getInfraspecificEpithet()); } Optional.ofNullable(tr.getDiagnostics()) .map(Diagnostic::getStatus) .ifPresent(d -> hr.setTaxonomicstatus(d.name())); setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> taxonMapper() { return (hr, sr) -> { TaxonRecord tr = (TaxonRecord) sr; Optional.ofNullable(tr.getUsage()).ifPresent(x -> hr.setTaxonkey(x.getKey())); if (Objects.nonNull(tr.getClassification())) { tr.getClassification() .forEach( rankedName -> { switch (rankedName.getRank()) { case KINGDOM: hr.setKingdom(rankedName.getName()); hr.setKingdomkey(rankedName.getKey()); break; case PHYLUM: hr.setPhylum(rankedName.getName()); hr.setPhylumkey(rankedName.getKey()); break; case CLASS: hr.setClass$(rankedName.getName()); hr.setClasskey(rankedName.getKey()); break; case ORDER: hr.setOrder(rankedName.getName()); hr.setOrderkey(rankedName.getKey()); break; case FAMILY: hr.setFamily(rankedName.getName()); hr.setFamilykey(rankedName.getKey()); break; case GENUS: hr.setGenus(rankedName.getName()); hr.setGenuskey(rankedName.getKey()); break; case SUBGENUS: hr.setSubgenus(rankedName.getName()); hr.setSubgenuskey(rankedName.getKey()); break; case SPECIES: hr.setSpecies(rankedName.getName()); hr.setSpecieskey(rankedName.getKey()); break; default: break; } }); } if (Objects.nonNull(tr.getAcceptedUsage())) { hr.setAcceptedscientificname(tr.getAcceptedUsage().getName()); hr.setAcceptednameusageid(tr.getAcceptedUsage().getKey().toString()); if (Objects.nonNull(tr.getAcceptedUsage().getKey())) { hr.setAcceptedtaxonkey(tr.getAcceptedUsage().getKey()); } Optional.ofNullable(tr.getAcceptedUsage().getRank()) .ifPresent(r -> hr.setTaxonrank(r.name())); } else if (Objects.nonNull(tr.getUsage()) && tr.getUsage().getKey() != 0) { hr.setAcceptedtaxonkey(tr.getUsage().getKey()); hr.setAcceptedscientificname(tr.getUsage().getName()); hr.setAcceptednameusageid(tr.getUsage().getKey().toString()); } if (Objects.nonNull(tr.getUsage())) { hr.setTaxonkey(tr.getUsage().getKey()); hr.setScientificname(tr.getUsage().getName()); Optional.ofNullable(tr.getUsage().getRank()).ifPresent(r -> hr.setTaxonrank(r.name())); } if (Objects.nonNull(tr.getUsageParsedName())) { hr.setGenericname( Objects.nonNull(tr.getUsageParsedName().getGenus()) ? tr.getUsageParsedName().getGenus() : tr.getUsageParsedName().getUninomial()); hr.setSpecificepithet(tr.getUsageParsedName().getSpecificEpithet()); hr.setInfraspecificepithet(tr.getUsageParsedName().getInfraspecificEpithet()); } Optional.ofNullable(tr.getDiagnostics()) .map(Diagnostic::getStatus) .ifPresent(d -> hr.setTaxonomicstatus(d.name())); setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> taxonMapper() { return (hr, sr) -> { TaxonRecord tr = (TaxonRecord) sr; Optional.ofNullable(tr.getUsage()).ifPresent(x -> hr.setTaxonkey(x.getKey())); if (Objects.nonNull(tr.getClassification())) { tr.getClassification() .forEach( rankedName -> { switch (rankedName.getRank()) { case KINGDOM: hr.setKingdom(rankedName.getName()); hr.setKingdomkey(rankedName.getKey()); break; case PHYLUM: hr.setPhylum(rankedName.getName()); hr.setPhylumkey(rankedName.getKey()); break; case CLASS: hr.setClass$(rankedName.getName()); hr.setClasskey(rankedName.getKey()); break; case ORDER: hr.setOrder(rankedName.getName()); hr.setOrderkey(rankedName.getKey()); break; case FAMILY: hr.setFamily(rankedName.getName()); hr.setFamilykey(rankedName.getKey()); break; case GENUS: hr.setGenus(rankedName.getName()); hr.setGenuskey(rankedName.getKey()); break; case SUBGENUS: hr.setSubgenus(rankedName.getName()); hr.setSubgenuskey(rankedName.getKey()); break; case SPECIES: hr.setSpecies(rankedName.getName()); hr.setSpecieskey(rankedName.getKey()); break; default: break; } }); } if (Objects.nonNull(tr.getAcceptedUsage())) { hr.setAcceptedscientificname(tr.getAcceptedUsage().getName()); hr.setAcceptednameusageid(tr.getAcceptedUsage().getKey().toString()); if (Objects.nonNull(tr.getAcceptedUsage().getKey())) { hr.setAcceptedtaxonkey(tr.getAcceptedUsage().getKey()); } Optional.ofNullable(tr.getAcceptedUsage().getRank()) .ifPresent(r -> hr.setTaxonrank(r.name())); } else if (Objects.nonNull(tr.getUsage()) && tr.getUsage().getKey() != 0) { hr.setAcceptedtaxonkey(tr.getUsage().getKey()); hr.setAcceptedscientificname(tr.getUsage().getName()); hr.setAcceptednameusageid(tr.getUsage().getKey().toString()); } if (Objects.nonNull(tr.getUsage())) { hr.setTaxonkey(tr.getUsage().getKey()); hr.setScientificname(tr.getUsage().getName()); Optional.ofNullable(tr.getUsage().getRank()).ifPresent(r -> hr.setTaxonrank(r.name())); } if (Objects.nonNull(tr.getUsageParsedName())) { hr.setGenericname( Objects.nonNull(tr.getUsageParsedName().getGenus()) ? tr.getUsageParsedName().getGenus() : tr.getUsageParsedName().getUninomial()); hr.setSpecificepithet(tr.getUsageParsedName().getSpecificEpithet()); hr.setInfraspecificepithet(tr.getUsageParsedName().getInfraspecificEpithet()); } Optional.ofNullable(tr.getDiagnostics()) .map(Diagnostic::getStatus) .ifPresent(d -> hr.setTaxonomicstatus(d.name())); setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> taxonMapper() { return (hr, sr) -> { TaxonRecord tr = (TaxonRecord) sr; Optional.ofNullable(tr.getUsage()).ifPresent(x -> hr.setTaxonkey(x.getKey())); if (Objects.nonNull(tr.getClassification())) { tr.getClassification() .forEach( rankedName -> { switch (rankedName.getRank()) { case KINGDOM: hr.setKingdom(rankedName.getName()); hr.setKingdomkey(rankedName.getKey()); break; case PHYLUM: hr.setPhylum(rankedName.getName()); hr.setPhylumkey(rankedName.getKey()); break; case CLASS: hr.setClass$(rankedName.getName()); hr.setClasskey(rankedName.getKey()); break; case ORDER: hr.setOrder(rankedName.getName()); hr.setOrderkey(rankedName.getKey()); break; case FAMILY: hr.setFamily(rankedName.getName()); hr.setFamilykey(rankedName.getKey()); break; case GENUS: hr.setGenus(rankedName.getName()); hr.setGenuskey(rankedName.getKey()); break; case SUBGENUS: hr.setSubgenus(rankedName.getName()); hr.setSubgenuskey(rankedName.getKey()); break; case SPECIES: hr.setSpecies(rankedName.getName()); hr.setSpecieskey(rankedName.getKey()); break; default: break; } }); } if (Objects.nonNull(tr.getAcceptedUsage())) { hr.setAcceptedscientificname(tr.getAcceptedUsage().getName()); hr.setAcceptednameusageid(tr.getAcceptedUsage().getKey().toString()); if (Objects.nonNull(tr.getAcceptedUsage().getKey())) { hr.setAcceptedtaxonkey(tr.getAcceptedUsage().getKey()); } Optional.ofNullable(tr.getAcceptedUsage().getRank()) .ifPresent(r -> hr.setTaxonrank(r.name())); } else if (Objects.nonNull(tr.getUsage()) && tr.getUsage().getKey() != 0) { hr.setAcceptedtaxonkey(tr.getUsage().getKey()); hr.setAcceptedscientificname(tr.getUsage().getName()); hr.setAcceptednameusageid(tr.getUsage().getKey().toString()); } if (Objects.nonNull(tr.getUsage())) { hr.setTaxonkey(tr.getUsage().getKey()); hr.setScientificname(tr.getUsage().getName()); Optional.ofNullable(tr.getUsage().getRank()).ifPresent(r -> hr.setTaxonrank(r.name())); } if (Objects.nonNull(tr.getUsageParsedName())) { hr.setGenericname( Objects.nonNull(tr.getUsageParsedName().getGenus()) ? tr.getUsageParsedName().getGenus() : tr.getUsageParsedName().getUninomial()); hr.setSpecificepithet(tr.getUsageParsedName().getSpecificEpithet()); hr.setInfraspecificepithet(tr.getUsageParsedName().getInfraspecificEpithet()); } Optional.ofNullable(tr.getDiagnostics()) .map(Diagnostic::getStatus) .ifPresent(d -> hr.setTaxonomicstatus(d.name())); setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
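Note on taxonMapper(): when a TaxonRecord carries no acceptedUsage, the accepted* columns are backfilled from the usage itself (provided its key is non-zero). The following is a minimal illustrative sketch of that fallback, written in the style of the surrounding tests; it assumes the same static import of toOccurrenceHdfsRecord and the same GBIF pipelines Avro classes, and the test name is hypothetical, not part of the original test class.

@Test
public void taxonAcceptedUsageFallbackTest() {
  // Illustrative sketch (not part of the original test class): only the usage is set, no accepted usage.
  TaxonRecord taxonRecord = new TaxonRecord();
  taxonRecord.setUsage(
      RankedName.newBuilder()
          .setKey(2492483)
          .setRank(Rank.SPECIES)
          .setName("Caldisphaera lagunensis Itoh & al., 2003")
          .build());
  OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(taxonRecord);
  // taxonMapper() backfills the accepted* fields from the usage when no accepted usage exists.
  Assert.assertEquals(Integer.valueOf(2492483), hdfsRecord.getAcceptedtaxonkey());
  Assert.assertEquals(
      "Caldisphaera lagunensis Itoh & al., 2003", hdfsRecord.getAcceptedscientificname());
  Assert.assertEquals("2492483", hdfsRecord.getAcceptednameusageid());
}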
@Test public void temporalMapperTest() { String rawEventDate = "2019-01-01"; Long eventDate = LocalDate.of(2019, 1, 1).atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli(); TemporalRecord temporalRecord = TemporalRecord.newBuilder() .setId("1") .setDay(1) .setYear(2019) .setMonth(1) .setStartDayOfYear(1) .setEventDate(EventDate.newBuilder().setLte(rawEventDate).build()) .setDateIdentified(rawEventDate) .setModified(rawEventDate) .build(); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(temporalRecord); Assert.assertEquals(Integer.valueOf(1), hdfsRecord.getDay()); Assert.assertEquals(Integer.valueOf(1), hdfsRecord.getMonth()); Assert.assertEquals(Integer.valueOf(2019), hdfsRecord.getYear()); Assert.assertEquals("1", hdfsRecord.getStartdayofyear()); Assert.assertEquals(eventDate, hdfsRecord.getEventdate()); Assert.assertEquals(eventDate, hdfsRecord.getDateidentified()); Assert.assertEquals(eventDate, hdfsRecord.getModified()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> temporalMapper() { return (hr, sr) -> { TemporalRecord tr = (TemporalRecord) sr; Optional.ofNullable(tr.getDateIdentified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setDateidentified(date.getTime())); Optional.ofNullable(tr.getModified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setModified(date.getTime())); hr.setDay(tr.getDay()); hr.setMonth(tr.getMonth()); hr.setYear(tr.getYear()); if (Objects.nonNull(tr.getStartDayOfYear())) { hr.setStartdayofyear(tr.getStartDayOfYear().toString()); } else { hr.setStartdayofyear(null); } if (Objects.nonNull(tr.getEndDayOfYear())) { hr.setEnddayofyear(tr.getEndDayOfYear().toString()); } else { hr.setEnddayofyear(null); } if (tr.getEventDate() != null && tr.getEventDate().getGte() != null) { Optional.ofNullable(tr.getEventDate().getGte()) .map(STRING_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } else { TemporalUtils.getTemporal(tr.getYear(), tr.getMonth(), tr.getDay()) .map(TEMPORAL_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> temporalMapper() { return (hr, sr) -> { TemporalRecord tr = (TemporalRecord) sr; Optional.ofNullable(tr.getDateIdentified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setDateidentified(date.getTime())); Optional.ofNullable(tr.getModified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setModified(date.getTime())); hr.setDay(tr.getDay()); hr.setMonth(tr.getMonth()); hr.setYear(tr.getYear()); if (Objects.nonNull(tr.getStartDayOfYear())) { hr.setStartdayofyear(tr.getStartDayOfYear().toString()); } else { hr.setStartdayofyear(null); } if (Objects.nonNull(tr.getEndDayOfYear())) { hr.setEnddayofyear(tr.getEndDayOfYear().toString()); } else { hr.setEnddayofyear(null); } if (tr.getEventDate() != null && tr.getEventDate().getGte() != null) { Optional.ofNullable(tr.getEventDate().getGte()) .map(STRING_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } else { TemporalUtils.getTemporal(tr.getYear(), tr.getMonth(), tr.getDay()) .map(TEMPORAL_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> temporalMapper() { return (hr, sr) -> { TemporalRecord tr = (TemporalRecord) sr; Optional.ofNullable(tr.getDateIdentified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setDateidentified(date.getTime())); Optional.ofNullable(tr.getModified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setModified(date.getTime())); hr.setDay(tr.getDay()); hr.setMonth(tr.getMonth()); hr.setYear(tr.getYear()); if (Objects.nonNull(tr.getStartDayOfYear())) { hr.setStartdayofyear(tr.getStartDayOfYear().toString()); } else { hr.setStartdayofyear(null); } if (Objects.nonNull(tr.getEndDayOfYear())) { hr.setEnddayofyear(tr.getEndDayOfYear().toString()); } else { hr.setEnddayofyear(null); } if (tr.getEventDate() != null && tr.getEventDate().getGte() != null) { Optional.ofNullable(tr.getEventDate().getGte()) .map(STRING_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } else { TemporalUtils.getTemporal(tr.getYear(), tr.getMonth(), tr.getDay()) .map(TEMPORAL_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> temporalMapper() { return (hr, sr) -> { TemporalRecord tr = (TemporalRecord) sr; Optional.ofNullable(tr.getDateIdentified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setDateidentified(date.getTime())); Optional.ofNullable(tr.getModified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setModified(date.getTime())); hr.setDay(tr.getDay()); hr.setMonth(tr.getMonth()); hr.setYear(tr.getYear()); if (Objects.nonNull(tr.getStartDayOfYear())) { hr.setStartdayofyear(tr.getStartDayOfYear().toString()); } else { hr.setStartdayofyear(null); } if (Objects.nonNull(tr.getEndDayOfYear())) { hr.setEnddayofyear(tr.getEndDayOfYear().toString()); } else { hr.setEnddayofyear(null); } if (tr.getEventDate() != null && tr.getEventDate().getGte() != null) { Optional.ofNullable(tr.getEventDate().getGte()) .map(STRING_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } else { TemporalUtils.getTemporal(tr.getYear(), tr.getMonth(), tr.getDay()) .map(TEMPORAL_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> temporalMapper() { return (hr, sr) -> { TemporalRecord tr = (TemporalRecord) sr; Optional.ofNullable(tr.getDateIdentified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setDateidentified(date.getTime())); Optional.ofNullable(tr.getModified()) .map(STRING_TO_DATE) .ifPresent(date -> hr.setModified(date.getTime())); hr.setDay(tr.getDay()); hr.setMonth(tr.getMonth()); hr.setYear(tr.getYear()); if (Objects.nonNull(tr.getStartDayOfYear())) { hr.setStartdayofyear(tr.getStartDayOfYear().toString()); } else { hr.setStartdayofyear(null); } if (Objects.nonNull(tr.getEndDayOfYear())) { hr.setEnddayofyear(tr.getEndDayOfYear().toString()); } else { hr.setEnddayofyear(null); } if (tr.getEventDate() != null && tr.getEventDate().getGte() != null) { Optional.ofNullable(tr.getEventDate().getGte()) .map(STRING_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } else { TemporalUtils.getTemporal(tr.getYear(), tr.getMonth(), tr.getDay()) .map(TEMPORAL_TO_DATE) .ifPresent(eventDate -> hr.setEventdate(eventDate.getTime())); } setCreatedIfGreater(hr, tr.getCreated()); addIssues(tr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
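The temporalMapperTest above only sets the lte bound of the event date, so its eventdate assertion is actually satisfied by the fallback branch of temporalMapper(), which rebuilds the date from year/month/day when eventDate.gte is absent. Below is a small sketch contrasting the two branches; it assumes the same UTC start-of-day parsing shown by the dateidentified/modified assertions in the existing test, that EventDate exposes a setGte builder method symmetric to getGte, and that the remaining TemporalRecord builder fields are optional. The test name is hypothetical.

@Test
public void eventDateFallbackTest() {
  // Illustrative sketch (not part of the original test class).
  Long expected =
      LocalDate.of(2019, 1, 1).atStartOfDay().toInstant(ZoneOffset.UTC).toEpochMilli();
  // gte present: eventdate is taken directly from the event date interval.
  TemporalRecord withGte =
      TemporalRecord.newBuilder()
          .setId("1")
          .setEventDate(EventDate.newBuilder().setGte("2019-01-01").build())
          .build();
  Assert.assertEquals(expected, toOccurrenceHdfsRecord(withGte).getEventdate());
  // gte absent: eventdate is derived from year/month/day via TemporalUtils.
  TemporalRecord withoutGte =
      TemporalRecord.newBuilder().setId("1").setYear(2019).setMonth(1).setDay(1).build();
  Assert.assertEquals(expected, toOccurrenceHdfsRecord(withoutGte).getEventdate());
}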
@Test public void metadataMapperTest() { String datasetKey = UUID.randomUUID().toString(); String nodeKey = UUID.randomUUID().toString(); String installationKey = UUID.randomUUID().toString(); String organizationKey = UUID.randomUUID().toString(); List<String> networkKey = Collections.singletonList(UUID.randomUUID().toString()); MetadataRecord metadataRecord = MetadataRecord.newBuilder() .setId("1") .setDatasetKey(datasetKey) .setCrawlId(1) .setDatasetPublishingCountry(Country.COSTA_RICA.getIso2LetterCode()) .setLicense(License.CC_BY_4_0.name()) .setNetworkKeys(networkKey) .setDatasetTitle("TestDataset") .setEndorsingNodeKey(nodeKey) .setInstallationKey(installationKey) .setLastCrawled(new Date().getTime()) .setProtocol(EndpointType.DWC_ARCHIVE.name()) .setPublisherTitle("Pub") .setPublishingOrganizationKey(organizationKey) .build(); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(metadataRecord); Assert.assertEquals(datasetKey, hdfsRecord.getDatasetkey()); Assert.assertEquals(networkKey, hdfsRecord.getNetworkkey()); Assert.assertEquals(installationKey, hdfsRecord.getInstallationkey()); Assert.assertEquals(organizationKey, hdfsRecord.getPublishingorgkey()); Assert.assertEquals(License.CC_BY_4_0.name(), hdfsRecord.getLicense()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> metadataMapper() { return (hr, sr) -> { MetadataRecord mr = (MetadataRecord) sr; hr.setCrawlid(mr.getCrawlId()); hr.setDatasetkey(mr.getDatasetKey()); hr.setDatasetname(mr.getDatasetTitle()); hr.setInstallationkey(mr.getInstallationKey()); hr.setProtocol(mr.getProtocol()); hr.setNetworkkey(mr.getNetworkKeys()); hr.setPublisher(mr.getPublisherTitle()); hr.setPublishingorgkey(mr.getPublishingOrganizationKey()); hr.setLastcrawled(mr.getLastCrawled()); hr.setProjectid(mr.getProjectId()); hr.setProgrammeacronym(mr.getProgrammeAcronym()); hr.setHostingorganizationkey(mr.getHostingOrganizationKey()); if (hr.getLicense() == null) { hr.setLicense(mr.getLicense()); } setCreatedIfGreater(hr, mr.getCreated()); addIssues(mr.getIssues(), hr); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> metadataMapper() { return (hr, sr) -> { MetadataRecord mr = (MetadataRecord) sr; hr.setCrawlid(mr.getCrawlId()); hr.setDatasetkey(mr.getDatasetKey()); hr.setDatasetname(mr.getDatasetTitle()); hr.setInstallationkey(mr.getInstallationKey()); hr.setProtocol(mr.getProtocol()); hr.setNetworkkey(mr.getNetworkKeys()); hr.setPublisher(mr.getPublisherTitle()); hr.setPublishingorgkey(mr.getPublishingOrganizationKey()); hr.setLastcrawled(mr.getLastCrawled()); hr.setProjectid(mr.getProjectId()); hr.setProgrammeacronym(mr.getProgrammeAcronym()); hr.setHostingorganizationkey(mr.getHostingOrganizationKey()); if (hr.getLicense() == null) { hr.setLicense(mr.getLicense()); } setCreatedIfGreater(hr, mr.getCreated()); addIssues(mr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> metadataMapper() { return (hr, sr) -> { MetadataRecord mr = (MetadataRecord) sr; hr.setCrawlid(mr.getCrawlId()); hr.setDatasetkey(mr.getDatasetKey()); hr.setDatasetname(mr.getDatasetTitle()); hr.setInstallationkey(mr.getInstallationKey()); hr.setProtocol(mr.getProtocol()); hr.setNetworkkey(mr.getNetworkKeys()); hr.setPublisher(mr.getPublisherTitle()); hr.setPublishingorgkey(mr.getPublishingOrganizationKey()); hr.setLastcrawled(mr.getLastCrawled()); hr.setProjectid(mr.getProjectId()); hr.setProgrammeacronym(mr.getProgrammeAcronym()); hr.setHostingorganizationkey(mr.getHostingOrganizationKey()); if (hr.getLicense() == null) { hr.setLicense(mr.getLicense()); } setCreatedIfGreater(hr, mr.getCreated()); addIssues(mr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> metadataMapper() { return (hr, sr) -> { MetadataRecord mr = (MetadataRecord) sr; hr.setCrawlid(mr.getCrawlId()); hr.setDatasetkey(mr.getDatasetKey()); hr.setDatasetname(mr.getDatasetTitle()); hr.setInstallationkey(mr.getInstallationKey()); hr.setProtocol(mr.getProtocol()); hr.setNetworkkey(mr.getNetworkKeys()); hr.setPublisher(mr.getPublisherTitle()); hr.setPublishingorgkey(mr.getPublishingOrganizationKey()); hr.setLastcrawled(mr.getLastCrawled()); hr.setProjectid(mr.getProjectId()); hr.setProgrammeacronym(mr.getProgrammeAcronym()); hr.setHostingorganizationkey(mr.getHostingOrganizationKey()); if (hr.getLicense() == null) { hr.setLicense(mr.getLicense()); } setCreatedIfGreater(hr, mr.getCreated()); addIssues(mr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> metadataMapper() { return (hr, sr) -> { MetadataRecord mr = (MetadataRecord) sr; hr.setCrawlid(mr.getCrawlId()); hr.setDatasetkey(mr.getDatasetKey()); hr.setDatasetname(mr.getDatasetTitle()); hr.setInstallationkey(mr.getInstallationKey()); hr.setProtocol(mr.getProtocol()); hr.setNetworkkey(mr.getNetworkKeys()); hr.setPublisher(mr.getPublisherTitle()); hr.setPublishingorgkey(mr.getPublishingOrganizationKey()); hr.setLastcrawled(mr.getLastCrawled()); hr.setProjectid(mr.getProjectId()); hr.setProgrammeacronym(mr.getProgrammeAcronym()); hr.setHostingorganizationkey(mr.getHostingOrganizationKey()); if (hr.getLicense() == null) { hr.setLicense(mr.getLicense()); } setCreatedIfGreater(hr, mr.getCreated()); addIssues(mr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
@Test public void locationMapperTest() { LocationRecord locationRecord = LocationRecord.newBuilder() .setId("1") .setCountry(Country.COSTA_RICA.name()) .setCountryCode(Country.COSTA_RICA.getIso2LetterCode()) .setDecimalLatitude(9.934739) .setDecimalLongitude(-84.087502) .setContinent(Continent.NORTH_AMERICA.name()) .setHasCoordinate(Boolean.TRUE) .setCoordinatePrecision(0.1) .setCoordinateUncertaintyInMeters(1.0) .setDepth(5.0) .setDepthAccuracy(0.1) .setElevation(0.0) .setElevationAccuracy(0.1) .setHasGeospatialIssue(Boolean.FALSE) .setRepatriated(Boolean.TRUE) .setStateProvince("Limon") .setWaterBody("Atlantic") .setMaximumDepthInMeters(0.1) .setMinimumDepthInMeters(0.1) .setMaximumDistanceAboveSurfaceInMeters(0.1) .setMaximumElevationInMeters(0.1) .setMinimumElevationInMeters(0.1) .build(); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(locationRecord); Assert.assertEquals(Country.COSTA_RICA.getIso2LetterCode(), hdfsRecord.getCountrycode()); Assert.assertEquals(Double.valueOf(9.934739d), hdfsRecord.getDecimallatitude()); Assert.assertEquals(Double.valueOf(-84.087502d), hdfsRecord.getDecimallongitude()); Assert.assertEquals(Continent.NORTH_AMERICA.name(), hdfsRecord.getContinent()); Assert.assertEquals(Boolean.TRUE, hdfsRecord.getHascoordinate()); Assert.assertEquals(Double.valueOf(0.1d), hdfsRecord.getCoordinateprecision()); Assert.assertEquals(Double.valueOf(1.0d), hdfsRecord.getCoordinateuncertaintyinmeters()); Assert.assertEquals(Double.valueOf(5.0d), hdfsRecord.getDepth()); Assert.assertEquals(Double.valueOf(0.1d), hdfsRecord.getDepthaccuracy()); Assert.assertEquals(Double.valueOf(0.0d), hdfsRecord.getElevation()); Assert.assertEquals(Double.valueOf(0.1d), hdfsRecord.getElevationaccuracy()); Assert.assertEquals(Boolean.FALSE, hdfsRecord.getHasgeospatialissues()); Assert.assertEquals(Boolean.TRUE, hdfsRecord.getRepatriated()); Assert.assertEquals("Limon", hdfsRecord.getStateprovince()); Assert.assertEquals("Atlantic", hdfsRecord.getWaterbody()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> locationMapper() { return (hr, sr) -> { LocationRecord lr = (LocationRecord) sr; hr.setCountrycode(lr.getCountryCode()); hr.setContinent(lr.getContinent()); hr.setDecimallatitude(lr.getDecimalLatitude()); hr.setDecimallongitude(lr.getDecimalLongitude()); hr.setCoordinateprecision(lr.getCoordinatePrecision()); hr.setCoordinateuncertaintyinmeters(lr.getCoordinateUncertaintyInMeters()); hr.setDepth(lr.getDepth()); hr.setDepthaccuracy(lr.getDepthAccuracy()); hr.setElevation(lr.getElevation()); hr.setElevationaccuracy(lr.getElevationAccuracy()); if (Objects.nonNull(lr.getMaximumDistanceAboveSurfaceInMeters())) { hr.setMaximumdistanceabovesurfaceinmeters( lr.getMaximumDistanceAboveSurfaceInMeters().toString()); } if (Objects.nonNull(lr.getMinimumDistanceAboveSurfaceInMeters())) { hr.setMinimumdistanceabovesurfaceinmeters( lr.getMinimumDistanceAboveSurfaceInMeters().toString()); } hr.setStateprovince(lr.getStateProvince()); hr.setWaterbody(lr.getWaterBody()); hr.setHascoordinate(lr.getHasCoordinate()); hr.setHasgeospatialissues(lr.getHasGeospatialIssue()); hr.setRepatriated(lr.getRepatriated()); hr.setLocality(lr.getLocality()); hr.setPublishingcountry(lr.getPublishingCountry()); Optional.ofNullable(lr.getGadm()) .ifPresent( g -> { hr.setLevel0gid(g.getLevel0Gid()); hr.setLevel1gid(g.getLevel1Gid()); hr.setLevel2gid(g.getLevel2Gid()); hr.setLevel3gid(g.getLevel3Gid()); hr.setLevel0name(g.getLevel0Name()); hr.setLevel1name(g.getLevel1Name()); hr.setLevel2name(g.getLevel2Name()); hr.setLevel3name(g.getLevel3Name()); }); setCreatedIfGreater(hr, lr.getCreated()); addIssues(lr.getIssues(), hr); }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> locationMapper() { return (hr, sr) -> { LocationRecord lr = (LocationRecord) sr; hr.setCountrycode(lr.getCountryCode()); hr.setContinent(lr.getContinent()); hr.setDecimallatitude(lr.getDecimalLatitude()); hr.setDecimallongitude(lr.getDecimalLongitude()); hr.setCoordinateprecision(lr.getCoordinatePrecision()); hr.setCoordinateuncertaintyinmeters(lr.getCoordinateUncertaintyInMeters()); hr.setDepth(lr.getDepth()); hr.setDepthaccuracy(lr.getDepthAccuracy()); hr.setElevation(lr.getElevation()); hr.setElevationaccuracy(lr.getElevationAccuracy()); if (Objects.nonNull(lr.getMaximumDistanceAboveSurfaceInMeters())) { hr.setMaximumdistanceabovesurfaceinmeters( lr.getMaximumDistanceAboveSurfaceInMeters().toString()); } if (Objects.nonNull(lr.getMinimumDistanceAboveSurfaceInMeters())) { hr.setMinimumdistanceabovesurfaceinmeters( lr.getMinimumDistanceAboveSurfaceInMeters().toString()); } hr.setStateprovince(lr.getStateProvince()); hr.setWaterbody(lr.getWaterBody()); hr.setHascoordinate(lr.getHasCoordinate()); hr.setHasgeospatialissues(lr.getHasGeospatialIssue()); hr.setRepatriated(lr.getRepatriated()); hr.setLocality(lr.getLocality()); hr.setPublishingcountry(lr.getPublishingCountry()); Optional.ofNullable(lr.getGadm()) .ifPresent( g -> { hr.setLevel0gid(g.getLevel0Gid()); hr.setLevel1gid(g.getLevel1Gid()); hr.setLevel2gid(g.getLevel2Gid()); hr.setLevel3gid(g.getLevel3Gid()); hr.setLevel0name(g.getLevel0Name()); hr.setLevel1name(g.getLevel1Name()); hr.setLevel2name(g.getLevel2Name()); hr.setLevel3name(g.getLevel3Name()); }); setCreatedIfGreater(hr, lr.getCreated()); addIssues(lr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> locationMapper() { return (hr, sr) -> { LocationRecord lr = (LocationRecord) sr; hr.setCountrycode(lr.getCountryCode()); hr.setContinent(lr.getContinent()); hr.setDecimallatitude(lr.getDecimalLatitude()); hr.setDecimallongitude(lr.getDecimalLongitude()); hr.setCoordinateprecision(lr.getCoordinatePrecision()); hr.setCoordinateuncertaintyinmeters(lr.getCoordinateUncertaintyInMeters()); hr.setDepth(lr.getDepth()); hr.setDepthaccuracy(lr.getDepthAccuracy()); hr.setElevation(lr.getElevation()); hr.setElevationaccuracy(lr.getElevationAccuracy()); if (Objects.nonNull(lr.getMaximumDistanceAboveSurfaceInMeters())) { hr.setMaximumdistanceabovesurfaceinmeters( lr.getMaximumDistanceAboveSurfaceInMeters().toString()); } if (Objects.nonNull(lr.getMinimumDistanceAboveSurfaceInMeters())) { hr.setMinimumdistanceabovesurfaceinmeters( lr.getMinimumDistanceAboveSurfaceInMeters().toString()); } hr.setStateprovince(lr.getStateProvince()); hr.setWaterbody(lr.getWaterBody()); hr.setHascoordinate(lr.getHasCoordinate()); hr.setHasgeospatialissues(lr.getHasGeospatialIssue()); hr.setRepatriated(lr.getRepatriated()); hr.setLocality(lr.getLocality()); hr.setPublishingcountry(lr.getPublishingCountry()); Optional.ofNullable(lr.getGadm()) .ifPresent( g -> { hr.setLevel0gid(g.getLevel0Gid()); hr.setLevel1gid(g.getLevel1Gid()); hr.setLevel2gid(g.getLevel2Gid()); hr.setLevel3gid(g.getLevel3Gid()); hr.setLevel0name(g.getLevel0Name()); hr.setLevel1name(g.getLevel1Name()); hr.setLevel2name(g.getLevel2Name()); hr.setLevel3name(g.getLevel3Name()); }); setCreatedIfGreater(hr, lr.getCreated()); addIssues(lr.getIssues(), hr); }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> locationMapper() { return (hr, sr) -> { LocationRecord lr = (LocationRecord) sr; hr.setCountrycode(lr.getCountryCode()); hr.setContinent(lr.getContinent()); hr.setDecimallatitude(lr.getDecimalLatitude()); hr.setDecimallongitude(lr.getDecimalLongitude()); hr.setCoordinateprecision(lr.getCoordinatePrecision()); hr.setCoordinateuncertaintyinmeters(lr.getCoordinateUncertaintyInMeters()); hr.setDepth(lr.getDepth()); hr.setDepthaccuracy(lr.getDepthAccuracy()); hr.setElevation(lr.getElevation()); hr.setElevationaccuracy(lr.getElevationAccuracy()); if (Objects.nonNull(lr.getMaximumDistanceAboveSurfaceInMeters())) { hr.setMaximumdistanceabovesurfaceinmeters( lr.getMaximumDistanceAboveSurfaceInMeters().toString()); } if (Objects.nonNull(lr.getMinimumDistanceAboveSurfaceInMeters())) { hr.setMinimumdistanceabovesurfaceinmeters( lr.getMinimumDistanceAboveSurfaceInMeters().toString()); } hr.setStateprovince(lr.getStateProvince()); hr.setWaterbody(lr.getWaterBody()); hr.setHascoordinate(lr.getHasCoordinate()); hr.setHasgeospatialissues(lr.getHasGeospatialIssue()); hr.setRepatriated(lr.getRepatriated()); hr.setLocality(lr.getLocality()); hr.setPublishingcountry(lr.getPublishingCountry()); Optional.ofNullable(lr.getGadm()) .ifPresent( g -> { hr.setLevel0gid(g.getLevel0Gid()); hr.setLevel1gid(g.getLevel1Gid()); hr.setLevel2gid(g.getLevel2Gid()); hr.setLevel3gid(g.getLevel3Gid()); hr.setLevel0name(g.getLevel0Name()); hr.setLevel1name(g.getLevel1Name()); hr.setLevel2name(g.getLevel2Name()); hr.setLevel3name(g.getLevel3Name()); }); setCreatedIfGreater(hr, lr.getCreated()); addIssues(lr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> locationMapper() { return (hr, sr) -> { LocationRecord lr = (LocationRecord) sr; hr.setCountrycode(lr.getCountryCode()); hr.setContinent(lr.getContinent()); hr.setDecimallatitude(lr.getDecimalLatitude()); hr.setDecimallongitude(lr.getDecimalLongitude()); hr.setCoordinateprecision(lr.getCoordinatePrecision()); hr.setCoordinateuncertaintyinmeters(lr.getCoordinateUncertaintyInMeters()); hr.setDepth(lr.getDepth()); hr.setDepthaccuracy(lr.getDepthAccuracy()); hr.setElevation(lr.getElevation()); hr.setElevationaccuracy(lr.getElevationAccuracy()); if (Objects.nonNull(lr.getMaximumDistanceAboveSurfaceInMeters())) { hr.setMaximumdistanceabovesurfaceinmeters( lr.getMaximumDistanceAboveSurfaceInMeters().toString()); } if (Objects.nonNull(lr.getMinimumDistanceAboveSurfaceInMeters())) { hr.setMinimumdistanceabovesurfaceinmeters( lr.getMinimumDistanceAboveSurfaceInMeters().toString()); } hr.setStateprovince(lr.getStateProvince()); hr.setWaterbody(lr.getWaterBody()); hr.setHascoordinate(lr.getHasCoordinate()); hr.setHasgeospatialissues(lr.getHasGeospatialIssue()); hr.setRepatriated(lr.getRepatriated()); hr.setLocality(lr.getLocality()); hr.setPublishingcountry(lr.getPublishingCountry()); Optional.ofNullable(lr.getGadm()) .ifPresent( g -> { hr.setLevel0gid(g.getLevel0Gid()); hr.setLevel1gid(g.getLevel1Gid()); hr.setLevel2gid(g.getLevel2Gid()); hr.setLevel3gid(g.getLevel3Gid()); hr.setLevel0name(g.getLevel0Name()); hr.setLevel1name(g.getLevel1Name()); hr.setLevel2name(g.getLevel2Name()); hr.setLevel3name(g.getLevel3Name()); }); setCreatedIfGreater(hr, lr.getCreated()); addIssues(lr.getIssues(), hr); }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
@Test public void issueMappingTest() { String[] issues = { OccurrenceIssue.IDENTIFIED_DATE_INVALID.name(), OccurrenceIssue.MODIFIED_DATE_INVALID.name(), OccurrenceIssue.RECORDED_DATE_UNLIKELY.name() }; TemporalRecord temporalRecord = TemporalRecord.newBuilder() .setId("1") .setDay(1) .setYear(2019) .setMonth(1) .setStartDayOfYear(1) .setIssues(IssueRecord.newBuilder().setIssueList(Arrays.asList(issues)).build()) .build(); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(temporalRecord); Assert.assertArrayEquals(issues, hdfsRecord.getIssue().toArray(new String[issues.length])); }
public static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records) { OccurrenceHdfsRecord occurrenceHdfsRecord = new OccurrenceHdfsRecord(); occurrenceHdfsRecord.setIssue(new ArrayList<>()); for (SpecificRecordBase record : records) { Optional.ofNullable(converters.get(record.getClass())) .ifPresent(consumer -> consumer.accept(occurrenceHdfsRecord, record)); } Optional<SpecificRecordBase> erOpt = Arrays.stream(records).filter(x -> x instanceof ExtendedRecord).findFirst(); Optional<SpecificRecordBase> brOpt = Arrays.stream(records).filter(x -> x instanceof BasicRecord).findFirst(); if (erOpt.isPresent() && brOpt.isPresent()) { setIdentifier((BasicRecord) brOpt.get(), (ExtendedRecord) erOpt.get(), occurrenceHdfsRecord); } return occurrenceHdfsRecord; }
OccurrenceHdfsRecordConverter { public static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records) { OccurrenceHdfsRecord occurrenceHdfsRecord = new OccurrenceHdfsRecord(); occurrenceHdfsRecord.setIssue(new ArrayList<>()); for (SpecificRecordBase record : records) { Optional.ofNullable(converters.get(record.getClass())) .ifPresent(consumer -> consumer.accept(occurrenceHdfsRecord, record)); } Optional<SpecificRecordBase> erOpt = Arrays.stream(records).filter(x -> x instanceof ExtendedRecord).findFirst(); Optional<SpecificRecordBase> brOpt = Arrays.stream(records).filter(x -> x instanceof BasicRecord).findFirst(); if (erOpt.isPresent() && brOpt.isPresent()) { setIdentifier((BasicRecord) brOpt.get(), (ExtendedRecord) erOpt.get(), occurrenceHdfsRecord); } return occurrenceHdfsRecord; } }
OccurrenceHdfsRecordConverter { public static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records) { OccurrenceHdfsRecord occurrenceHdfsRecord = new OccurrenceHdfsRecord(); occurrenceHdfsRecord.setIssue(new ArrayList<>()); for (SpecificRecordBase record : records) { Optional.ofNullable(converters.get(record.getClass())) .ifPresent(consumer -> consumer.accept(occurrenceHdfsRecord, record)); } Optional<SpecificRecordBase> erOpt = Arrays.stream(records).filter(x -> x instanceof ExtendedRecord).findFirst(); Optional<SpecificRecordBase> brOpt = Arrays.stream(records).filter(x -> x instanceof BasicRecord).findFirst(); if (erOpt.isPresent() && brOpt.isPresent()) { setIdentifier((BasicRecord) brOpt.get(), (ExtendedRecord) erOpt.get(), occurrenceHdfsRecord); } return occurrenceHdfsRecord; } }
OccurrenceHdfsRecordConverter { public static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records) { OccurrenceHdfsRecord occurrenceHdfsRecord = new OccurrenceHdfsRecord(); occurrenceHdfsRecord.setIssue(new ArrayList<>()); for (SpecificRecordBase record : records) { Optional.ofNullable(converters.get(record.getClass())) .ifPresent(consumer -> consumer.accept(occurrenceHdfsRecord, record)); } Optional<SpecificRecordBase> erOpt = Arrays.stream(records).filter(x -> x instanceof ExtendedRecord).findFirst(); Optional<SpecificRecordBase> brOpt = Arrays.stream(records).filter(x -> x instanceof BasicRecord).findFirst(); if (erOpt.isPresent() && brOpt.isPresent()) { setIdentifier((BasicRecord) brOpt.get(), (ExtendedRecord) erOpt.get(), occurrenceHdfsRecord); } return occurrenceHdfsRecord; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { public static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records) { OccurrenceHdfsRecord occurrenceHdfsRecord = new OccurrenceHdfsRecord(); occurrenceHdfsRecord.setIssue(new ArrayList<>()); for (SpecificRecordBase record : records) { Optional.ofNullable(converters.get(record.getClass())) .ifPresent(consumer -> consumer.accept(occurrenceHdfsRecord, record)); } Optional<SpecificRecordBase> erOpt = Arrays.stream(records).filter(x -> x instanceof ExtendedRecord).findFirst(); Optional<SpecificRecordBase> brOpt = Arrays.stream(records).filter(x -> x instanceof BasicRecord).findFirst(); if (erOpt.isPresent() && brOpt.isPresent()) { setIdentifier((BasicRecord) brOpt.get(), (ExtendedRecord) erOpt.get(), occurrenceHdfsRecord); } return occurrenceHdfsRecord; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
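The following hypothetical snippet (not in the source) combines a temporal and a location record in one call, showing that toOccurrenceHdfsRecord dispatches each record to its own mapper and accumulates the issue list; imports are omitted and assumed to match the Avro classes used in the tests above.
@Test
public void multiRecordConversionSketch() {
  TemporalRecord temporal =
      TemporalRecord.newBuilder()
          .setId("1")
          .setYear(2019)
          .setIssues(
              IssueRecord.newBuilder()
                  .setIssueList(
                      Collections.singletonList(OccurrenceIssue.RECORDED_DATE_UNLIKELY.name()))
                  .build())
          .build();
  LocationRecord location =
      LocationRecord.newBuilder().setId("1").setCountryCode("DK").setDecimalLatitude(55.67d).build();

  OccurrenceHdfsRecord hdfs =
      OccurrenceHdfsRecordConverter.toOccurrenceHdfsRecord(temporal, location);

  Assert.assertEquals("DK", hdfs.getCountrycode());
  Assert.assertTrue(hdfs.getIssue().contains(OccurrenceIssue.RECORDED_DATE_UNLIKELY.name()));
}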
@Test public void grscicollMapperTest() { Institution institution = Institution.newBuilder() .setCode("I1") .setKey("cb0098db-6ff6-4a5d-ad29-51348d114e41") .build(); InstitutionMatch institutionMatch = InstitutionMatch.newBuilder() .setInstitution(institution) .setMatchType(MatchType.EXACT.name()) .build(); Collection collection = Collection.newBuilder() .setKey("5c692584-d517-48e8-93a8-a916ba131d9b") .setCode("C1") .build(); CollectionMatch collectionMatch = CollectionMatch.newBuilder() .setCollection(collection) .setMatchType(MatchType.FUZZY.name()) .build(); GrscicollRecord record = GrscicollRecord.newBuilder() .setId("1") .setInstitutionMatch(institutionMatch) .setCollectionMatch(collectionMatch) .build(); OccurrenceHdfsRecord hdfsRecord = toOccurrenceHdfsRecord(record); Assert.assertEquals(institution.getKey(), hdfsRecord.getInstitutionkey()); Assert.assertEquals(collection.getKey(), hdfsRecord.getCollectionkey()); }
private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> grscicollMapper() { return (hr, sr) -> { GrscicollRecord gr = (GrscicollRecord) sr; if (gr.getInstitutionMatch() != null) { Institution institution = gr.getInstitutionMatch().getInstitution(); if (institution != null) { hr.setInstitutionkey(institution.getKey()); } } if (gr.getCollectionMatch() != null) { Collection collection = gr.getCollectionMatch().getCollection(); if (collection != null) { hr.setCollectionkey(collection.getKey()); } } }; }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> grscicollMapper() { return (hr, sr) -> { GrscicollRecord gr = (GrscicollRecord) sr; if (gr.getInstitutionMatch() != null) { Institution institution = gr.getInstitutionMatch().getInstitution(); if (institution != null) { hr.setInstitutionkey(institution.getKey()); } } if (gr.getCollectionMatch() != null) { Collection collection = gr.getCollectionMatch().getCollection(); if (collection != null) { hr.setCollectionkey(collection.getKey()); } } }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> grscicollMapper() { return (hr, sr) -> { GrscicollRecord gr = (GrscicollRecord) sr; if (gr.getInstitutionMatch() != null) { Institution institution = gr.getInstitutionMatch().getInstitution(); if (institution != null) { hr.setInstitutionkey(institution.getKey()); } } if (gr.getCollectionMatch() != null) { Collection collection = gr.getCollectionMatch().getCollection(); if (collection != null) { hr.setCollectionkey(collection.getKey()); } } }; } }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> grscicollMapper() { return (hr, sr) -> { GrscicollRecord gr = (GrscicollRecord) sr; if (gr.getInstitutionMatch() != null) { Institution institution = gr.getInstitutionMatch().getInstitution(); if (institution != null) { hr.setInstitutionkey(institution.getKey()); } } if (gr.getCollectionMatch() != null) { Collection collection = gr.getCollectionMatch().getCollection(); if (collection != null) { hr.setCollectionkey(collection.getKey()); } } }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
OccurrenceHdfsRecordConverter { private static BiConsumer<OccurrenceHdfsRecord, SpecificRecordBase> grscicollMapper() { return (hr, sr) -> { GrscicollRecord gr = (GrscicollRecord) sr; if (gr.getInstitutionMatch() != null) { Institution institution = gr.getInstitutionMatch().getInstitution(); if (institution != null) { hr.setInstitutionkey(institution.getKey()); } } if (gr.getCollectionMatch() != null) { Collection collection = gr.getCollectionMatch().getCollection(); if (collection != null) { hr.setCollectionkey(collection.getKey()); } } }; } static OccurrenceHdfsRecord toOccurrenceHdfsRecord(SpecificRecordBase... records); }
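A hypothetical counterpart to the test above (not in the source): when only one of the two GrSciColl matches is present, only the corresponding key is copied; this assumes the collection key of a fresh OccurrenceHdfsRecord defaults to null.
@Test
public void grscicollInstitutionOnlySketch() {
  Institution institution =
      Institution.newBuilder().setCode("I1").setKey("cb0098db-6ff6-4a5d-ad29-51348d114e41").build();
  InstitutionMatch institutionMatch =
      InstitutionMatch.newBuilder()
          .setInstitution(institution)
          .setMatchType(MatchType.EXACT.name())
          .build();
  GrscicollRecord record =
      GrscicollRecord.newBuilder().setId("1").setInstitutionMatch(institutionMatch).build();

  OccurrenceHdfsRecord hdfs = toOccurrenceHdfsRecord(record);

  Assert.assertEquals(institution.getKey(), hdfs.getInstitutionkey());
  // No collection match was supplied, so the collection key keeps its default (null) value.
  Assert.assertNull(hdfs.getCollectionkey());
}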
@Test public void createSimpleJsonFromSpecificRecordBase() { ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId("777").setCoreRowType("core").build(); TemporalRecord temporalRecord = TemporalRecord.newBuilder() .setId("777") .setEventDate(EventDate.newBuilder().setLte("01-01-2018").setGte("01-01-2011").build()) .setDay(1) .setYear(2000) .setStartDayOfYear(1) .build(); LocationRecord locationRecord = LocationRecord.newBuilder() .setId("777") .setCountry("Country") .setCountryCode("Code 1'2\"") .setDecimalLatitude(1d) .setDecimalLongitude(2d) .build(); MetadataRecord metadataRecord = MetadataRecord.newBuilder() .setId("777") .setNetworkKeys(Collections.singletonList("NK1")) .build(); String expected = "{\"id\":\"777\",\"coreRowType\":\"core\",\"coreTerms\":\"{}\",\"extensions\":\"{}\",\"year\":2000," + "\"day\":1,\"eventDate\":{\"gte\":\"01-01-2011\",\"lte\":\"01-01-2018\"},\"startDayOfYear\":1," + "\"issues\":{},\"country\":\"Country\",\"countryCode\":\"Code 1'2\\\"\",\"decimalLatitude\":1.0," + "\"decimalLongitude\":2.0,\"networkKeys\":[\"NK1\"]}"; String result = JsonConverter.builder() .record(extendedRecord) .record(temporalRecord) .record(locationRecord) .record(metadataRecord) .build() .toString(); Assert.assertTrue(JsonValidationUtils.isValid(result)); Assert.assertEquals(expected, result); }
@Override public String toString() { return toJson().toString(); }
JsonConverter { @Override public String toString() { return toJson().toString(); } }
JsonConverter { @Override public String toString() { return toJson().toString(); } }
JsonConverter { @Override public String toString() { return toJson().toString(); } ObjectNode toJson(); @Override String toString(); }
JsonConverter { @Override public String toString() { return toJson().toString(); } ObjectNode toJson(); @Override String toString(); }
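For a minimal hypothetical illustration (not part of the source), a single record can be serialised the same way; toString() simply delegates to toJson().toString(), so the output is plain JSON.
@Test
public void jsonConverterSingleRecordSketch() {
  LocationRecord locationRecord =
      LocationRecord.newBuilder().setId("777").setCountry("Denmark").setCountryCode("DK").build();

  String json = JsonConverter.builder().record(locationRecord).build().toString();

  Assert.assertTrue(JsonValidationUtils.isValid(json));
  Assert.assertTrue(json.contains("\"countryCode\":\"DK\""));
}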
@Test public void avroDeserializingNormalIdTest() throws IOException { DwcaToAvroConverter.create().inputPath(inpPath).outputPath(outPath).convert(); File verbatim = new File(outPath); Assert.assertTrue(verbatim.exists()); DatumReader<ExtendedRecord> datumReader = new SpecificDatumReader<>(ExtendedRecord.class); try (DataFileReader<ExtendedRecord> dataFileReader = new DataFileReader<>(verbatim, datumReader)) { while (dataFileReader.hasNext()) { ExtendedRecord record = dataFileReader.next(); Assert.assertNotNull(record); Assert.assertNotNull(record.getId()); } } Files.deleteIfExists(verbatim.toPath()); }
@Override protected long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) throws IOException { DwcaReader reader = DwcaReader.fromLocation(inputPath.toString()); log.info("Exporting the DwC Archive to Avro started {}", inputPath); while (reader.advance()) { ExtendedRecord record = reader.getCurrent(); if (!record.getId().equals(ExtendedRecordConverter.getRecordIdError())) { dataFileWriter.append(record); } } reader.close(); return reader.getRecordsReturned(); }
DwcaToAvroConverter extends ConverterToVerbatim { @Override protected long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) throws IOException { DwcaReader reader = DwcaReader.fromLocation(inputPath.toString()); log.info("Exporting the DwC Archive to Avro started {}", inputPath); while (reader.advance()) { ExtendedRecord record = reader.getCurrent(); if (!record.getId().equals(ExtendedRecordConverter.getRecordIdError())) { dataFileWriter.append(record); } } reader.close(); return reader.getRecordsReturned(); } }
DwcaToAvroConverter extends ConverterToVerbatim { @Override protected long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) throws IOException { DwcaReader reader = DwcaReader.fromLocation(inputPath.toString()); log.info("Exporting the DwC Archive to Avro started {}", inputPath); while (reader.advance()) { ExtendedRecord record = reader.getCurrent(); if (!record.getId().equals(ExtendedRecordConverter.getRecordIdError())) { dataFileWriter.append(record); } } reader.close(); return reader.getRecordsReturned(); } }
DwcaToAvroConverter extends ConverterToVerbatim { @Override protected long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) throws IOException { DwcaReader reader = DwcaReader.fromLocation(inputPath.toString()); log.info("Exporting the DwC Archive to Avro started {}", inputPath); while (reader.advance()) { ExtendedRecord record = reader.getCurrent(); if (!record.getId().equals(ExtendedRecordConverter.getRecordIdError())) { dataFileWriter.append(record); } } reader.close(); return reader.getRecordsReturned(); } static void main(String... args); }
DwcaToAvroConverter extends ConverterToVerbatim { @Override protected long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) throws IOException { DwcaReader reader = DwcaReader.fromLocation(inputPath.toString()); log.info("Exporting the DwC Archive to Avro started {}", inputPath); while (reader.advance()) { ExtendedRecord record = reader.getCurrent(); if (!record.getId().equals(ExtendedRecordConverter.getRecordIdError())) { dataFileWriter.append(record); } } reader.close(); return reader.getRecordsReturned(); } static void main(String... args); }
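Outside of a test, the conversion above can be started with the same fluent calls; this is a sketch only, the paths below are placeholders, and the builder accepting plain strings is an assumption carried over from the test.
// Hypothetical paths, replace with a real DwC archive and a writable output location.
String dwcaPath = "/tmp/dwca-dataset";      // assumption: expanded DwC-A directory or archive
String verbatimAvro = "/tmp/verbatim.avro"; // assumption: target Avro file

// Rows whose id equals ExtendedRecordConverter.getRecordIdError() are silently skipped.
DwcaToAvroConverter.create().inputPath(dwcaPath).outputPath(verbatimAvro).convert();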
@Test public void occurrenceAsExtensionTest() { String id = "1"; String somethingCore = "somethingCore"; String somethingExt = "somethingExt"; Map<String, String> coreMap = Collections.singletonMap(somethingCore, somethingCore); Map<String, String> extMap = new HashMap<>(2); extMap.put(DwcTerm.occurrenceID.qualifiedName(), id); extMap.put(somethingExt, somethingExt); Optional<ExtendedRecord> result = OccurrenceExtensionConverter.convert(coreMap, extMap); Assert.assertTrue(result.isPresent()); ExtendedRecord erResult = result.get(); Assert.assertEquals(id, erResult.getId()); Assert.assertEquals(somethingCore, erResult.getCoreTerms().get(somethingCore)); Assert.assertEquals(somethingExt, erResult.getCoreTerms().get(somethingExt)); }
public static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap) { String id = extMap.get(DwcTerm.occurrenceID.qualifiedName()); if (!Strings.isNullOrEmpty(id)) { ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId(id).build(); extendedRecord.getCoreTerms().putAll(coreMap); extendedRecord.getCoreTerms().putAll(extMap); return Optional.of(extendedRecord); } return Optional.empty(); }
OccurrenceExtensionConverter { public static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap) { String id = extMap.get(DwcTerm.occurrenceID.qualifiedName()); if (!Strings.isNullOrEmpty(id)) { ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId(id).build(); extendedRecord.getCoreTerms().putAll(coreMap); extendedRecord.getCoreTerms().putAll(extMap); return Optional.of(extendedRecord); } return Optional.empty(); } }
OccurrenceExtensionConverter { public static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap) { String id = extMap.get(DwcTerm.occurrenceID.qualifiedName()); if (!Strings.isNullOrEmpty(id)) { ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId(id).build(); extendedRecord.getCoreTerms().putAll(coreMap); extendedRecord.getCoreTerms().putAll(extMap); return Optional.of(extendedRecord); } return Optional.empty(); } }
OccurrenceExtensionConverter { public static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap) { String id = extMap.get(DwcTerm.occurrenceID.qualifiedName()); if (!Strings.isNullOrEmpty(id)) { ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId(id).build(); extendedRecord.getCoreTerms().putAll(coreMap); extendedRecord.getCoreTerms().putAll(extMap); return Optional.of(extendedRecord); } return Optional.empty(); } static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap); }
OccurrenceExtensionConverter { public static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap) { String id = extMap.get(DwcTerm.occurrenceID.qualifiedName()); if (!Strings.isNullOrEmpty(id)) { ExtendedRecord extendedRecord = ExtendedRecord.newBuilder().setId(id).build(); extendedRecord.getCoreTerms().putAll(coreMap); extendedRecord.getCoreTerms().putAll(extMap); return Optional.of(extendedRecord); } return Optional.empty(); } static Optional<ExtendedRecord> convert( Map<String, String> coreMap, Map<String, String> extMap); }
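Conversely, a hypothetical case (not in the source) where the extension row has no occurrenceID shows the guard clause returning an empty Optional.
@Test
public void occurrenceExtensionMissingIdSketch() {
  Map<String, String> coreMap = Collections.singletonMap("somethingCore", "somethingCore");
  Map<String, String> extMap = Collections.singletonMap("somethingExt", "somethingExt");

  // No DwcTerm.occurrenceID entry in extMap, so convert() rejects the row.
  Optional<ExtendedRecord> result = OccurrenceExtensionConverter.convert(coreMap, extMap);

  Assert.assertFalse(result.isPresent());
}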
@Test public void regularExtendedRecordsTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder().setId("2").build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder().setId("3").build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); Map<String, ExtendedRecord> result = AvroReader.readRecords("", "", ExtendedRecord.class, verbatimPath1.toString()); assertMap(result, expectedOne, expectedTwo, expectedThree); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); }
public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
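As a standalone sketch (hypothetical path, not from the source), the reader can be pointed at any local Avro file; the empty strings mirror the tests above, where no hdfs-site.xml or core-site.xml is supplied and the local file system is used.
// Hypothetical local file written by one of the converters above.
Map<String, ExtendedRecord> records =
    AvroReader.readRecords("", "", ExtendedRecord.class, "target/verbatim1.avro");

// The map appears to be keyed by record id, as the assertions in the tests above suggest.
records.forEach((key, er) -> System.out.println(key + " -> " + er.getCoreTerms()));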
@Test public void regularExtendedRecordsWildcardTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder().setId("2").build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder().setId("3").build(); ExtendedRecord expectedFour = ExtendedRecord.newBuilder().setId("4").build(); ExtendedRecord expectedFive = ExtendedRecord.newBuilder().setId("5").build(); ExtendedRecord expectedSix = ExtendedRecord.newBuilder().setId("6").build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); writeExtendedRecords(verbatimPath2, expectedFour, expectedFive, expectedSix); Map<String, ExtendedRecord> result = AvroReader.readRecords( "", "", ExtendedRecord.class, new Path("target/verbatim*.avro").toString()); assertMap( result, expectedOne, expectedTwo, expectedThree, expectedFour, expectedFive, expectedSix); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); Files.deleteIfExists(Paths.get(verbatimPath2.toString())); }
public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
@Test public void uniqueExtendedRecordsTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder().setId("2").build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder().setId("3").build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); Map<String, ExtendedRecord> result = AvroReader.readUniqueRecords("", "", ExtendedRecord.class, verbatimPath1.toString()); assertMap(result, expectedOne, expectedTwo, expectedThree); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); }
public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
@Test public void uniqueOneEqualDuplicateTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder().setId("3").build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); Map<String, ExtendedRecord> result = AvroReader.readUniqueRecords("", "", ExtendedRecord.class, verbatimPath1.toString()); assertMap(result, expectedOne, expectedThree); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); }
public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
@Test public void uniqueOneNotEqualDuplicateTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder() .setId("1") .setCoreTerms(Collections.singletonMap("key", "value")) .build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder().setId("3").build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); Map<String, ExtendedRecord> result = AvroReader.readUniqueRecords("", "", ExtendedRecord.class, verbatimPath1.toString()); assertMap(result, expectedThree); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); }
public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
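Taken together, the duplicate tests suggest the following behaviour, summarised in this hypothetical sketch (not in the source, and assuming the returned map is keyed by record id): identical records sharing an id collapse to a single entry, while records sharing an id but differing in content are dropped entirely.
Map<String, ExtendedRecord> unique =
    AvroReader.readUniqueRecords("", "", ExtendedRecord.class, "target/verbatim1.avro");

// With the data written in uniqueOneNotEqualDuplicateTest, id "1" had conflicting
// duplicates and is therefore absent, while id "3" survives.
Assert.assertFalse(unique.containsKey("1"));
Assert.assertTrue(unique.containsKey("3"));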
@Test public void uniqueOneNotEqualDuplicateWildcardTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder() .setId("1") .setCoreTerms(Collections.singletonMap("key", "value")) .build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder().setId("3").build(); ExtendedRecord expectedFour = ExtendedRecord.newBuilder() .setId("1") .setCoreTerms(Collections.singletonMap("key", "value")) .build(); ExtendedRecord expectedFive = ExtendedRecord.newBuilder().setId("1").build(); ExtendedRecord expectedSix = ExtendedRecord.newBuilder().setId("3").build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); writeExtendedRecords(verbatimPath2, expectedFour, expectedFive, expectedSix); Map<String, ExtendedRecord> result = AvroReader.readUniqueRecords( "", "", ExtendedRecord.class, new Path("target/verbatim*.avro").toString()); assertMap(result, expectedThree); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); Files.deleteIfExists(Paths.get(verbatimPath2.toString())); }
public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
@Test public void uniqueAllNotEqualDuplicateTest() throws IOException { ExtendedRecord expectedOne = ExtendedRecord.newBuilder() .setId("1") .setCoreTerms(Collections.singletonMap("key1", "value")) .build(); ExtendedRecord expectedTwo = ExtendedRecord.newBuilder() .setId("1") .setCoreTerms(Collections.singletonMap("key2", "value")) .build(); ExtendedRecord expectedThree = ExtendedRecord.newBuilder() .setId("1") .setCoreTerms(Collections.singletonMap("key3", "value")) .build(); writeExtendedRecords(verbatimPath1, expectedOne, expectedTwo, expectedThree); Map<String, ExtendedRecord> result = AvroReader.readUniqueRecords("", "", ExtendedRecord.class, verbatimPath1.toString()); assertMap(result); Files.deleteIfExists(Paths.get(verbatimPath1.toString())); }
public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
AvroReader { public static <T extends Record> Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path) { FileSystem fs = FsUtils.getFileSystem(hdfsSiteConfig, coreSiteConfig, path); List<Path> paths = parseWildcardPath(fs, path); return readUniqueRecords(fs, clazz, paths); } static Map<String, T> readUniqueRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); static Map<String, T> readRecords( String hdfsSiteConfig, String coreSiteConfig, Class<T> clazz, String path); }
@Test public void testIsValidDate() { assertTrue(TemporalParser.isValidDate(Year.of(2005))); assertTrue(TemporalParser.isValidDate(YearMonth.of(2005, 1))); assertTrue(TemporalParser.isValidDate(LocalDate.of(2005, 1, 1))); assertTrue(TemporalParser.isValidDate(LocalDateTime.of(2005, 1, 1, 2, 3, 4))); assertTrue(TemporalParser.isValidDate(LocalDate.now())); assertTrue(TemporalParser.isValidDate(LocalDateTime.now().plus(23, ChronoUnit.HOURS))); assertFalse(TemporalParser.isValidDate(YearMonth.of(1599, 12))); assertFalse(TemporalParser.isValidDate(LocalDate.now().plusDays(2))); }
protected static boolean isValidDate(TemporalAccessor temporalAccessor) { LocalDate upperBound = LocalDate.now().plusDays(1); return isValidDate(temporalAccessor, Range.closed(MIN_LOCAL_DATE, upperBound)); }
TemporalParser implements Serializable { protected static boolean isValidDate(TemporalAccessor temporalAccessor) { LocalDate upperBound = LocalDate.now().plusDays(1); return isValidDate(temporalAccessor, Range.closed(MIN_LOCAL_DATE, upperBound)); } }
TemporalParser implements Serializable { protected static boolean isValidDate(TemporalAccessor temporalAccessor) { LocalDate upperBound = LocalDate.now().plusDays(1); return isValidDate(temporalAccessor, Range.closed(MIN_LOCAL_DATE, upperBound)); } private TemporalParser(List<DateComponentOrdering> orderings); }
TemporalParser implements Serializable { protected static boolean isValidDate(TemporalAccessor temporalAccessor) { LocalDate upperBound = LocalDate.now().plusDays(1); return isValidDate(temporalAccessor, Range.closed(MIN_LOCAL_DATE, upperBound)); } private TemporalParser(List<DateComponentOrdering> orderings); static TemporalParser create(List<DateComponentOrdering> orderings); static TemporalParser create(); OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString); OccurrenceParseResult<TemporalAccessor> parseRecordedDate(String dateString); OccurrenceParseResult<TemporalAccessor> parseLocalDate( String dateString, Range<LocalDate> likelyRange, OccurrenceIssue unlikelyIssue); }
TemporalParser implements Serializable { protected static boolean isValidDate(TemporalAccessor temporalAccessor) { LocalDate upperBound = LocalDate.now().plusDays(1); return isValidDate(temporalAccessor, Range.closed(MIN_LOCAL_DATE, upperBound)); } private TemporalParser(List<DateComponentOrdering> orderings); static TemporalParser create(List<DateComponentOrdering> orderings); static TemporalParser create(); OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString); OccurrenceParseResult<TemporalAccessor> parseRecordedDate(String dateString); OccurrenceParseResult<TemporalAccessor> parseLocalDate( String dateString, Range<LocalDate> likelyRange, OccurrenceIssue unlikelyIssue); }
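Reading isValidDate together with the assertions above gives the accepted window: from the parser's minimum date (the 1599-12 case fails, which suggests a 1600-01-01 lower bound) up to and including tomorrow. A tiny hypothetical sketch:
// isValidDate is protected static, so this sketch assumes it runs from the same package.
assertTrue(TemporalParser.isValidDate(LocalDate.now().plusDays(1)));  // tomorrow: upper bound is inclusive
assertFalse(TemporalParser.isValidDate(LocalDate.now().plusDays(2))); // beyond tomorrow: rejected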
@Test public void testParseRecordedDate() { TemporalParser temporalParser = TemporalParser.create(); OccurrenceParseResult<TemporalAccessor> result; result = temporalParser.parseRecordedDate("2005", "1", "", "2005-01-01"); assertEquals(LocalDate.of(2005, 1, 1), result.getPayload()); assertEquals(0, result.getIssues().size()); result = temporalParser.parseRecordedDate("1996", "1", "26", "1996-01-26T01:00Z"); assertEquals( ZonedDateTime.of(LocalDateTime.of(1996, 1, 26, 1, 0), ZoneId.of("Z")), result.getPayload()); assertEquals(0, result.getIssues().size()); result = temporalParser.parseRecordedDate("2005", "1", "2", "2005-01-05"); assertNull(result.getPayload()); assertEquals(1, result.getIssues().size()); assertEquals(OccurrenceIssue.RECORDED_DATE_MISMATCH, result.getIssues().iterator().next()); }
public OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString) { boolean atomizedDateProvided = StringUtils.isNotBlank(year) || StringUtils.isNotBlank(month) || StringUtils.isNotBlank(day); boolean dateStringProvided = StringUtils.isNotBlank(dateString); if (!atomizedDateProvided && !dateStringProvided) { return OccurrenceParseResult.fail(); } Set<OccurrenceIssue> issues = EnumSet.noneOf(OccurrenceIssue.class); TemporalAccessor parsedTemporalAccessor; ParseResult.CONFIDENCE confidence; ParseResult<TemporalAccessor> parsedYMDResult = atomizedDateProvided ? temporalParser.parse(year, month, day) : ParseResult.fail(); ParseResult<TemporalAccessor> parsedDateResult = dateStringProvided ? temporalParser.parse(dateString) : ParseResult.fail(); TemporalAccessor parsedYmdTa = parsedYMDResult.getPayload(); TemporalAccessor parsedDateTa = parsedDateResult.getPayload(); if (atomizedDateProvided && dateStringProvided && !TemporalAccessorUtils.sameOrContained(parsedYmdTa, parsedDateTa)) { boolean ambiguityResolved = false; if (parsedDateResult.getAlternativePayloads() != null) { for (TemporalAccessor possibleTa : parsedDateResult.getAlternativePayloads()) { if (TemporalAccessorUtils.sameOrContained(parsedYmdTa, possibleTa)) { parsedDateTa = possibleTa; ambiguityResolved = true; log.debug( "Ambiguous date {} matches year-month-day date {}-{}-{} for {}", dateString, year, month, day, parsedDateTa); } } } if (!ambiguityResolved) { issues.add(OccurrenceIssue.RECORDED_DATE_MISMATCH); log.debug("Date mismatch: [{} vs {}].", parsedYmdTa, parsedDateTa); } Optional<TemporalAccessor> bestResolution = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa); if (bestResolution.isPresent()) { parsedTemporalAccessor = bestResolution.get(); confidence = (parsedYmdTa == null || parsedDateTa == null) ? PROBABLE : DEFINITE; } else { return OccurrenceParseResult.fail(issues); } } else { parsedTemporalAccessor = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa).orElse(null); confidence = parsedDateTa != null ? parsedDateResult.getConfidence() : parsedYMDResult.getConfidence(); } if (!isValidDate(parsedTemporalAccessor)) { if (parsedTemporalAccessor == null) { issues.add(OccurrenceIssue.RECORDED_DATE_INVALID); } else { issues.add(OccurrenceIssue.RECORDED_DATE_UNLIKELY); } log.debug("Invalid date: [{}]].", parsedTemporalAccessor); return OccurrenceParseResult.fail(issues); } return OccurrenceParseResult.success(confidence, parsedTemporalAccessor, issues); }
TemporalParser implements Serializable { public OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString) { boolean atomizedDateProvided = StringUtils.isNotBlank(year) || StringUtils.isNotBlank(month) || StringUtils.isNotBlank(day); boolean dateStringProvided = StringUtils.isNotBlank(dateString); if (!atomizedDateProvided && !dateStringProvided) { return OccurrenceParseResult.fail(); } Set<OccurrenceIssue> issues = EnumSet.noneOf(OccurrenceIssue.class); TemporalAccessor parsedTemporalAccessor; ParseResult.CONFIDENCE confidence; ParseResult<TemporalAccessor> parsedYMDResult = atomizedDateProvided ? temporalParser.parse(year, month, day) : ParseResult.fail(); ParseResult<TemporalAccessor> parsedDateResult = dateStringProvided ? temporalParser.parse(dateString) : ParseResult.fail(); TemporalAccessor parsedYmdTa = parsedYMDResult.getPayload(); TemporalAccessor parsedDateTa = parsedDateResult.getPayload(); if (atomizedDateProvided && dateStringProvided && !TemporalAccessorUtils.sameOrContained(parsedYmdTa, parsedDateTa)) { boolean ambiguityResolved = false; if (parsedDateResult.getAlternativePayloads() != null) { for (TemporalAccessor possibleTa : parsedDateResult.getAlternativePayloads()) { if (TemporalAccessorUtils.sameOrContained(parsedYmdTa, possibleTa)) { parsedDateTa = possibleTa; ambiguityResolved = true; log.debug( "Ambiguous date {} matches year-month-day date {}-{}-{} for {}", dateString, year, month, day, parsedDateTa); } } } if (!ambiguityResolved) { issues.add(OccurrenceIssue.RECORDED_DATE_MISMATCH); log.debug("Date mismatch: [{} vs {}].", parsedYmdTa, parsedDateTa); } Optional<TemporalAccessor> bestResolution = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa); if (bestResolution.isPresent()) { parsedTemporalAccessor = bestResolution.get(); confidence = (parsedYmdTa == null || parsedDateTa == null) ? PROBABLE : DEFINITE; } else { return OccurrenceParseResult.fail(issues); } } else { parsedTemporalAccessor = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa).orElse(null); confidence = parsedDateTa != null ? parsedDateResult.getConfidence() : parsedYMDResult.getConfidence(); } if (!isValidDate(parsedTemporalAccessor)) { if (parsedTemporalAccessor == null) { issues.add(OccurrenceIssue.RECORDED_DATE_INVALID); } else { issues.add(OccurrenceIssue.RECORDED_DATE_UNLIKELY); } log.debug("Invalid date: [{}]].", parsedTemporalAccessor); return OccurrenceParseResult.fail(issues); } return OccurrenceParseResult.success(confidence, parsedTemporalAccessor, issues); } }
TemporalParser implements Serializable { public OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString) { boolean atomizedDateProvided = StringUtils.isNotBlank(year) || StringUtils.isNotBlank(month) || StringUtils.isNotBlank(day); boolean dateStringProvided = StringUtils.isNotBlank(dateString); if (!atomizedDateProvided && !dateStringProvided) { return OccurrenceParseResult.fail(); } Set<OccurrenceIssue> issues = EnumSet.noneOf(OccurrenceIssue.class); TemporalAccessor parsedTemporalAccessor; ParseResult.CONFIDENCE confidence; ParseResult<TemporalAccessor> parsedYMDResult = atomizedDateProvided ? temporalParser.parse(year, month, day) : ParseResult.fail(); ParseResult<TemporalAccessor> parsedDateResult = dateStringProvided ? temporalParser.parse(dateString) : ParseResult.fail(); TemporalAccessor parsedYmdTa = parsedYMDResult.getPayload(); TemporalAccessor parsedDateTa = parsedDateResult.getPayload(); if (atomizedDateProvided && dateStringProvided && !TemporalAccessorUtils.sameOrContained(parsedYmdTa, parsedDateTa)) { boolean ambiguityResolved = false; if (parsedDateResult.getAlternativePayloads() != null) { for (TemporalAccessor possibleTa : parsedDateResult.getAlternativePayloads()) { if (TemporalAccessorUtils.sameOrContained(parsedYmdTa, possibleTa)) { parsedDateTa = possibleTa; ambiguityResolved = true; log.debug( "Ambiguous date {} matches year-month-day date {}-{}-{} for {}", dateString, year, month, day, parsedDateTa); } } } if (!ambiguityResolved) { issues.add(OccurrenceIssue.RECORDED_DATE_MISMATCH); log.debug("Date mismatch: [{} vs {}].", parsedYmdTa, parsedDateTa); } Optional<TemporalAccessor> bestResolution = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa); if (bestResolution.isPresent()) { parsedTemporalAccessor = bestResolution.get(); confidence = (parsedYmdTa == null || parsedDateTa == null) ? PROBABLE : DEFINITE; } else { return OccurrenceParseResult.fail(issues); } } else { parsedTemporalAccessor = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa).orElse(null); confidence = parsedDateTa != null ? parsedDateResult.getConfidence() : parsedYMDResult.getConfidence(); } if (!isValidDate(parsedTemporalAccessor)) { if (parsedTemporalAccessor == null) { issues.add(OccurrenceIssue.RECORDED_DATE_INVALID); } else { issues.add(OccurrenceIssue.RECORDED_DATE_UNLIKELY); } log.debug("Invalid date: [{}]].", parsedTemporalAccessor); return OccurrenceParseResult.fail(issues); } return OccurrenceParseResult.success(confidence, parsedTemporalAccessor, issues); } private TemporalParser(List<DateComponentOrdering> orderings); }
TemporalParser implements Serializable { public OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString) { boolean atomizedDateProvided = StringUtils.isNotBlank(year) || StringUtils.isNotBlank(month) || StringUtils.isNotBlank(day); boolean dateStringProvided = StringUtils.isNotBlank(dateString); if (!atomizedDateProvided && !dateStringProvided) { return OccurrenceParseResult.fail(); } Set<OccurrenceIssue> issues = EnumSet.noneOf(OccurrenceIssue.class); TemporalAccessor parsedTemporalAccessor; ParseResult.CONFIDENCE confidence; ParseResult<TemporalAccessor> parsedYMDResult = atomizedDateProvided ? temporalParser.parse(year, month, day) : ParseResult.fail(); ParseResult<TemporalAccessor> parsedDateResult = dateStringProvided ? temporalParser.parse(dateString) : ParseResult.fail(); TemporalAccessor parsedYmdTa = parsedYMDResult.getPayload(); TemporalAccessor parsedDateTa = parsedDateResult.getPayload(); if (atomizedDateProvided && dateStringProvided && !TemporalAccessorUtils.sameOrContained(parsedYmdTa, parsedDateTa)) { boolean ambiguityResolved = false; if (parsedDateResult.getAlternativePayloads() != null) { for (TemporalAccessor possibleTa : parsedDateResult.getAlternativePayloads()) { if (TemporalAccessorUtils.sameOrContained(parsedYmdTa, possibleTa)) { parsedDateTa = possibleTa; ambiguityResolved = true; log.debug( "Ambiguous date {} matches year-month-day date {}-{}-{} for {}", dateString, year, month, day, parsedDateTa); } } } if (!ambiguityResolved) { issues.add(OccurrenceIssue.RECORDED_DATE_MISMATCH); log.debug("Date mismatch: [{} vs {}].", parsedYmdTa, parsedDateTa); } Optional<TemporalAccessor> bestResolution = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa); if (bestResolution.isPresent()) { parsedTemporalAccessor = bestResolution.get(); confidence = (parsedYmdTa == null || parsedDateTa == null) ? PROBABLE : DEFINITE; } else { return OccurrenceParseResult.fail(issues); } } else { parsedTemporalAccessor = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa).orElse(null); confidence = parsedDateTa != null ? parsedDateResult.getConfidence() : parsedYMDResult.getConfidence(); } if (!isValidDate(parsedTemporalAccessor)) { if (parsedTemporalAccessor == null) { issues.add(OccurrenceIssue.RECORDED_DATE_INVALID); } else { issues.add(OccurrenceIssue.RECORDED_DATE_UNLIKELY); } log.debug("Invalid date: [{}]].", parsedTemporalAccessor); return OccurrenceParseResult.fail(issues); } return OccurrenceParseResult.success(confidence, parsedTemporalAccessor, issues); } private TemporalParser(List<DateComponentOrdering> orderings); static TemporalParser create(List<DateComponentOrdering> orderings); static TemporalParser create(); OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString); OccurrenceParseResult<TemporalAccessor> parseRecordedDate(String dateString); OccurrenceParseResult<TemporalAccessor> parseLocalDate( String dateString, Range<LocalDate> likelyRange, OccurrenceIssue unlikelyIssue); }
TemporalParser implements Serializable { public OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString) { boolean atomizedDateProvided = StringUtils.isNotBlank(year) || StringUtils.isNotBlank(month) || StringUtils.isNotBlank(day); boolean dateStringProvided = StringUtils.isNotBlank(dateString); if (!atomizedDateProvided && !dateStringProvided) { return OccurrenceParseResult.fail(); } Set<OccurrenceIssue> issues = EnumSet.noneOf(OccurrenceIssue.class); TemporalAccessor parsedTemporalAccessor; ParseResult.CONFIDENCE confidence; ParseResult<TemporalAccessor> parsedYMDResult = atomizedDateProvided ? temporalParser.parse(year, month, day) : ParseResult.fail(); ParseResult<TemporalAccessor> parsedDateResult = dateStringProvided ? temporalParser.parse(dateString) : ParseResult.fail(); TemporalAccessor parsedYmdTa = parsedYMDResult.getPayload(); TemporalAccessor parsedDateTa = parsedDateResult.getPayload(); if (atomizedDateProvided && dateStringProvided && !TemporalAccessorUtils.sameOrContained(parsedYmdTa, parsedDateTa)) { boolean ambiguityResolved = false; if (parsedDateResult.getAlternativePayloads() != null) { for (TemporalAccessor possibleTa : parsedDateResult.getAlternativePayloads()) { if (TemporalAccessorUtils.sameOrContained(parsedYmdTa, possibleTa)) { parsedDateTa = possibleTa; ambiguityResolved = true; log.debug( "Ambiguous date {} matches year-month-day date {}-{}-{} for {}", dateString, year, month, day, parsedDateTa); } } } if (!ambiguityResolved) { issues.add(OccurrenceIssue.RECORDED_DATE_MISMATCH); log.debug("Date mismatch: [{} vs {}].", parsedYmdTa, parsedDateTa); } Optional<TemporalAccessor> bestResolution = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa); if (bestResolution.isPresent()) { parsedTemporalAccessor = bestResolution.get(); confidence = (parsedYmdTa == null || parsedDateTa == null) ? PROBABLE : DEFINITE; } else { return OccurrenceParseResult.fail(issues); } } else { parsedTemporalAccessor = TemporalAccessorUtils.bestResolution(parsedYmdTa, parsedDateTa).orElse(null); confidence = parsedDateTa != null ? parsedDateResult.getConfidence() : parsedYMDResult.getConfidence(); } if (!isValidDate(parsedTemporalAccessor)) { if (parsedTemporalAccessor == null) { issues.add(OccurrenceIssue.RECORDED_DATE_INVALID); } else { issues.add(OccurrenceIssue.RECORDED_DATE_UNLIKELY); } log.debug("Invalid date: [{}]].", parsedTemporalAccessor); return OccurrenceParseResult.fail(issues); } return OccurrenceParseResult.success(confidence, parsedTemporalAccessor, issues); } private TemporalParser(List<DateComponentOrdering> orderings); static TemporalParser create(List<DateComponentOrdering> orderings); static TemporalParser create(); OccurrenceParseResult<TemporalAccessor> parseRecordedDate( String year, String month, String day, String dateString); OccurrenceParseResult<TemporalAccessor> parseRecordedDate(String dateString); OccurrenceParseResult<TemporalAccessor> parseLocalDate( String dateString, Range<LocalDate> likelyRange, OccurrenceIssue unlikelyIssue); }
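One more hypothetical case (not in the source): when only the atomised year is supplied, the date-string branch is skipped and the year itself becomes the payload; this assumes the underlying parser resolves a lone year to java.time.Year.
TemporalParser parser = TemporalParser.create();
OccurrenceParseResult<TemporalAccessor> result = parser.parseRecordedDate("2005", "", "", "");

// No date string means no mismatch check; the confidence comes from the year/month/day parse.
assertEquals(Year.of(2005), result.getPayload());
assertEquals(0, result.getIssues().size());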
@Test public void avroDeserializingNormalIdTest() throws IOException { String inputPath = inpPath + "61"; XmlToAvroConverter.create().inputPath(inputPath).outputPath(outPath).convert(); File verbatim = new File(outPath); Assert.assertTrue(verbatim.exists()); DatumReader<ExtendedRecord> datumReader = new SpecificDatumReader<>(ExtendedRecord.class); try (DataFileReader<ExtendedRecord> dataFileReader = new DataFileReader<>(verbatim, datumReader)) { while (dataFileReader.hasNext()) { ExtendedRecord record = dataFileReader.next(); Assert.assertNotNull(record); Assert.assertNotNull(record.getId()); Assert.assertTrue(record.getId().contains("catalog")); } } Files.deleteIfExists(verbatim.toPath()); }
@Override public long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) { return ExtendedRecordConverter.create(executor).toAvro(inputPath.toString(), dataFileWriter); }
XmlToAvroConverter extends ConverterToVerbatim { @Override public long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) { return ExtendedRecordConverter.create(executor).toAvro(inputPath.toString(), dataFileWriter); } }
XmlToAvroConverter extends ConverterToVerbatim { @Override public long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) { return ExtendedRecordConverter.create(executor).toAvro(inputPath.toString(), dataFileWriter); } }
XmlToAvroConverter extends ConverterToVerbatim { @Override public long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) { return ExtendedRecordConverter.create(executor).toAvro(inputPath.toString(), dataFileWriter); } XmlToAvroConverter executor(ExecutorService executor); XmlToAvroConverter xmlReaderParallelism(int xmlReaderParallelism); static void main(String... args); @Override long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter); }
XmlToAvroConverter extends ConverterToVerbatim { @Override public long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter) { return ExtendedRecordConverter.create(executor).toAvro(inputPath.toString(), dataFileWriter); } XmlToAvroConverter executor(ExecutorService executor); XmlToAvroConverter xmlReaderParallelism(int xmlReaderParallelism); static void main(String... args); @Override long convert(Path inputPath, SyncDataFileWriter<ExtendedRecord> dataFileWriter); }
@Test public void singleDateRangeTest() { TemporalRangeParser trp = TemporalRangeParser.builder() .temporalParser(TemporalParser.create(Collections.singletonList(DMY))) .create(); EventRange range = trp.parse("1930/1929"); assertEquals("1930", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); range = trp.parse("1930/1931"); assertEquals("1930", range.getFrom().get().toString()); assertEquals("1931", range.getTo().get().toString()); range = trp.parse("1930/1930"); assertEquals("1930", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); range = trp.parse("1930-01"); assertEquals("1930-01", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); range = trp.parse("01/1930"); assertFalse(range.getFrom().isPresent()); assertFalse(range.getTo().isPresent()); range = trp.parse("1930-01-02/1930-02-01"); assertEquals("1930-01-02", range.getFrom().get().toString()); assertEquals("1930-02-01", range.getTo().get().toString()); range = trp.parse("02/01/1930"); assertEquals("1930-01-02", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); range = trp.parse("1930-01-02/02-01"); assertEquals("1930-01-02", range.getFrom().get().toString()); assertEquals("1930-02-01", range.getTo().get().toString()); range = trp.parse("1930-01-02/15"); assertEquals("1930-01-02", range.getFrom().get().toString()); assertEquals("1930-01-15", range.getTo().get().toString()); }
public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
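A hypothetical, unambiguous ISO interval (not in the source) goes through the single-argument parse overload, which simply delegates with null atomised fields:
TemporalRangeParser trp = TemporalRangeParser.builder().create();
EventRange range = trp.parse("2001-03-01/2001-04-15");

// The interval is split into its two endpoints.
assertEquals("2001-03-01", range.getFrom().get().toString());
assertEquals("2001-04-15", range.getTo().get().toString());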
@Test public void ambiguousDateTest() { TemporalRangeParser trp = TemporalRangeParser.builder().create(); EventRange range = trp.parse("01/02/1999"); assertTrue(range.hasIssues()); assertEquals(1, range.getIssues().size()); assertTrue(range.getIssues().contains(OccurrenceIssue.RECORDED_DATE_INVALID)); range = trp.parse("1999-01-02"); assertFalse(range.hasIssues()); assertEquals("1999-01-02", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); trp = TemporalRangeParser.builder() .temporalParser(TemporalParser.create(Arrays.asList(DMY_FORMATS))) .create(); range = trp.parse("01/02/1999"); assertFalse(range.hasIssues()); assertEquals("1999-02-01", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); range = trp.parse("1999-01-02"); assertFalse(range.hasIssues()); assertEquals("1999-01-02", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); }
public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
@Test public void alternativePayloadTest() { TemporalRangeParser trp = TemporalRangeParser.builder().create(); EventRange range = trp.parse("1999", "1", "2", "01/02/1999"); assertEquals("1999-01-02", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); range = trp.parse("1999", "1", null, "01/02/1999"); assertEquals("1999-01-02", range.getFrom().get().toString()); assertFalse(range.getTo().isPresent()); }
public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
@Test public void testYMDT() { TemporalRangeParser trp = TemporalRangeParser.builder() .temporalParser(TemporalParser.create(Collections.singletonList(DMY))) .create(); EventRange range = trp.parse("01/03/1930T12:01"); assertFalse(range.getFrom().isPresent()); trp = TemporalRangeParser.builder() .temporalParser(TemporalParser.create(Arrays.asList(DMY, DMYT))) .create(); range = trp.parse("01/03/1930T12:01"); assertEquals("1930-03-01T12:01", range.getFrom().get().toString()); }
public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
TemporalRangeParser implements Serializable { public EventRange parse(String dateRange) { return parse(null, null, null, dateRange); } @Builder(buildMethodName = "create") private TemporalRangeParser(TemporalParser temporalParser); EventRange parse(String dateRange); EventRange parse(String year, String month, String day, String dateRange); }
@Test public void isValidTest() { WikidataValidator validator = new WikidataValidator(); assertTrue(validator.isValid("https: assertTrue(validator.isValid("https: assertTrue(validator.isValid("https: assertTrue(validator.isValid("http: assertTrue(validator.isValid("http: assertTrue(validator.isValid("http: assertTrue(validator.isValid("wikidata.org/wiki/Property:P569")); assertTrue(validator.isValid("www.wikidata.org/wiki/Lexeme:L1")); assertTrue(validator.isValid("www.wikidata.org/entity/ID")); assertFalse(validator.isValid(null)); assertFalse(validator.isValid("")); assertFalse(validator.isValid("http.wikidata.org/entity/ID")); assertFalse(validator.isValid("ftp: assertFalse(validator.isValid("http: assertFalse(validator.isValid("https: assertFalse(validator.isValid("https: assertFalse(validator.isValid("https: assertFalse(validator.isValid("http: assertFalse(validator.isValid("https: assertFalse(validator.isValid("awdawdawd")); }
@Override public boolean isValid(String value) { if (Strings.isNullOrEmpty(value)) { return false; } Matcher matcher = WIKIDATA_PATTERN.matcher(value); return matcher.matches(); }
WikidataValidator implements IdentifierSchemeValidator { @Override public boolean isValid(String value) { if (Strings.isNullOrEmpty(value)) { return false; } Matcher matcher = WIKIDATA_PATTERN.matcher(value); return matcher.matches(); } }
WikidataValidator implements IdentifierSchemeValidator { @Override public boolean isValid(String value) { if (Strings.isNullOrEmpty(value)) { return false; } Matcher matcher = WIKIDATA_PATTERN.matcher(value); return matcher.matches(); } }
WikidataValidator implements IdentifierSchemeValidator { @Override public boolean isValid(String value) { if (Strings.isNullOrEmpty(value)) { return false; } Matcher matcher = WIKIDATA_PATTERN.matcher(value); return matcher.matches(); } @Override boolean isValid(String value); @Override String normalize(String value); }
WikidataValidator implements IdentifierSchemeValidator { @Override public boolean isValid(String value) { if (Strings.isNullOrEmpty(value)) { return false; } Matcher matcher = WIKIDATA_PATTERN.matcher(value); return matcher.matches(); } @Override boolean isValid(String value); @Override String normalize(String value); }
@Test public void normalizeTest() { WikidataValidator validator = new WikidataValidator(); assertEquals( "https: validator.normalize("https: assertEquals( "https: validator.normalize("https: assertEquals( "https: validator.normalize("https: assertEquals( "http: validator.normalize("http: assertEquals( "http: validator.normalize("http: assertEquals( "http: validator.normalize("http: assertEquals( "wikidata.org/wiki/Property:P569", validator.normalize("wikidata.org/wiki/Property:P569")); assertEquals( "www.wikidata.org/wiki/Lexeme:L1", validator.normalize("www.wikidata.org/wiki/Lexeme:L1")); assertEquals("www.wikidata.org/entity/ID", validator.normalize("www.wikidata.org/entity/ID")); }
@Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } @Override boolean isValid(String value); @Override String normalize(String value); }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } @Override boolean isValid(String value); @Override String normalize(String value); }
@Test(expected = IllegalArgumentException.class) public void normalizeExceptionTest() { WikidataValidator validator = new WikidataValidator(); validator.normalize("awdawd"); }
@Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } @Override boolean isValid(String value); @Override String normalize(String value); }
WikidataValidator implements IdentifierSchemeValidator { @Override public String normalize(String value) { Preconditions.checkNotNull(value, "Identifier value can't be null"); String trimmedValue = value.trim(); Matcher matcher = WIKIDATA_PATTERN.matcher(trimmedValue); if (matcher.matches()) { return value; } throw new IllegalArgumentException(value + " is not a valid Wikidata identifier"); } @Override boolean isValid(String value); @Override String normalize(String value); }
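A minimal sketch of using the validator listed above, guarding normalize with isValid so the IllegalArgumentException path is never hit for unrecognised values; only the two methods shown in the listings are used, and the sample value comes from the isValid test.

public class WikidataValidatorExample {
  public static void main(String[] args) {
    WikidataValidator validator = new WikidataValidator();
    String candidate = "www.wikidata.org/entity/ID";
    // normalize throws IllegalArgumentException for non-Wikidata values,
    // so check isValid first.
    if (validator.isValid(candidate)) {
      System.out.println(validator.normalize(candidate));
    } else {
      System.out.println("Not a recognised Wikidata identifier: " + candidate);
    }
  }
}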
@Test public void parseNullTest() { Set<AgentIdentifier> set = AgentIdentifierParser.parse(null); assertTrue(set.isEmpty()); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
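A hedged sketch of calling the parser listed above. The tests suggest DELIMITER splits on '|', that Wikidata-style values keep their WIKIDATA type and unrecognised values fall back to OTHER; the getType()/getValue() getters are an assumption mirroring the setType/setValue builders used in the tests, so treat them as illustrative.

import java.util.Set;

public class AgentIdentifierParserExample {
  public static void main(String[] args) {
    // Several identifiers can be supplied in one pipe-delimited raw string.
    String raw = "wikidata.org/wiki/0000|something";
    Set<AgentIdentifier> identifiers = AgentIdentifierParser.parse(raw);
    // Assumed Avro-style getters corresponding to the builder's setType/setValue.
    identifiers.forEach(id -> System.out.println(id.getType() + " -> " + id.getValue()));
  }
}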
@Test public void parseEmptyTest() { String raw = ""; Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertTrue(set.isEmpty()); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
@Test public void parseEmptyDelimiterTest() { String raw = "|||"; Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertTrue(set.isEmpty()); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
@Test public void testBadXmlFileReadWithBufferedReaderReadLines() throws IOException { String fileName = getClass().getResource("/responses/problematic/spanish_bad_xml.gz").getFile(); File file = new File(fileName); FileInputStream fis = new FileInputStream(file); GZIPInputStream inputStream = new GZIPInputStream(fis); StringBuilder sb = new StringBuilder(); try (BufferedReader buffReader = new BufferedReader( new XmlSanitizingReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)))) { while (buffReader.ready()) { String line = buffReader.readLine(); sb.append(line); } } assertEquals(6097, sb.toString().trim().length()); }
@Override public boolean ready() throws IOException { return (!endOfStreamReached && in.ready()); }
XmlSanitizingReader extends FilterReader { @Override public boolean ready() throws IOException { return (!endOfStreamReached && in.ready()); } }
XmlSanitizingReader extends FilterReader { @Override public boolean ready() throws IOException { return (!endOfStreamReached && in.ready()); } XmlSanitizingReader(Reader in); }
XmlSanitizingReader extends FilterReader { @Override public boolean ready() throws IOException { return (!endOfStreamReached && in.ready()); } XmlSanitizingReader(Reader in); @Override synchronized int read(); @Override synchronized int read(char[] buffer, int offset, int length); @Override boolean ready(); @Override synchronized void close(); @Override boolean markSupported(); }
XmlSanitizingReader extends FilterReader { @Override public boolean ready() throws IOException { return (!endOfStreamReached && in.ready()); } XmlSanitizingReader(Reader in); @Override synchronized int read(); @Override synchronized int read(char[] buffer, int offset, int length); @Override boolean ready(); @Override synchronized void close(); @Override boolean markSupported(); }
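A small sketch of the wrapping idiom from the gzip test above, applied to an in-memory reader. The class name suggests it filters characters that are not legal in XML, but that behaviour lives in the read(...) bodies not shown here, so this only demonstrates how the reader is composed; a null-terminated readLine loop is used instead of the test's ready()-based loop so the sketch does not depend on end-of-stream details.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class XmlSanitizingReaderExample {
  public static void main(String[] args) throws IOException {
    String xml = "<record><value>example</value></record>";
    // Wrap any Reader; BufferedReader adds line-oriented access on top.
    try (BufferedReader reader =
        new BufferedReader(new XmlSanitizingReader(new StringReader(xml)))) {
      String line;
      while ((line = reader.readLine()) != null) {
        System.out.println(line);
      }
    }
  }
}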
@Test public void parseOrcidTest() { Set<AgentIdentifier> expected = Collections.singleton( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.ORCID.name()) .setValue("https: .build()); String raw = "https: Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertFalse(set.isEmpty()); assertEquals(expected, set); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
@Test public void parseOrcidWithoutSchemaTest() { Set<AgentIdentifier> expected = Collections.singleton( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.ORCID.name()) .setValue("https: .build()); String raw = "0000-0002-0144-1997"; Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertFalse(set.isEmpty()); assertEquals(expected, set); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
@Test public void parseWikidataTest() { Set<AgentIdentifier> expected = Collections.singleton( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.WIKIDATA.name()) .setValue("https: .build()); String raw = "https: Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertFalse(set.isEmpty()); assertEquals(expected, set); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
@Test public void parseWikidataWithoutSchemaTest() { Set<AgentIdentifier> expected = Collections.singleton( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.WIKIDATA.name()) .setValue("wikidata.org/wiki/0000") .build()); String raw = "wikidata.org/wiki/0000"; Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertFalse(set.isEmpty()); assertEquals(expected, set); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
@Test public void parseOtherTest() { Set<AgentIdentifier> expected = Collections.singleton( AgentIdentifier.newBuilder() .setType(AgentIdentifierType.OTHER.name()) .setValue("something") .build()); String raw = "something"; Set<AgentIdentifier> set = AgentIdentifierParser.parse(raw); assertFalse(set.isEmpty()); assertEquals(expected, set); }
public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }
AgentIdentifierParser { public static Set<AgentIdentifier> parse(String raw) { if (Strings.isNullOrEmpty(raw)) { return Collections.emptySet(); } return Stream.of(raw.split(DELIMITER)) .map(String::trim) .map(AgentIdentifierParser::parseValue) .collect(Collectors.toSet()); } static Set<AgentIdentifier> parse(String raw); }