Columns: src_fm_fc_ms_ff (string, lengths 43 to 86.8k), target (string, lengths 20 to 276k)
DhcpRelayValidator implements Validator<Relay> { @VisibleForTesting void validateRelay(final Relay relay) { final boolean isIpv6 = Ipv6.class == relay.getAddressFamily(); try { isAddressCorrect(relay.getGatewayAddress(), isIpv6); } catch (IllegalArgumentException e) { throw new IllegalArgumentException(String.format("Gateway address validation error: %s", e.getMessage())); } checkArgument(relay.getServer() != null && !relay.getServer().isEmpty(), "At least one DHCP server needs to be configured"); for (final Server server : relay.getServer()) { validateServer(server, isIpv6); } } @Override void validateWrite(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay relay, @Nonnull final WriteContext writeContext); @Override void validateUpdate(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void validateDelete(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final WriteContext writeContext); }
@Test(expected = IllegalArgumentException.class) public void testMixedIpAddressFamilies( @InjectTestData(resourcePath = "/relay/ipv4DhcpRelay.json", id = RELAYS_PATH) Relays relays) { RelayBuilder builder = new RelayBuilder(); builder.fieldsFrom(extractRelay(relays)); builder.setGatewayAddress(IpAddressNoZoneBuilder.getDefaultInstance("2001::10")); validator.validateRelay(builder.build()); }
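The validator and test above reject a relay whose gateway address family does not match the relay's declared address family (an IPv6 gateway on an IPv4 relay). A minimal standalone sketch of that consistency check, using plain JDK types rather than the hc2vpp translator helpers; the class and method names here are illustrative only:

import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.UnknownHostException;

// Illustrative sketch (not project code): verify an address literal matches the expected family.
final class AddressFamilyCheck {
    static void checkAddress(final String address, final boolean expectIpv6) {
        final InetAddress parsed;
        try {
            parsed = InetAddress.getByName(address); // parses literals such as "1.2.3.4" or "2001::10"
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("Not a valid IP address: " + address, e);
        }
        final boolean isIpv6 = parsed instanceof Inet6Address;
        if (isIpv6 != expectIpv6) {
            throw new IllegalArgumentException(String.format(
                    "Gateway address validation error: %s is %s, expected %s",
                    address, isIpv6 ? "IPv6" : "IPv4", expectIpv6 ? "IPv6" : "IPv4"));
        }
    }

    public static void main(String[] args) {
        checkAddress("1.2.3.4", false);  // consistent, passes
        checkAddress("2001::10", false); // mixed families, throws like testMixedIpAddressFamilies expects
    }
}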
DhcpRelayCustomizer extends FutureJVppCustomizer implements ListWriterCustomizer<Relay, RelayKey>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator, Ipv4Translator { @Override public void writeCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext) throws WriteFailedException { LOG.debug("Writing Relay {} dataAfter={}", id, dataAfter); for (final Server server : dataAfter.getServer()) { setRelay(id, dataAfter, server, true); } } DhcpRelayCustomizer(final FutureJVppCore vppApi); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final WriteContext writeContext); }
@Test public void testWrite(@InjectTestData(resourcePath = "/relay/ipv4DhcpRelay.json", id = RELAYS_PATH) Relays relays) throws WriteFailedException { final Relay data = relays.getRelay().get(0); final int rxVrfId = 0; customizer.writeCurrentAttributes(getId(rxVrfId, Ipv4.class), data, writeContext); final DhcpProxyConfig request = new DhcpProxyConfig(); request.rxVrfId = rxVrfId; request.isIpv6 = 0; request.isAdd = 1; request.dhcpServer = new byte[] {1, 2, 3, 4}; request.dhcpSrcAddress = new byte[] {5, 6, 7, 8}; verify(api).dhcpProxyConfig(request); request.dhcpServer = new byte[] {1, 2, 3, 5}; verify(api).dhcpProxyConfig(request); }
DhcpRelayCustomizer extends FutureJVppCustomizer implements ListWriterCustomizer<Relay, RelayKey>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator, Ipv4Translator { @Override public void updateCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext) throws WriteFailedException { LOG.debug("Updating Relay {} before={} after={}", id, dataBefore, dataAfter); final List<Server> serversBefore = dataBefore.getServer(); final List<Server> serversAfter = dataAfter.getServer(); for (final Server server : serversBefore) { if (!serversAfter.contains(server)) { setRelay(id, dataAfter, server, false); } } for (final Server server : serversAfter) { if (!serversBefore.contains(server)) { setRelay(id, dataAfter, server, true); } } } DhcpRelayCustomizer(final FutureJVppCore vppApi); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final WriteContext writeContext); }
@Test public void testUpdate( @InjectTestData(resourcePath = "/relay/ipv6DhcpRelayBefore.json", id = RELAYS_PATH) Relays relaysBefore, @InjectTestData(resourcePath = "/relay/ipv6DhcpRelayAfter.json", id = RELAYS_PATH) Relays relayAfter) throws WriteFailedException { final Relay before = relaysBefore.getRelay().get(0); final Relay after = relayAfter.getRelay().get(0); final int rxVrfId = 1; customizer.updateCurrentAttributes(getId(rxVrfId, Ipv6.class), before, after, writeContext); final DhcpProxyConfig request = new DhcpProxyConfig(); request.rxVrfId = rxVrfId; request.serverVrfId = 2; request.isIpv6 = 1; request.isAdd = 0; request.dhcpServer = new byte[] {0x20, 0x01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x01}; request.dhcpSrcAddress = new byte[] {0x20, 0x01, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x02}; verify(api).dhcpProxyConfig(request); }
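updateCurrentAttributes above diffs the server lists: entries present only before the change are unconfigured, and entries present only after are configured. The same diff pattern in isolation, on plain strings (the data is illustrative):

import java.util.Arrays;
import java.util.List;

// Illustrative sketch of the before/after list diff used by updateCurrentAttributes.
final class ListDiffExample {
    public static void main(String[] args) {
        final List<String> serversBefore = Arrays.asList("10.0.0.1", "10.0.0.2");
        final List<String> serversAfter = Arrays.asList("10.0.0.2", "10.0.0.3");

        for (final String server : serversBefore) {
            if (!serversAfter.contains(server)) {
                System.out.println("remove relay entry for " + server); // 10.0.0.1
            }
        }
        for (final String server : serversAfter) {
            if (!serversBefore.contains(server)) {
                System.out.println("add relay entry for " + server); // 10.0.0.3
            }
        }
    }
}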
DhcpRelayCustomizer extends FutureJVppCustomizer implements ListWriterCustomizer<Relay, RelayKey>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator, Ipv4Translator { @Override public void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final WriteContext writeContext) throws WriteFailedException { LOG.debug("Removing Relay {} dataBefore={}", id, dataBefore); for (final Server server : dataBefore.getServer()) { setRelay(id, dataBefore, server, false); } } DhcpRelayCustomizer(final FutureJVppCore vppApi); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final Relay dataAfter, @Nonnull final WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay dataBefore, @Nonnull final WriteContext writeContext); }
@Test public void testDelete(@InjectTestData(resourcePath = "/relay/ipv4DhcpRelay.json", id = RELAYS_PATH) Relays relays) throws WriteFailedException { final Relay data = relays.getRelay().get(0); final int rxVrfId = 0; customizer.deleteCurrentAttributes(getId(rxVrfId, Ipv4.class), data, writeContext); final DhcpProxyConfig request = new DhcpProxyConfig(); request.rxVrfId = rxVrfId; request.isIpv6 = 0; request.isAdd = 0; request.dhcpServer = new byte[] {1, 2, 3, 4}; request.dhcpSrcAddress = new byte[] {5, 6, 7, 8}; verify(api).dhcpProxyConfig(request); request.dhcpServer = new byte[] {1, 2, 3, 5}; verify(api).dhcpProxyConfig(request); }
DhcpRelayCustomizer extends FutureJVppCustomizer implements InitializingListReaderCustomizer<Relay, RelayKey, RelayBuilder>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator, Ipv4Translator { @Nonnull @Override public List<RelayKey> getAllIds(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final ReadContext context) throws ReadFailedException { final Optional<DhcpProxyDetailsReplyDump> dump = dumpManager.getDump(id, context.getModificationCache()); if (!dump.isPresent() || dump.get().dhcpProxyDetails.isEmpty()) { return Collections.emptyList(); } return dump.get().dhcpProxyDetails.stream().map(detail -> new RelayKey(detail.isIpv6 == 1 ? Ipv6.class : Ipv4.class, new VniReference(UnsignedInts.toLong(detail.rxVrfId)))).collect(Collectors.toList()); } DhcpRelayCustomizer(final FutureJVppCore vppApi); @Nonnull @Override List<RelayKey> getAllIds(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final ReadContext context); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<Relay> readData); @Nonnull @Override RelayBuilder getBuilder(@Nonnull final InstanceIdentifier<Relay> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final RelayBuilder builder, @Nonnull final ReadContext ctx); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay readValue, @Nonnull final ReadContext ctx); }
@Test public void testGetAllIds() throws ReadFailedException { final List<RelayKey> allIds = getCustomizer().getAllIds(RELAYS.child(Relay.class), ctx); assertEquals(2, allIds.size()); assertThat(allIds, containsInAnyOrder(IP4_IID.getKey(), IP6_IID.getKey())); }
DhcpRelayCustomizer extends FutureJVppCustomizer implements InitializingListReaderCustomizer<Relay, RelayKey, RelayBuilder>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator, Ipv4Translator { @Override public void readCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final RelayBuilder builder, @Nonnull final ReadContext ctx) throws ReadFailedException { final Optional<DhcpProxyDetailsReplyDump> dump = dumpManager.getDump(id, ctx.getModificationCache()); if (!dump.isPresent() || dump.get().dhcpProxyDetails.isEmpty()) { return; } final RelayKey key = id.firstKeyOf(Relay.class); final byte isIpv6 = (byte) (Ipv6.class == key.getAddressFamily() ? 1 : 0); final int rxVrfId = key.getRxVrfId().getValue().intValue(); final java.util.Optional<DhcpProxyDetails> result = dump.get().dhcpProxyDetails.stream().filter(d -> d.isIpv6 == isIpv6 && d.rxVrfId == rxVrfId).findFirst(); if (result.isPresent()) { final DhcpProxyDetails detail = result.get(); builder.setAddressFamily(key.getAddressFamily()); builder.setRxVrfId(key.getRxVrfId()); final boolean isIp6 = byteToBoolean(detail.isIpv6); builder.setGatewayAddress(readAddress(detail.dhcpSrcAddress, isIp6)); if (detail.servers != null) { builder.setServer(Arrays.stream(detail.servers).map( server -> new ServerBuilder() .setAddress(readAddress(server.dhcpServer, isIp6)) .setVrfId(UnsignedInts.toLong(server.serverVrfId)) .build() ).collect(Collectors.toList())); } } } DhcpRelayCustomizer(final FutureJVppCore vppApi); @Nonnull @Override List<RelayKey> getAllIds(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final ReadContext context); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<Relay> readData); @Nonnull @Override RelayBuilder getBuilder(@Nonnull final InstanceIdentifier<Relay> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final RelayBuilder builder, @Nonnull final ReadContext ctx); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay readValue, @Nonnull final ReadContext ctx); }
@Test public void testReadIp4() throws ReadFailedException { final RelayBuilder builder = new RelayBuilder(); getCustomizer().readCurrentAttributes(IP4_IID, builder, ctx); assertEquals(IP4_IID.getKey().getAddressFamily(), builder.getAddressFamily()); assertEquals(IP4_IID.getKey().getRxVrfId(), builder.getRxVrfId()); assertEquals("1.2.3.4", builder.getGatewayAddress().stringValue()); final List<Server> server = builder.getServer(); assertEquals(2, server.size()); assertEquals(11L, server.get(0).getVrfId().longValue()); assertEquals("8.8.8.8", server.get(0).getAddress().stringValue()); assertEquals(12L, server.get(1).getVrfId().longValue()); assertEquals("8.8.8.4", server.get(1).getAddress().stringValue()); } @Test public void testReadIp6() throws ReadFailedException { final RelayBuilder builder = new RelayBuilder(); getCustomizer().readCurrentAttributes(IP6_IID, builder, ctx); assertEquals(IP6_IID.getKey().getAddressFamily(), builder.getAddressFamily()); assertEquals(IP6_IID.getKey().getRxVrfId(), builder.getRxVrfId()); assertEquals(22L, builder.getServer().get(0).getVrfId().longValue()); assertEquals("2001:db8:a0b:12f0::1", builder.getGatewayAddress().stringValue()); assertEquals("2001:db8:a0b:12f0::2", builder.getServer().get(0).getAddress().stringValue()); }
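readCurrentAttributes turns the raw byte arrays from the VPP dump (dhcpSrcAddress, each server's dhcpServer) back into textual addresses, which is what the tests assert ("1.2.3.4", "2001:db8:a0b:12f0::1", and so on). A standalone sketch of that conversion with java.net.InetAddress in place of the project's Ipv4Translator/Ipv6Translator traits; readAddress here is an illustrative stand-in, not the hc2vpp method:

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Arrays;

// Illustrative sketch: convert 4- or 16-byte dump buffers into address strings.
final class AddressBytesExample {
    static String readAddress(final byte[] raw, final boolean isIpv6) {
        // Trim (or zero-pad) to the family's expected length before parsing.
        final byte[] fixed = Arrays.copyOf(raw, isIpv6 ? 16 : 4);
        try {
            return InetAddress.getByAddress(fixed).getHostAddress();
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("Unexpected address length: " + fixed.length, e);
        }
    }

    public static void main(String[] args) {
        System.out.println(readAddress(new byte[]{1, 2, 3, 4}, false)); // 1.2.3.4
        System.out.println(readAddress(new byte[]{0x20, 0x01, 0x0d, (byte) 0xb8, 0x0a, 0x0b, 0x12, (byte) 0xf0,
                0, 0, 0, 0, 0, 0, 0, 1}, true)); // 2001:db8:a0b:12f0:0:0:0:1 (uncompressed form)
    }
}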
DhcpRelayCustomizer extends FutureJVppCustomizer implements InitializingListReaderCustomizer<Relay, RelayKey, RelayBuilder>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator, Ipv4Translator { @Nonnull @Override public Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay readValue, @Nonnull final ReadContext ctx) { return Initialized.create(id, readValue); } DhcpRelayCustomizer(final FutureJVppCore vppApi); @Nonnull @Override List<RelayKey> getAllIds(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final ReadContext context); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<Relay> readData); @Nonnull @Override RelayBuilder getBuilder(@Nonnull final InstanceIdentifier<Relay> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final RelayBuilder builder, @Nonnull final ReadContext ctx); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<Relay> id, @Nonnull final Relay readValue, @Nonnull final ReadContext ctx); }
@Test public void testInit() { final Relay data = new RelayBuilder().build(); invokeInitTest(IP4_IID, data, IP4_IID, data); }
AclValidator implements Validator<Acl>, AclDataExtractor { @Override public void validateUpdate(final InstanceIdentifier<Acl> id, final Acl dataBefore, final Acl dataAfter, final WriteContext ctx) throws UpdateValidationFailedException { try { validateAcl(dataAfter); } catch (RuntimeException e) { throw new UpdateValidationFailedException(id, dataBefore, dataAfter, e); } } @Override void validateWrite(final InstanceIdentifier<Acl> id, final Acl dataAfter, final WriteContext ctx); @Override void validateUpdate(final InstanceIdentifier<Acl> id, final Acl dataBefore, final Acl dataAfter, final WriteContext ctx); @Override void validateDelete(final InstanceIdentifier<Acl> id, final Acl dataBefore, final WriteContext ctx); }
@Test public void testValidateUpdate( @InjectTestData(resourcePath = "/acl/standard/standard-acl-icmp.json") Acls acls) throws DataValidationFailedException.UpdateValidationFailedException { final Acl data = acls.getAcl().get(0); validator.validateUpdate(ID, data, data, writeContext); } @Test(expected = DataValidationFailedException.UpdateValidationFailedException.class) public void testValidateUpdateUnsupportedType( @InjectTestData(resourcePath = "/acl/ipv4/ipv4-acl.json") Acls acls) throws DataValidationFailedException.UpdateValidationFailedException { final Acl data = acls.getAcl().get(0); validator.validateUpdate(ID, data, data, writeContext); }
BgpPrefixSidMplsWriter implements RouteWriter<LabeledUnicastRoute>, MplsRouteRequestProducer, IpRouteRequestProducer, JvppReplyConsumer { @Override public void create(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route) throws WriteFailedException.CreateFailedException { LOG.debug("Translating id={}, route={}", id, route); final MplsRouteAddDel mplsRequest = mplsRouteAddDelFor(route, true, LOG); getReplyForCreate(vppApi.mplsRouteAddDel(mplsRequest).toCompletableFuture(), id, route); mplsRequest.mrEos = 1; getReplyForCreate(vppApi.mplsRouteAddDel(mplsRequest).toCompletableFuture(), id, route); getReplyForCreate(vppApi.ipAddDelRoute(ipAddDelRouteFor(route, true)).toCompletableFuture(), id, route); LOG.debug("VPP FIB updated successfully (added id={}).", id); } BgpPrefixSidMplsWriter(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route); @Override void delete(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route); @Override void update(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute routeBefore, @Nonnull final LabeledUnicastRoute routeAfter); @Nonnull @Override InstanceIdentifier<LabeledUnicastRoute> getManagedDataObjectType(); }
@Test public void testCreate() throws WriteFailedException.CreateFailedException { final String routeKey = "route-key"; final PathId pathId = new PathId(123L); writer.create(id(pathId, routeKey), route(pathId, routeKey)); verify(vppApi, times(2)).mplsRouteAddDel(any()); verify(vppApi, atLeastOnce()).mplsRouteAddDel(getRequest(true, true)); verify(vppApi).ipAddDelRoute(getRequest(true)); }
BgpPrefixSidMplsWriter implements RouteWriter<LabeledUnicastRoute>, MplsRouteRequestProducer, IpRouteRequestProducer, JvppReplyConsumer { @Override public void delete(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route) throws WriteFailedException.DeleteFailedException { LOG.debug("Removing id={}, route={}", id, route); final MplsRouteAddDel mplsRequest = mplsRouteAddDelFor(route, false, LOG); getReplyForDelete(vppApi.mplsRouteAddDel(mplsRequest).toCompletableFuture(), id); mplsRequest.mrEos = 1; getReplyForDelete(vppApi.mplsRouteAddDel(mplsRequest).toCompletableFuture(), id); getReplyForDelete(vppApi.ipAddDelRoute(ipAddDelRouteFor(route, false)).toCompletableFuture(), id); LOG.debug("VPP FIB updated successfully (removed id={}).", id); } BgpPrefixSidMplsWriter(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route); @Override void delete(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route); @Override void update(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute routeBefore, @Nonnull final LabeledUnicastRoute routeAfter); @Nonnull @Override InstanceIdentifier<LabeledUnicastRoute> getManagedDataObjectType(); }
@Test public void testDelete() throws WriteFailedException.DeleteFailedException { final String routeKey = "route-key"; final PathId pathId = new PathId(123L); writer.delete(id(pathId, routeKey), route(pathId, routeKey)); verify(vppApi, times(2)).mplsRouteAddDel(any()); verify(vppApi, atLeastOnce()).mplsRouteAddDel(getRequest(false, true)); verify(vppApi).ipAddDelRoute(getRequest(false)); }
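create and delete above both program the MPLS FIB twice per labeled route, once with the end-of-stack (EOS) bit cleared and once with it set, plus a single IP FIB entry for the prefix, which is why the tests verify mplsRouteAddDel exactly twice. A hedged sketch of that request fan-out using a hypothetical request holder instead of the real jVpp MplsRouteAddDel DTO:

import java.util.ArrayList;
import java.util.List;

// Illustrative sketch only; MplsRouteRequest is a hypothetical stand-in for the jVpp DTO.
final class MplsEosExample {
    static final class MplsRouteRequest {
        int label;
        byte isAdd;
        byte mrEos; // 0 = non-EOS entry, 1 = EOS entry
    }

    static List<MplsRouteRequest> requestsFor(final int label, final boolean add) {
        final List<MplsRouteRequest> requests = new ArrayList<>();
        for (byte eos = 0; eos <= 1; eos++) {
            final MplsRouteRequest request = new MplsRouteRequest();
            request.label = label;
            request.isAdd = (byte) (add ? 1 : 0);
            request.mrEos = eos;
            requests.add(request);
        }
        return requests;
    }

    public static void main(String[] args) {
        // Two MPLS requests per route (EOS cleared and set), mirroring times(2) in the tests.
        System.out.println(requestsFor(16000, true).size()); // 2
    }
}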
BgpPrefixSidMplsWriter implements RouteWriter<LabeledUnicastRoute>, MplsRouteRequestProducer, IpRouteRequestProducer, JvppReplyConsumer { @Override public void update(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute routeBefore, @Nonnull final LabeledUnicastRoute routeAfter) throws WriteFailedException.UpdateFailedException { throw new WriteFailedException.UpdateFailedException(id, routeBefore, routeAfter, new UnsupportedOperationException("Operation not supported")); } BgpPrefixSidMplsWriter(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route); @Override void delete(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute route); @Override void update(@Nonnull final InstanceIdentifier<LabeledUnicastRoute> id, @Nonnull final LabeledUnicastRoute routeBefore, @Nonnull final LabeledUnicastRoute routeAfter); @Nonnull @Override InstanceIdentifier<LabeledUnicastRoute> getManagedDataObjectType(); }
@Test(expected = WriteFailedException.UpdateFailedException.class) public void testUpdate() throws WriteFailedException.UpdateFailedException { final String routeKey = "route-key"; final PathId pathId = new PathId(123L); writer.update(id(pathId, routeKey), mock(LabeledUnicastRoute.class), mock(LabeledUnicastRoute.class)); verifyZeroInteractions(vppApi); }
Ipv4Writer implements RouteWriter<Ipv4Route>, Ipv4Translator, JvppReplyConsumer, RouteRequestProducer { @Override public void create(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route) throws WriteFailedException.CreateFailedException { final IpAddDelRoute request = request(route, true); LOG.debug("Translating id={}, route={} to {}", id, route, request); getReplyForCreate(vppApi.ipAddDelRoute(request).toCompletableFuture(), id, route); LOG.debug("VPP FIB updated successfully (added id={}).", id); } Ipv4Writer(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route); @Override void delete(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route); @Override void update(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route routeBefore, @Nonnull final Ipv4Route routeAfter); @Nonnull @Override InstanceIdentifier<Ipv4Route> getManagedDataObjectType(); }
@Test public void testCreate() throws WriteFailedException.CreateFailedException { final Ipv4Prefix destination = new Ipv4Prefix("1.2.3.4/24"); final PathId pathId = new PathId(123L); final Ipv4Address nextHopAddress = new Ipv4AddressNoZone("5.6.7.8"); writer.create( id(destination, pathId), route(destination, pathId, nextHopAddress) ); verifyRequest(true); }
Ipv4Writer implements RouteWriter<Ipv4Route>, Ipv4Translator, JvppReplyConsumer, RouteRequestProducer { @Override public void delete(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route) throws WriteFailedException.DeleteFailedException { LOG.debug("Removing id={}, route={}", id, route); getReplyForDelete(vppApi.ipAddDelRoute(request(route, false)).toCompletableFuture(), id); LOG.debug("VPP FIB updated successfully (removed id={}).", id); } Ipv4Writer(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route); @Override void delete(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route); @Override void update(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route routeBefore, @Nonnull final Ipv4Route routeAfter); @Nonnull @Override InstanceIdentifier<Ipv4Route> getManagedDataObjectType(); }
@Test public void testDelete() throws WriteFailedException.DeleteFailedException { final Ipv4Prefix destination = new Ipv4Prefix("1.2.3.4/24"); final PathId pathId = new PathId(456L); final Ipv4Address nextHopAddress = new Ipv4AddressNoZone("5.6.7.8"); writer.delete( id(destination, pathId), route(destination, pathId, nextHopAddress) ); verifyRequest(false); }
Ipv4Writer implements RouteWriter<Ipv4Route>, Ipv4Translator, JvppReplyConsumer, RouteRequestProducer { @Override public void update(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route routeBefore, @Nonnull final Ipv4Route routeAfter) throws WriteFailedException.UpdateFailedException { throw new WriteFailedException.UpdateFailedException(id, routeBefore, routeAfter, new UnsupportedOperationException("Operation not supported")); } Ipv4Writer(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route); @Override void delete(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route route); @Override void update(@Nonnull final InstanceIdentifier<Ipv4Route> id, @Nonnull final Ipv4Route routeBefore, @Nonnull final Ipv4Route routeAfter); @Nonnull @Override InstanceIdentifier<Ipv4Route> getManagedDataObjectType(); }
@Test(expected = WriteFailedException.UpdateFailedException.class) public void testUpdate() throws WriteFailedException.UpdateFailedException { final Ipv4Prefix destination = new Ipv4Prefix("10.1.0.1/28"); final PathId pathId = new PathId(456L); writer.update(id(destination, pathId), mock(Ipv4Route.class), mock(Ipv4Route.class)); }
Ipv6Writer implements RouteWriter<Ipv6Route>, Ipv6Translator, JvppReplyConsumer, RouteRequestProducer { @Override public void create(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route) throws WriteFailedException.CreateFailedException { final IpAddDelRoute request = request(route, true); LOG.debug("Translating id={}, route={} to {}", id, route, request); getReplyForCreate(vppApi.ipAddDelRoute(request).toCompletableFuture(), id, route); LOG.debug("VPP FIB updated successfully (added id={}).", id); } Ipv6Writer(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route); @Override void delete(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route); @Override void update(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route routeBefore, @Nonnull final Ipv6Route routeAfter); @Nonnull @Override InstanceIdentifier<Ipv6Route> getManagedDataObjectType(); }
@Test public void testCreate() throws WriteFailedException.CreateFailedException { final Ipv6Prefix destination = new Ipv6Prefix("2001:db8:a0b:12f0:0:0:0:1/64"); final PathId pathId = new PathId(123L); final Ipv6Address nextHopAddress = new Ipv6AddressNoZone("2001:db8:a0b:12f0:0:0:0:2"); writer.create( id(destination, pathId), route(destination, pathId, nextHopAddress) ); verifyRequest(true); }
Ipv6Writer implements RouteWriter<Ipv6Route>, Ipv6Translator, JvppReplyConsumer, RouteRequestProducer { @Override public void delete(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route) throws WriteFailedException.DeleteFailedException { LOG.debug("Removing id={}, route={}", id, route); getReplyForDelete(vppApi.ipAddDelRoute(request(route, false)).toCompletableFuture(), id); LOG.debug("VPP FIB updated successfully (removed id={}).", id); } Ipv6Writer(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route); @Override void delete(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route); @Override void update(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route routeBefore, @Nonnull final Ipv6Route routeAfter); @Nonnull @Override InstanceIdentifier<Ipv6Route> getManagedDataObjectType(); }
@Test public void testDelete() throws WriteFailedException.DeleteFailedException { final Ipv6Prefix destination = new Ipv6Prefix("2001:db8:a0b:12f0:0:0:0:1/64"); final PathId pathId = new PathId(456L); final Ipv6Address nextHopAddress = new Ipv6AddressNoZone("2001:db8:a0b:12f0:0:0:0:2"); writer.delete( id(destination, pathId), route(destination, pathId, nextHopAddress) ); verifyRequest(false); }
Ipv6Writer implements RouteWriter<Ipv6Route>, Ipv6Translator, JvppReplyConsumer, RouteRequestProducer { @Override public void update(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route routeBefore, @Nonnull final Ipv6Route routeAfter) throws WriteFailedException.UpdateFailedException { throw new WriteFailedException.UpdateFailedException(id, routeBefore, routeAfter, new UnsupportedOperationException("Operation not supported")); } Ipv6Writer(@Nonnull final FutureJVppCore vppApi); @Override void create(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route); @Override void delete(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route route); @Override void update(@Nonnull final InstanceIdentifier<Ipv6Route> id, @Nonnull final Ipv6Route routeBefore, @Nonnull final Ipv6Route routeAfter); @Nonnull @Override InstanceIdentifier<Ipv6Route> getManagedDataObjectType(); }
@Test(expected = WriteFailedException.UpdateFailedException.class) public void testUpdate() throws WriteFailedException.UpdateFailedException { final Ipv6Prefix destination = new Ipv6Prefix("2001:db8:a0b:12f0:0:0:0:1/64"); final PathId pathId = new PathId(456L); writer.update(id(destination, pathId), mock(Ipv6Route.class), mock(Ipv6Route.class)); }
NshMapReaderCustomizer extends FutureJVppNshCustomizer implements InitializingListReaderCustomizer<NshMap, NshMapKey, NshMapBuilder>, JvppReplyConsumer { @Nonnull @Override public List<NshMapKey> getAllIds(@Nonnull final InstanceIdentifier<NshMap> id, @Nonnull final ReadContext context) throws ReadFailedException { LOG.debug("Reading list of keys for nsh map: {}", id); final NshMapDump request = new NshMapDump(); request.mapIndex = -1; NshMapDetailsReplyDump reply; try { reply = getFutureJVppNsh().nshMapDump(request).toCompletableFuture().get(); } catch (Exception e) { throw new IllegalStateException("Nsh Map dump failed", e); } if (reply == null || reply.nshMapDetails == null) { return Collections.emptyList(); } final int nIdsLength = reply.nshMapDetails.size(); LOG.debug("vppstate.NshMapCustomizer.getAllIds: nIds.length={}", nIdsLength); if (nIdsLength == 0) { return Collections.emptyList(); } final List<NshMapKey> allIds = new ArrayList<>(nIdsLength); for (NshMapDetails detail : reply.nshMapDetails) { final String nshName = nshMapContext.getName(detail.mapIndex, context.getMappingContext()); LOG.debug("vppstate.NshMapCustomizer.getAllIds: nName={}", nshName); allIds.add(new NshMapKey(nshName)); } return allIds; } NshMapReaderCustomizer(@Nonnull final FutureJVppNsh futureJVppNsh, @Nonnull final NamingContext nshMapContext, @Nonnull final NamingContext interfaceContext); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<NshMap> readData); @Nonnull @Override NshMapBuilder getBuilder(@Nonnull final InstanceIdentifier<NshMap> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<NshMap> id, @Nonnull final NshMapBuilder builder, @Nonnull final ReadContext ctx); @Nonnull @Override List<NshMapKey> getAllIds(@Nonnull final InstanceIdentifier<NshMap> id, @Nonnull final ReadContext context); @Override Initialized<org.opendaylight.yang.gen.v1.http.fd.io.hc2vpp.yang.vpp.nsh.rev170315.vpp.nsh.nsh.maps.NshMap> init( @Nonnull final InstanceIdentifier<NshMap> id, @Nonnull final NshMap readValue, @Nonnull final ReadContext ctx); }
@Test public void testGetAllIds() throws ReadFailedException { final NshMapDetailsReplyDump reply = new NshMapDetailsReplyDump(); final NshMapDetails nshMapDetails_1 = new NshMapDetails(); nshMapDetails_1.mapIndex = MAP_INDEX_1; nshMapDetails_1.nspNsi = (184<<8 | 255); nshMapDetails_1.mappedNspNsi = (183<<8 | 254); nshMapDetails_1.swIfIndex = ITF_INDEX; nshMapDetails_1.nextNode = 2; reply.nshMapDetails = Lists.newArrayList(nshMapDetails_1); final NshMapDetails nshMapDetails_2 = new NshMapDetails(); nshMapDetails_2.mapIndex = MAP_INDEX_2; nshMapDetails_2.nspNsi = (84<<8 | 255); nshMapDetails_2.mappedNspNsi = (83<<8 | 254); nshMapDetails_2.swIfIndex = ITF_INDEX; nshMapDetails_2.nextNode = 1; reply.nshMapDetails.add(nshMapDetails_2); doReturn(future(reply)).when(jvppNsh).nshMapDump(any(NshMapDump.class)); final List<NshMapKey> allIds = getCustomizer().getAllIds(getNshMapId(MAP_NAME_1), ctx); assertEquals(reply.nshMapDetails.size(), allIds.size()); }
AclValidator implements Validator<Acl>, AclDataExtractor { @Override public void validateDelete(final InstanceIdentifier<Acl> id, final Acl dataBefore, final WriteContext ctx) throws DeleteValidationFailedException { try { validateAcl(dataBefore); final List<String> references = checkAclReferenced(ctx, dataBefore); checkState(references.isEmpty(), "%s cannot be removed, it is referenced in following interfaces %s", dataBefore, references); } catch (RuntimeException e) { throw new DeleteValidationFailedException(id, e); } } @Override void validateWrite(final InstanceIdentifier<Acl> id, final Acl dataAfter, final WriteContext ctx); @Override void validateUpdate(final InstanceIdentifier<Acl> id, final Acl dataBefore, final Acl dataAfter, final WriteContext ctx); @Override void validateDelete(final InstanceIdentifier<Acl> id, final Acl dataBefore, final WriteContext ctx); }
@Test public void testValidateDelete( @InjectTestData(resourcePath = "/acl/standard/standard-acl-icmp.json") Acls acls) throws DataValidationFailedException.DeleteValidationFailedException { validator.validateDelete(ID, acls.getAcl().get(0), writeContext); } @Test(expected = DataValidationFailedException.DeleteValidationFailedException.class) public void testValidateDeleteReferenced( @InjectTestData(resourcePath = "/acl/standard/standard-acl-udp.json") Acls standardAcls, @InjectTestData(id = "/ietf-access-control-list:acls/ietf-access-control-list:attachment-points", resourcePath = "/acl/standard/interface-ref-acl-udp.json") AttachmentPoints references) throws Exception { when(writeContext.readAfter(AclIIds.ACLS_AP)).thenReturn( Optional.of(new AttachmentPointsBuilder().setInterface(references.getInterface()).build())); validator.validateDelete(ID, standardAcls.getAcl().get(0), writeContext); }
AclValidator implements Validator<Acl>, AclDataExtractor { @VisibleForTesting static List<String> checkAclReferenced(@Nonnull final WriteContext writeContext, @Nonnull final Acl acl) { Preconditions.checkNotNull(acl.getType(), "Cannot validate acl: %s, type is not set.", acl); if (!acl.getType().equals(VppAcl.class) && !acl.getType().equals(VppMacipAcl.class)) { throw new IllegalArgumentException(String.format("Acl type %s not supported", acl.getType())); } Optional<AttachmentPoints> attachmentPointsOpt = writeContext.readAfter(AclIIds.ACLS_AP); if (!attachmentPointsOpt.isPresent() || attachmentPointsOpt.get().getInterface() == null) { return Collections.emptyList(); } final List<Interface> interfaces = attachmentPointsOpt.get().getInterface(); if (interfaces == null) { return Collections.emptyList(); } final String aclName = acl.getName(); HashMap<String, AclSets> sets = getIngressAclSets(interfaces); sets.putAll(getEgressAclSets(interfaces)); List<String> referencedIfcs = new ArrayList<>(); sets.forEach((ifc, aclSets) -> { if (aclSets.getAclSet() != null) { if (aclSets.getAclSet().stream() .map(AclSet::getName) .filter(Objects::nonNull) .anyMatch(name -> name.equalsIgnoreCase(aclName))) { referencedIfcs.add(ifc); } } }); return referencedIfcs.stream().distinct().collect(Collectors.toList()); } @Override void validateWrite(final InstanceIdentifier<Acl> id, final Acl dataAfter, final WriteContext ctx); @Override void validateUpdate(final InstanceIdentifier<Acl> id, final Acl dataBefore, final Acl dataAfter, final WriteContext ctx); @Override void validateDelete(final InstanceIdentifier<Acl> id, final Acl dataBefore, final WriteContext ctx); }
@Test public void testReferencedVppAclFirst() { final List<String> referenced = checkAclReferenced(writeContext, new AclBuilder() .setName("acl1").setType(VppAcl.class).build()); assertThat(referenced, hasSize(3)); assertThat(new HashSet<>(referenced), containsInAnyOrder("eth0", "eth1", "eth2")); } @Test public void testReferencedVppAclSecond() { final List<String> referenced = checkAclReferenced(writeContext, new AclBuilder() .setName("acl2").setType(VppAcl.class).build()); assertThat(referenced, hasSize(1)); assertThat(new HashSet<>(referenced), containsInAnyOrder("eth1")); } @Test public void testReferencedMacipAcl() { final List<String> referenced = checkAclReferenced(writeContext, new AclBuilder() .setName("acl4").setType(VppMacipAcl.class).build()); assertThat(referenced, hasSize(1)); assertThat(new HashSet<>(referenced), containsInAnyOrder("eth2")); }
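checkAclReferenced collects every interface whose ingress or egress acl-sets still name the ACL (compared case-insensitively, de-duplicated), and validateDelete refuses to remove an ACL while that list is non-empty. The same lookup in isolation over a plain map; the interface and ACL names mirror the tests, but the types are simplified stand-ins:

import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

// Illustrative sketch of the "which interfaces still reference this ACL" lookup.
final class AclReferenceExample {
    static List<String> referencedInterfaces(final Map<String, List<String>> aclSetsPerInterface,
                                             final String aclName) {
        return aclSetsPerInterface.entrySet().stream()
                .filter(entry -> entry.getValue().stream().anyMatch(name -> name.equalsIgnoreCase(aclName)))
                .map(Map.Entry::getKey)
                .distinct()
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        final Map<String, List<String>> attachments = new LinkedHashMap<>();
        attachments.put("eth0", Arrays.asList("acl1"));
        attachments.put("eth1", Arrays.asList("acl1", "acl2"));
        attachments.put("eth2", Arrays.asList("acl1", "acl4"));

        System.out.println(referencedInterfaces(attachments, "acl1")); // [eth0, eth1, eth2]
        System.out.println(referencedInterfaces(attachments, "acl2")); // [eth1]
    }
}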
EgressAclCustomizer extends AbstractAclCustomizer implements InitializingListReaderCustomizer<AclSet, AclSetKey, AclSetBuilder>, JvppReplyConsumer { @Nonnull @Override public List<AclSetKey> getAllIds(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final ReadContext readContext) throws ReadFailedException { final String parentInterfaceName = instanceIdentifier.firstKeyOf(Interface.class).getInterfaceId(); final int parentInterfaceIndex = interfaceContext.getIndex(parentInterfaceName, readContext.getMappingContext()); final Optional<AclInterfaceListDetailsReplyDump> stdDumpReply = aclReferenceDumpManager .getDump(instanceIdentifier, readContext.getModificationCache(), parentInterfaceIndex); return getStandardAclSetKeys(readContext, stdDumpReply, false).collect(Collectors.toList()); } EgressAclCustomizer(final FutureJVppAclFacade futureAclFacade, final NamingContext interfaceContext, final AclContextManager standardAclContext); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final AclSet aclSet, @Nonnull final ReadContext readContext); @Nonnull @Override List<AclSetKey> getAllIds(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final ReadContext readContext); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<AclSet> list); @Nonnull @Override AclSetBuilder getBuilder(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final AclSetBuilder aclSetBuilder, @Nonnull final ReadContext readContext); }
@Test public void testGetAllIdsNoOutputAclConfigured() throws ReadFailedException { final AclInterfaceListDetailsReplyDump reply = aclInterfaceDump((byte) 1, "acl1"); when(aclApi.aclInterfaceListDump(any())).thenReturn(future(reply)); assertTrue(getCustomizer().getAllIds(getWildcardedIid(IF_NAME), ctx).isEmpty()); } @Test public void testGetAllIds() throws ReadFailedException { final AclInterfaceListDetailsReplyDump reply = aclInterfaceDump((byte) 2, "acl1", "acl2", "acl3"); when(aclApi.aclInterfaceListDump(any())).thenReturn(future(reply)); assertEquals(1, getCustomizer().getAllIds(getWildcardedIid(IF_NAME), ctx).size()); }
AclCustomizer extends FutureJVppAclCustomizer implements InitializingListReaderCustomizer<Acl, AclKey, AclBuilder>, JvppReplyConsumer, Ipv6Translator, Ipv4Translator, IpProtocolReader, AceConverter { @Nonnull @Override public List<AclKey> getAllIds(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final ReadContext context) throws ReadFailedException { final List<AclKey> keys = new ArrayList<>(); final Optional<AclDetailsReplyDump> vppAclDump = vppAclDumpManager.getDump(id, context.getModificationCache(), READ_ALL); if (vppAclDump.isPresent()) { vppAclDump.get().aclDetails.stream() .map(details -> standardAclContext.getAclName(details.aclIndex, context.getMappingContext())) .forEach(name -> keys.add(new AclKey(name))); } final Optional<MacipAclDetailsReplyDump> macipAclDump = macipAclDumpManager.getDump(id, context.getModificationCache(), READ_ALL); if (macipAclDump.isPresent()) { macipAclDump.get().macipAclDetails.stream() .map(details -> macipAclContext.getAclName(details.aclIndex, context.getMappingContext())) .forEach(name -> keys.add(new AclKey(name))); } return keys; } AclCustomizer(@Nonnull final FutureJVppAclFacade jVppAclFacade, @Nonnull final AclContextManager standardAclContext, @Nonnull final AclContextManager macipAclContext); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final Acl readValue, @Nonnull final ReadContext ctx); @Nonnull @Override List<AclKey> getAllIds(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final ReadContext context); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<Acl> readData); @Nonnull @Override AclBuilder getBuilder(@Nonnull final InstanceIdentifier<Acl> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final AclBuilder builder, @Nonnull final ReadContext ctx); }
@Test public void testGetAllIds() throws ReadFailedException { final List<AclKey> allIds = getCustomizer().getAllIds(InstanceIdentifier.create(Acls.class).child(Acl.class), ctx); assertEquals(2, allIds.size()); assertEquals(ACL_IID.getKey(), allIds.get(0)); assertEquals(MACIP_ACL_IID.getKey(), allIds.get(1)); }
AclCustomizer extends FutureJVppAclCustomizer implements InitializingListReaderCustomizer<Acl, AclKey, AclBuilder>, JvppReplyConsumer, Ipv6Translator, Ipv4Translator, IpProtocolReader, AceConverter { @Override public void readCurrentAttributes(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final AclBuilder builder, @Nonnull final ReadContext ctx) throws ReadFailedException { final AclKey key = id.firstKeyOf(Acl.class); builder.withKey(key); final String name = key.getName(); if (standardAclContext.containsAcl(name, ctx.getMappingContext())) { final int index = standardAclContext.getAclIndex(name, ctx.getMappingContext()); final Optional<AclDetailsReplyDump> dump = vppAclDumpManager.getDump(id, ctx.getModificationCache(), index); if (dump.isPresent() && !dump.get().aclDetails.isEmpty()) { final java.util.Optional<AclDetails> detail = dump.get().aclDetails.stream() .filter(acl -> acl.aclIndex == index).findFirst(); if (detail.isPresent()) { final AclDetails aclDetails = detail.get(); setTag(builder, aclDetails.tag); builder.setAces(new AcesBuilder() .setAce(toStandardAces(name, aclDetails.r, standardAclContext, ctx.getMappingContext())) .build()); } } } else if (macipAclContext.containsAcl(name, ctx.getMappingContext())) { final int index = macipAclContext.getAclIndex(name, ctx.getMappingContext()); final Optional<MacipAclDetailsReplyDump> dump = macipAclDumpManager.getDump(id, ctx.getModificationCache(), index); if (dump.isPresent() && !dump.get().macipAclDetails.isEmpty()) { final java.util.Optional<MacipAclDetails> detail = dump.get().macipAclDetails.stream().filter(acl -> acl.aclIndex == index).findFirst(); if (detail.isPresent()) { final MacipAclDetails macipAclDetails = detail.get(); setTag(builder, macipAclDetails.tag); builder.setAces(new AcesBuilder() .setAce(toMacIpAces(name, macipAclDetails.r, macipAclContext, ctx.getMappingContext())) .build()); } } } else { throw new IllegalArgumentException("Unsupported acl: " + id); } } AclCustomizer(@Nonnull final FutureJVppAclFacade jVppAclFacade, @Nonnull final AclContextManager standardAclContext, @Nonnull final AclContextManager macipAclContext); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final Acl readValue, @Nonnull final ReadContext ctx); @Nonnull @Override List<AclKey> getAllIds(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final ReadContext context); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<Acl> readData); @Nonnull @Override AclBuilder getBuilder(@Nonnull final InstanceIdentifier<Acl> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<Acl> id, @Nonnull final AclBuilder builder, @Nonnull final ReadContext ctx); }
@Test public void testReadStandardAcl() throws ReadFailedException { final AclBuilder builder = new AclBuilder(); getCustomizer().readCurrentAttributes(ACL_IID, builder, ctx); assertEquals(ACL_IID.getKey(), builder.key()); final List<Ace> aces = builder.getAces().getAce(); assertEquals(1, aces.size()); final Ace ace = aces.get(0); assertEquals(ACE_NAME, ace.key().getName()); assertTrue(ace.getActions().getForwarding().equals(Drop.class)); final L4 l4 = ((ace.getMatches())).getL4(); assertEquals(Icmp.class, l4.getImplementedInterface()); } @Test public void testReadMacipAcl() throws ReadFailedException { final AclBuilder builder = new AclBuilder(); getCustomizer().readCurrentAttributes(MACIP_ACL_IID, builder, ctx); assertEquals(MACIP_ACL_IID.getKey(), builder.key()); final List<Ace> aces = builder.getAces().getAce(); assertEquals(1, aces.size()); final Ace ace = aces.get(0); assertEquals(MACIP_ACE_NAME, ace.key().getName()); assertTrue(ace.getActions().getForwarding().equals(Drop.class)); }
NshEntryReaderCustomizer extends FutureJVppNshCustomizer implements InitializingListReaderCustomizer<NshEntry, NshEntryKey, NshEntryBuilder>, JvppReplyConsumer { @Override public void readCurrentAttributes(@Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final NshEntryBuilder builder, @Nonnull final ReadContext ctx) throws ReadFailedException { LOG.debug("Reading attributes for nsh entry: {}", id); final NshEntryKey key = id.firstKeyOf(NshEntry.class); checkArgument(key != null, "could not find NshEntry key in {}", id); final NshEntryDump request = new NshEntryDump(); final String entryName = key.getName(); if (!nshEntryContext.containsIndex(entryName, ctx.getMappingContext())) { LOG.debug("Could not find nsh entry {} in the naming context", entryName); return; } request.entryIndex = nshEntryContext.getIndex(entryName, ctx.getMappingContext()); final CompletionStage<NshEntryDetailsReplyDump> nshEntryDetailsReplyDumpCompletionStage = getFutureJVppNsh().nshEntryDump(request); final NshEntryDetailsReplyDump reply = getReplyForRead(nshEntryDetailsReplyDumpCompletionStage.toCompletableFuture(), id); if (reply == null || reply.nshEntryDetails == null || reply.nshEntryDetails.isEmpty()) { LOG.debug("Has no Nsh Entry {} in VPP. ", key.getName()); return; } LOG.trace("Nsh Entry : {} attributes returned from VPP: {}", key.getName(), reply); final NshEntryDetails nshEntryDetails = reply.nshEntryDetails.get(0); builder.setName(entryName); builder.withKey(key); builder.setVersion((short) nshEntryDetails.verOC); builder.setLength((short) nshEntryDetails.length); switch (nshEntryDetails.nextProtocol) { case 1: builder.setNextProtocol(Ipv4.class); break; case 2: builder.setNextProtocol(Ipv6.class); break; case 3: builder.setNextProtocol(Ethernet.class); break; default: LOG.trace("Unsupported next protocol for nsh entry: {}", nshEntryDetails.nextProtocol); return; } switch (nshEntryDetails.mdType) { case 1: { builder.setMdType(MdType1.class); setNshEntryMdType1Augment(builder, nshEntryDetails); break; } case 2: { builder.setMdType(MdType2.class); setNshEntryMdType2Augment(builder, nshEntryDetails); break; } default: LOG.trace("Unsupported Mdtype for nsh entry: {}", nshEntryDetails.mdType); return; } builder.setNsp((long) ((nshEntryDetails.nspNsi >> 8) & 0xFFFFFF)); builder.setNsi((short) (nshEntryDetails.nspNsi & 0xFF)); if (LOG.isTraceEnabled()) { LOG.trace("Attributes for nsh entry {} successfully read: {}", id, builder.build()); } } NshEntryReaderCustomizer(@Nonnull final FutureJVppNsh futureJVppNsh, @Nonnull final NamingContext nshEntryContext); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<NshEntry> readData); @Nonnull @Override NshEntryBuilder getBuilder(@Nonnull final InstanceIdentifier<NshEntry> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final NshEntryBuilder builder, @Nonnull final ReadContext ctx); @Nonnull @Override List<NshEntryKey> getAllIds(@Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final ReadContext context); @Override Initialized<org.opendaylight.yang.gen.v1.http.fd.io.hc2vpp.yang.vpp.nsh.rev170315.vpp.nsh.nsh.entries.NshEntry> init( @Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final NshEntry readValue, @Nonnull final ReadContext ctx); }
@Test public void testReadCurrentAttributes() throws ReadFailedException { NshEntryBuilder builder = new NshEntryBuilder(); getCustomizer().readCurrentAttributes(getNshEntryId(ENTRY_NAME_1), builder, ctx); assertEquals(0, builder.getVersion().intValue()); assertEquals(6, builder.getLength().intValue()); assertEquals(MdType1.class, builder.getMdType()); assertEquals(Ethernet.class, builder.getNextProtocol()); assertEquals(123, builder.getNsp().intValue()); assertEquals(4, builder.getNsi().intValue()); assertEquals(1, builder.augmentation(NshMdType1StateAugment.class).getC1().intValue()); assertEquals(2, builder.augmentation(NshMdType1StateAugment.class).getC2().intValue()); assertEquals(3, builder.augmentation(NshMdType1StateAugment.class).getC3().intValue()); assertEquals(4, builder.augmentation(NshMdType1StateAugment.class).getC4().intValue()); verify(jvppNsh).nshEntryDump(any(NshEntryDump.class)); }
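The reader unpacks VPP's 32-bit nspNsi field into a 24-bit service path identifier (nsp) and an 8-bit service index (nsi), which is why the test expects nsp 123 and nsi 4 and why the map test packs values as (184<<8 | 255). A worked sketch of that bit arithmetic:

// Illustrative sketch of the nspNsi packing used by the NSH readers and tests.
final class NspNsiExample {
    public static void main(String[] args) {
        final int nspNsi = (123 << 8) | 4;         // packed as returned by the VPP dump
        final long nsp = (nspNsi >> 8) & 0xFFFFFF; // upper 24 bits -> service path 123
        final short nsi = (short) (nspNsi & 0xFF); // lower 8 bits  -> service index 4
        System.out.println(nsp + " " + nsi);       // 123 4
    }
}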
IngressAclCustomizer extends AbstractAclCustomizer implements InitializingListReaderCustomizer<AclSet, AclSetKey, AclSetBuilder>, JvppReplyConsumer { @Nonnull @Override public List<AclSetKey> getAllIds(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final ReadContext readContext) throws ReadFailedException { final String parentInterfaceName = instanceIdentifier.firstKeyOf(Interface.class).getInterfaceId(); final int parentInterfaceIndex = interfaceContext.getIndex(parentInterfaceName, readContext.getMappingContext()); final Optional<AclInterfaceListDetailsReplyDump> stdDumpReply = aclReferenceDumpManager .getDump(instanceIdentifier, readContext.getModificationCache(), parentInterfaceIndex); final Optional<MacipAclInterfaceListDetailsReplyDump> macipDumpReply = macAclReferenceDumpManager .getDump(instanceIdentifier, readContext.getModificationCache(), parentInterfaceIndex); Stream<AclSetKey> macIpAclSetKeys = getMacIpAclSetKeys(readContext, macipDumpReply); Stream<AclSetKey> standardAclSetKeys = getStandardAclSetKeys(readContext, stdDumpReply, true); return Streams.concat(standardAclSetKeys, macIpAclSetKeys).distinct().collect(Collectors.toList()); } IngressAclCustomizer(final FutureJVppAclFacade futureAclFacade, final NamingContext interfaceContext, final AclContextManager standardAclContext, final AclContextManager macIpAClContext); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final AclSet aclSet, @Nonnull final ReadContext readContext); @Nonnull @Override List<AclSetKey> getAllIds(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final ReadContext readContext); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<AclSet> list); @Nonnull @Override AclSetBuilder getBuilder(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final AclSetBuilder aclSetBuilder, @Nonnull final ReadContext readContext); void parseMacIpAclSet(@Nonnull final InstanceIdentifier<AclSet> instanceIdentifier, @Nonnull final AclSetBuilder aclSetBuilder, final String interfaceName, final MappingContext mappingContext, final ModificationCache modificationCache); }
@Test public void testGetAllIdsNoInputAclConfigured() throws ReadFailedException { final AclInterfaceListDetailsReplyDump reply = aclInterfaceDump((byte) 0, "acl1"); when(aclApi.aclInterfaceListDump(aclInterfaceRequest(IF_ID))).thenReturn(future(reply)); assertTrue(getCustomizer().getAllIds(getWildcardedIid(IF_NAME), ctx).isEmpty()); } @Test public void testGetAllIds() throws ReadFailedException { final byte nInput = 2; final AclInterfaceListDetailsReplyDump reply = aclInterfaceDump(nInput, "acl1", "acl2", "acl3"); when(aclApi.aclInterfaceListDump(aclInterfaceRequest(IF_ID))).thenReturn(future(reply)); assertEquals(nInput, getCustomizer().getAllIds(getWildcardedIid(IF_NAME), ctx).size()); }
IoamPotWriterCustomizer extends FutureJVppIoampotCustomizer implements ListWriterCustomizer<PotProfileSet,PotProfileSetKey>, JvppReplyConsumer { @Override public void writeCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataAfter, @Nonnull WriteContext writeContext) throws WriteFailedException { try { addPotProfile(dataAfter,id); } catch (WriteFailedException exCreate) { LOG.error("Add POT profile failed", exCreate); throw new WriteFailedException.CreateFailedException(id, dataAfter, exCreate); } LOG.info("POT profile added iid={}, added {}", id, dataAfter); } IoamPotWriterCustomizer(@Nonnull FutureJVppIoampot futureJVppIoampot); @Override void writeCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataAfter, @Nonnull WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataBefore, @Nonnull PotProfileSet dataAfter, @Nonnull WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataBefore, @Nonnull WriteContext writeContext); }
@Test public void testCreate() throws Exception { final PotProfileSet potProfileSet = generatePotProfileSet(); final InstanceIdentifier<PotProfileSet> id = getPotProfileSetId(POT_TEST_NAME); whenPotAddThenSuccess(); customizer.writeCurrentAttributes(id, potProfileSet, writeContext); verify(jVppIoamPot).potProfileAdd(generatePotProfileAdd()); } @Test public void testCreateFailed() throws Exception { final PotProfileSet potProfileSet = generatePotProfileSet(); final InstanceIdentifier<PotProfileSet> id = getPotProfileSetId(POT_TEST_NAME); whenPotAddThenFailure(); try { customizer.writeCurrentAttributes(id, potProfileSet, writeContext); } catch (WriteFailedException e) { verify(jVppIoamPot).potProfileAdd(generatePotProfileAdd()); return; } fail("WriteFailedException.CreateFailedException was expected"); }
IoamPotWriterCustomizer extends FutureJVppIoampotCustomizer implements ListWriterCustomizer<PotProfileSet,PotProfileSetKey>, JvppReplyConsumer { @Override public void deleteCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataBefore, @Nonnull WriteContext writeContext) throws WriteFailedException { try { delPotProfile(dataBefore,id); } catch (WriteFailedException exDelete) { LOG.error("Del POT Profile failed", exDelete); throw new WriteFailedException.DeleteFailedException(id, exDelete); } LOG.info("POT profile deleted iid={}, removed {}", id, dataBefore); } IoamPotWriterCustomizer(@Nonnull FutureJVppIoampot futureJVppIoampot); @Override void writeCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataAfter, @Nonnull WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataBefore, @Nonnull PotProfileSet dataAfter, @Nonnull WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull InstanceIdentifier<PotProfileSet> id, @Nonnull PotProfileSet dataBefore, @Nonnull WriteContext writeContext); }
@Test public void testDelete() throws Exception { final PotProfileSet potProfileSet = generatePotProfileSet(); final InstanceIdentifier<PotProfileSet> id = getPotProfileSetId(POT_TEST_NAME); whenPotDelThenSuccess(); customizer.deleteCurrentAttributes(id, potProfileSet, writeContext); verify(jVppIoamPot).potProfileDel(generatePotProfileDel(POT_TEST_NAME)); } @Test public void testDeleteFailed() throws Exception { final PotProfileSet potProfileSet = generatePotProfileSet(); final InstanceIdentifier<PotProfileSet> id = getPotProfileSetId(POT_TEST_NAME); whenPotDelThenFailure(); try { customizer.deleteCurrentAttributes(id, potProfileSet, writeContext); } catch (WriteFailedException e) { verify(jVppIoamPot).potProfileDel(generatePotProfileDel(POT_TEST_NAME)); return; } fail("WriteFailedException.DeleteFailedException was expected"); }
IoamExportWriterCustomizer extends FutureJVppIoamexportCustomizer implements WriterCustomizer<IoamExport>, JvppReplyConsumer, Ipv4Translator { @Override public void writeCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport ioamExport, @Nonnull final WriteContext writeContext) throws WriteFailedException { addExportProfile(ioamExport,instanceIdentifier); LOG.info("Export profile {} created, id: {}", ioamExport, instanceIdentifier); } IoamExportWriterCustomizer(FutureJVppIoamexport jVppIoamExport); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport ioamExport, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport dataBefore, @Nonnull final IoamExport dataAfter, @Nonnull final WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport ioamExport, @Nonnull final WriteContext writeContext); }
@Test public void testCreate() throws Exception { final IoamExport ioamExport = generateExportProfile(); final InstanceIdentifier<IoamExport> id = InstanceIdentifier.create(IoamExport.class); whenExportAddThenSuccess(); customizer.writeCurrentAttributes(id, ioamExport, writeContext); verify(jVppIoamexport).ioamExportIp6EnableDisable(generateIoamExportIp6EnableDisable(false)); } @Test public void testCreateFailed() throws Exception { final IoamExport ioamExport = generateExportProfile(); final InstanceIdentifier<IoamExport> id = InstanceIdentifier.create(IoamExport.class); whenExportAddThenFailure(); try { customizer.writeCurrentAttributes(id, ioamExport, writeContext); } catch (WriteFailedException e) { verify(jVppIoamexport).ioamExportIp6EnableDisable(generateIoamExportIp6EnableDisable(false)); return; } fail("WriteFailedException.CreateFailedException was expected"); } @Test public void testCreateWithMissingDisabledField() throws Exception { IoamExportBuilder builder = new IoamExportBuilder(); builder.setSourceAddress(new Ipv4Address("127.0.0.1")); builder.setCollectorAddress(new Ipv4Address("127.0.0.2")); final IoamExport ioamExport = builder.build(); final InstanceIdentifier<IoamExport> id = InstanceIdentifier.create(IoamExport.class); whenExportAddThenSuccess(); customizer.writeCurrentAttributes(id, ioamExport, writeContext); verify(jVppIoamexport).ioamExportIp6EnableDisable(generateIoamExportIp6EnableDisable(true)); }
IoamExportWriterCustomizer extends FutureJVppIoamexportCustomizer implements WriterCustomizer<IoamExport>, JvppReplyConsumer, Ipv4Translator { @Override public void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport ioamExport, @Nonnull final WriteContext writeContext) throws WriteFailedException { deleteExportProfile(ioamExport,instanceIdentifier); LOG.info("Export profile {} deleted, id: {}", ioamExport, instanceIdentifier); } IoamExportWriterCustomizer(FutureJVppIoamexport jVppIoamExport); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport ioamExport, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport dataBefore, @Nonnull final IoamExport dataAfter, @Nonnull final WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<IoamExport> instanceIdentifier, @Nonnull final IoamExport ioamExport, @Nonnull final WriteContext writeContext); }
@Test public void testDelete() throws Exception { final IoamExport ioamExport = generateExportProfile(); final InstanceIdentifier<IoamExport> id = InstanceIdentifier.create(IoamExport.class); whenExportDelThenSuccess(); customizer.deleteCurrentAttributes(id, ioamExport, writeContext); verify(jVppIoamexport).ioamExportIp6EnableDisable(generateIoamExportIp6EnableDisable(true)); } @Test public void testDeleteFailed() throws Exception { final IoamExport ioamExport = generateExportProfile(); final InstanceIdentifier<IoamExport> id = InstanceIdentifier.create(IoamExport.class); whenExportDelThenFailure(); try { customizer.deleteCurrentAttributes(id, ioamExport, writeContext); } catch (WriteFailedException e) { verify(jVppIoamexport).ioamExportIp6EnableDisable(generateIoamExportIp6EnableDisable(true)); return; } fail("WriteFailedException.DeleteFailedException was expected"); }
NshEntryReaderCustomizer extends FutureJVppNshCustomizer implements InitializingListReaderCustomizer<NshEntry, NshEntryKey, NshEntryBuilder>, JvppReplyConsumer { @Nonnull @Override public List<NshEntryKey> getAllIds(@Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final ReadContext context) throws ReadFailedException { LOG.debug("Reading list of keys for nsh entry: {}", id); final NshEntryDump request = new NshEntryDump(); request.entryIndex = -1; NshEntryDetailsReplyDump reply; try { reply = getFutureJVppNsh().nshEntryDump(request).toCompletableFuture().get(); } catch (Exception e) { throw new IllegalStateException("Nsh Entry dump failed", e); } if (reply == null || reply.nshEntryDetails == null) { return Collections.emptyList(); } final int nIdsLength = reply.nshEntryDetails.size(); LOG.debug("vppstate.NshEntryCustomizer.getAllIds: nIds.length={}", nIdsLength); if (nIdsLength == 0) { return Collections.emptyList(); } final List<NshEntryKey> allIds = new ArrayList<>(nIdsLength); for (NshEntryDetails detail : reply.nshEntryDetails) { final String nshName = nshEntryContext.getName(detail.entryIndex, context.getMappingContext()); LOG.debug("vppstate.NshEntryCustomizer.getAllIds: nName={}", nshName); allIds.add(new NshEntryKey(nshName)); } return allIds; } NshEntryReaderCustomizer(@Nonnull final FutureJVppNsh futureJVppNsh, @Nonnull final NamingContext nshEntryContext); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<NshEntry> readData); @Nonnull @Override NshEntryBuilder getBuilder(@Nonnull final InstanceIdentifier<NshEntry> id); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final NshEntryBuilder builder, @Nonnull final ReadContext ctx); @Nonnull @Override List<NshEntryKey> getAllIds(@Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final ReadContext context); @Override Initialized<org.opendaylight.yang.gen.v1.http.fd.io.hc2vpp.yang.vpp.nsh.rev170315.vpp.nsh.nsh.entries.NshEntry> init( @Nonnull final InstanceIdentifier<NshEntry> id, @Nonnull final NshEntry readValue, @Nonnull final ReadContext ctx); }
@Test public void testGetAllIds() throws ReadFailedException { final NshEntryDetailsReplyDump reply = new NshEntryDetailsReplyDump(); final NshEntryDetails nshEntryDetails_1 = new NshEntryDetails(); nshEntryDetails_1.entryIndex = ENTRY_INDEX_1; nshEntryDetails_1.verOC = 0; nshEntryDetails_1.length = 6; nshEntryDetails_1.mdType = 1; nshEntryDetails_1.nextProtocol = 3; nshEntryDetails_1.nspNsi = (123<<8 | 4); nshEntryDetails_1.c1 = 1; nshEntryDetails_1.c2 = 2; nshEntryDetails_1.c3 = 3; nshEntryDetails_1.c4 = 4; final NshEntryDetails nshEntryDetails_2 = new NshEntryDetails(); nshEntryDetails_2.entryIndex = ENTRY_INDEX_2; nshEntryDetails_2.verOC = 0; nshEntryDetails_2.length = 6; nshEntryDetails_2.mdType = 1; nshEntryDetails_2.nextProtocol = 2; nshEntryDetails_2.nspNsi = (223<<8 | 24); nshEntryDetails_2.c1 = 21; nshEntryDetails_2.c2 = 22; nshEntryDetails_2.c3 = 23; nshEntryDetails_2.c4 = 24; reply.nshEntryDetails = Lists.newArrayList(nshEntryDetails_1, nshEntryDetails_2); doReturn(future(reply)).when(jvppNsh).nshEntryDump(any(NshEntryDump.class)); final List<NshEntryKey> allIds = getCustomizer().getAllIds(getNshEntryId(ENTRY_NAME_1), ctx); assertEquals(reply.nshEntryDetails.size(), allIds.size()); }
IoamTraceWriterCustomizer extends FutureJVppIoamtraceCustomizer implements ListWriterCustomizer<TraceConfig, TraceConfigKey>, ByteDataTranslator, JvppReplyConsumer { @Override public void writeCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataCurr, @Nonnull final WriteContext writeContext) throws WriteFailedException { try { addTraceConfig(dataCurr, id); } catch (Exception exCreate) { LOG.error("Add Trace Configuration failed", exCreate); throw new WriteFailedException.CreateFailedException(id, dataCurr, exCreate); } LOG.debug("Trace config added iid={}, added {}", id, dataCurr); } IoamTraceWriterCustomizer(@Nonnull FutureJVppIoamtrace futureJVppIoamtrace); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataCurr, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataBefore, @Nonnull final TraceConfig dataAfter, @Nonnull final WriteContext ctx); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataBefore, @Nonnull final WriteContext ctx); TraceProfileAddReply addTraceConfig(TraceConfig traceConfig, final InstanceIdentifier<TraceConfig> id); TraceProfileDelReply deleteTraceConfig(final InstanceIdentifier<TraceConfig> id); }
@Test public void testCreate() throws Exception { final TraceConfig traceConfig = generateTraceConfig(TRACE_NAME); final InstanceIdentifier<TraceConfig> id = getTraceConfigId(TRACE_NAME); whenTraceAddThenSuccess(); customizer.writeCurrentAttributes(id, traceConfig, writeContext); verify(jvppIoamtrace).traceProfileAdd(generateTraceProfileAdd()); } @Test public void testCreateFailed() throws Exception { final TraceConfig traceConfig = generateTraceConfig(TRACE_NAME); final InstanceIdentifier<TraceConfig> id = getTraceConfigId(TRACE_NAME); whenTraceAddThenFailure(); try { customizer.writeCurrentAttributes(id, traceConfig, writeContext); } catch (WriteFailedException e) { verify(jvppIoamtrace).traceProfileAdd(generateTraceProfileAdd()); return; } fail("WriteFailedException.CreateFailedException was expected"); }
IoamTraceWriterCustomizer extends FutureJVppIoamtraceCustomizer implements ListWriterCustomizer<TraceConfig, TraceConfigKey>, ByteDataTranslator, JvppReplyConsumer { @Override public void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataBefore, @Nonnull final WriteContext ctx) throws WriteFailedException { try { deleteTraceConfig(id); } catch (Exception exDelete) { LOG.error("Delete Trace Configuration failed", exDelete); throw new WriteFailedException.DeleteFailedException(id, exDelete); } LOG.debug("Trace config deleted:iid={} dataBefore={}", id, dataBefore); } IoamTraceWriterCustomizer(@Nonnull FutureJVppIoamtrace futureJVppIoamtrace); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataCurr, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataBefore, @Nonnull final TraceConfig dataAfter, @Nonnull final WriteContext ctx); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<TraceConfig> id, @Nonnull final TraceConfig dataBefore, @Nonnull final WriteContext ctx); TraceProfileAddReply addTraceConfig(TraceConfig traceConfig, final InstanceIdentifier<TraceConfig> id); TraceProfileDelReply deleteTraceConfig(final InstanceIdentifier<TraceConfig> id); }
@Test public void testDelete() throws Exception { final TraceConfig traceConfig = generateTraceConfig(TRACE_NAME); final InstanceIdentifier<TraceConfig> id = getTraceConfigId(TRACE_NAME); whenTraceDelThenSuccess(); customizer.deleteCurrentAttributes(id, traceConfig, writeContext); verify(jvppIoamtrace).traceProfileDel(generateTraceProfileDel()); } @Test public void testDeleteFailed() throws Exception { final TraceConfig traceConfig = generateTraceConfig(TRACE_NAME); final InstanceIdentifier<TraceConfig> id = getTraceConfigId(TRACE_NAME); whenTraceDelThenFailure(); try { customizer.deleteCurrentAttributes(id, traceConfig, writeContext); } catch (WriteFailedException e) { verify(jvppIoamtrace).traceProfileDel(generateTraceProfileDel()); return; } fail("WriteFailedException.DeleteFailedException was expected"); }
TraceProfileReaderCustomizer extends FutureJVppIoamtraceCustomizer implements InitializingListReaderCustomizer<TraceConfig, TraceConfigKey, TraceConfigBuilder>, JvppReplyConsumer { @Override public void readCurrentAttributes(@Nonnull InstanceIdentifier<TraceConfig> id, @Nonnull TraceConfigBuilder builder, @Nonnull ReadContext ctx) throws ReadFailedException { LOG.debug("reading attribute for trace config {}",id); final TraceProfileShowConfig request = new TraceProfileShowConfig(); TraceProfileShowConfigReply reply = getReplyForRead (getFutureJVppIoamtrace().traceProfileShowConfig(request) .toCompletableFuture(),id); if (reply == null) { LOG.debug("{} returned null as reply from vpp",id); return; } if (reply.traceType == 0) { LOG.debug("{} no configured trace config found",id); return; } builder.setNodeId((long) reply.nodeId); builder.setTraceAppData((long) reply.appData); builder.setTraceNumElt((short) reply.numElts); builder.setTraceTsp(TraceConfig.TraceTsp.forValue(reply.traceTsp)); builder.setTraceType((short) reply.traceType); LOG.debug("Item {} successfully read: {}",id,builder.build()); } TraceProfileReaderCustomizer(@Nonnull FutureJVppIoamtrace futureJVppIoamtrace); @Nonnull @Override TraceConfigBuilder getBuilder(@Nonnull InstanceIdentifier<TraceConfig> id); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull InstanceIdentifier<TraceConfig> id, @Nonnull TraceConfig readValue, @Nonnull ReadContext ctx); @Override void readCurrentAttributes(@Nonnull InstanceIdentifier<TraceConfig> id, @Nonnull TraceConfigBuilder builder, @Nonnull ReadContext ctx); @Override void merge(@Nonnull Builder<? extends DataObject> parentBuilder, @Nonnull TraceConfig readValue); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<TraceConfig> list); @Nonnull @Override List<TraceConfigKey> getAllIds(@Nonnull final InstanceIdentifier<TraceConfig> instanceIdentifier, @Nonnull final ReadContext readContext); }
@Test public void testReadCurrentAttributes() throws ReadFailedException { TraceConfigBuilder builder = new TraceConfigBuilder(); getCustomizer().readCurrentAttributes(InstanceIdentifier.create(IoamTraceConfig.class) .child(TraceConfig.class, new TraceConfigKey("Trace config")), builder, ctx); assertEquals(1234,builder.getTraceAppData().longValue()); assertEquals(TraceConfig.TraceTsp.Milliseconds.getIntValue(),builder.getTraceTsp().getIntValue()); assertEquals(1,builder.getNodeId().longValue()); assertEquals(4,builder.getTraceNumElt().shortValue()); assertEquals(0x1f,builder.getTraceType().shortValue()); verify(jVppIoamtrace).traceProfileShowConfig(any(TraceProfileShowConfig.class)); }
PotProfileReaderCustomizer extends FutureJVppIoampotCustomizer implements JvppReplyConsumer, InitializingListReaderCustomizer<PotProfileList,PotProfileListKey,PotProfileListBuilder> { @Override public void readCurrentAttributes(@Nonnull final InstanceIdentifier<PotProfileList> instanceIdentifier, @Nonnull final PotProfileListBuilder builder, @Nonnull final ReadContext readContext) throws ReadFailedException { final PotProfileShowConfigDump request = new PotProfileShowConfigDump(); PotProfileListKey key = instanceIdentifier.firstKeyOf(PotProfileList.class); request.id = key.getIndex().getValue().byteValue(); final PotProfileShowConfigDetailsReplyDump reply = getReplyForRead(getFutureJVppIoampot() .potProfileShowConfigDump(request) .toCompletableFuture(), instanceIdentifier); if (reply == null || reply.potProfileShowConfigDetails == null || reply.potProfileShowConfigDetails.isEmpty()) { LOG.debug("Vpp returned no pot profiles"); return; } final PotProfileShowConfigDetails details = reply.potProfileShowConfigDetails.get(0); builder.setValidator(details.validator == 1); builder.setValidatorKey(BigInteger.valueOf(details.secretKey)); builder.setSecretShare(BigInteger.valueOf(details.secretShare)); builder.setPrimeNumber(BigInteger.valueOf(details.prime)); builder.setPublicPolynomial(BigInteger.valueOf(details.polynomialPublic)); builder.setIndex(new ProfileIndexRange((int)details.id)); builder.setLpc(BigInteger.valueOf(details.lpc)); builder.setNumberOfBits(getMaxBitsfromBitmask(BigInteger.valueOf(details.bitMask))); LOG.info("Item {} successfully read: {}",instanceIdentifier, builder.build()); } PotProfileReaderCustomizer(FutureJVppIoampot futureJVppIoamPot); @Nonnull @Override Initialized<? extends DataObject> init(@Nonnull final InstanceIdentifier<PotProfileList> instanceIdentifier, @Nonnull final PotProfileList potProfileList, @Nonnull final ReadContext readContext); @Nonnull @Override List<PotProfileListKey> getAllIds(@Nonnull final InstanceIdentifier<PotProfileList> instanceIdentifier, @Nonnull final ReadContext readContext); @Override void merge(@Nonnull final Builder<? extends DataObject> builder, @Nonnull final List<PotProfileList> list); @Nonnull @Override PotProfileListBuilder getBuilder(@Nonnull final InstanceIdentifier<PotProfileList> instanceIdentifier); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<PotProfileList> instanceIdentifier, @Nonnull final PotProfileListBuilder builder, @Nonnull final ReadContext readContext); }
@Test public void testReadCurrentAttributes() throws ReadFailedException { PotProfileListBuilder builder = new PotProfileListBuilder(); getCustomizer().readCurrentAttributes(getPotProfileListId(0),builder,ctx); assertEquals(0x4,builder.getNumberOfBits().longValue()); assertEquals(0,builder.getIndex().getValue().intValue()); assertEquals(1234,builder.getLpc().longValue()); assertEquals(1234,builder.getPublicPolynomial().longValue()); assertEquals(7,builder.getPrimeNumber().longValue()); assertEquals(1234,builder.getValidatorKey().longValue()); assertEquals(1234,builder.getSecretShare().longValue()); assertEquals(true,builder.isValidator()); }
Srv6Util { public static String getCandidatePathName(final Ipv6Address bsid, final long weight) { return bsid.getValue() + "-" + weight; } private Srv6Util(); static String getCandidatePathName(final Ipv6Address bsid, final long weight); static Ipv6Address extractBsid( final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite); static int extractVrfFib(final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite); }
@Test public void getCandidatePathNameTest() { Assert.assertEquals(CANDIDATE_PATH_NAME, Srv6Util.getCandidatePathName(BSID, 0L)); }
Srv6Util { public static <T extends DataObject> Ipv6Address extractBsid( final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite) { Optional<Policy> policyOptional = isWrite ? writeContext.readAfter(RWUtils.cutId(instanceIdentifier, Policy.class)) : writeContext.readBefore(RWUtils.cutId(instanceIdentifier, Policy.class)); if (policyOptional.isPresent() && policyOptional.get().getBindingSid() != null && policyOptional.get().getBindingSid().getConfig() != null) { org.opendaylight.yang.gen.v1.http.cisco.com.ns.yang.oc.srte.policy.rev170918.binding.sid.properties.binding.sid.Config config = policyOptional.get().getBindingSid().getConfig(); if (config.getType() == DataplaneType.Srv6 && config.getValue() != null && config.getValue().getIpAddress() != null && config.getValue().getIpAddress().getIpv6Address() != null) { return config.getValue().getIpAddress().getIpv6Address(); } } return null; } private Srv6Util(); static String getCandidatePathName(final Ipv6Address bsid, final long weight); static Ipv6Address extractBsid( final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite); static int extractVrfFib(final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite); }
@Test public void extractBsidTest() { Assert.assertEquals(BSID.getValue(), Srv6Util.extractBsid(POLICY_IID, ctx, true).getValue()); }
Srv6Util { public static <T extends DataObject> int extractVrfFib(final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite) { Optional<Policy> policyOptional = isWrite ? writeContext.readAfter(RWUtils.cutId(instanceIdentifier, Policy.class)) : writeContext.readBefore(RWUtils.cutId(instanceIdentifier, Policy.class)); if (policyOptional.isPresent() && policyOptional.get().augmentation(VppSrPolicyAugmentation.class) != null && policyOptional.get().augmentation(VppSrPolicyAugmentation.class).getVppSrPolicy() != null) { VppSrPolicy vppSrPolicy = policyOptional.get().augmentation(VppSrPolicyAugmentation.class).getVppSrPolicy(); if (vppSrPolicy.getConfig() != null && vppSrPolicy.getConfig().getTableId() != null) { return vppSrPolicy.getConfig().getTableId().getValue().intValue(); } } return 0; } private Srv6Util(); static String getCandidatePathName(final Ipv6Address bsid, final long weight); static Ipv6Address extractBsid( final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite); static int extractVrfFib(final @Nonnull InstanceIdentifier<T> instanceIdentifier, final @Nonnull WriteContext writeContext, boolean isWrite); }
@Test public void extractVrfFibTest() { Assert.assertEquals(0, Srv6Util.extractVrfFib(POLICY_IID, ctx, true)); }
PrefixCustomizer extends FutureJVppCustomizer implements ListWriterCustomizer<Prefix, PrefixKey>, JvppReplyConsumer, ByteDataTranslator, Ipv6Translator { @Override public void writeCurrentAttributes(@Nonnull final InstanceIdentifier<Prefix> id, @Nonnull final Prefix dataAfter, @Nonnull final WriteContext writeContext) throws WriteFailedException { LOG.debug("Writing Prefix {} dataAfter={}", id, dataAfter); setPrefix(id, dataAfter, writeContext, false); } PrefixCustomizer(@Nonnull final FutureJVppCore jvpp, @Nonnull final NamingContext interfaceContext); @Override void writeCurrentAttributes(@Nonnull final InstanceIdentifier<Prefix> id, @Nonnull final Prefix dataAfter, @Nonnull final WriteContext writeContext); @Override void updateCurrentAttributes(@Nonnull final InstanceIdentifier<Prefix> id, @Nonnull final Prefix dataBefore, @Nonnull final Prefix dataAfter, @Nonnull final WriteContext writeContext); @Override void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<Prefix> id, @Nonnull final Prefix dataBefore, @Nonnull final WriteContext writeContext); }
@Test public void testWriteSimple(@InjectTestData(resourcePath = "/ra/simplePrefix.json", id = RA_PATH) PrefixList prefixList) throws WriteFailedException, InterruptedException { final Prefix data = getPrefix(prefixList); customizer.writeCurrentAttributes(IID, data, writeContext); final SwInterfaceIp6NdRaPrefix request = new SwInterfaceIp6NdRaPrefix(); request.swIfIndex = IFC_INDEX; request.prefix = getPrefix( new byte[]{0x20, 0x01, 0x0d, (byte) 0xb8, 0x0a, 0x0b, 0x12, (byte) 0xf0, 0, 0, 0, 0, 0, 0, 0, 0x02}, (byte) 64); request.valLifetime = 2592000; request.prefLifetime = 604800; verify(api).swInterfaceIp6NdRaPrefix(request); }
PrettyPrinter { public static String toString(Node n) { return toString(n, null); } static String toString(Node n); static String toString(Node n, PrefixMapping prefixes); static String toString(Statement s); static String toString(Triple t); static String toString(Triple t, PrefixMapping prefixes); static String toString(RDFDatatype datatype); static String toString(RDFNode n); static String toString(Collection<? extends RDFNode> res); static final PrefixMapping LIBRARY; }
@Test public void testNodePrettyPrinting() { Assert.assertEquals("\"foo\"", PrettyPrinter.toString(NodeFactory.createLiteral("foo"))); Assert.assertEquals("\"foo\"@en", PrettyPrinter.toString(NodeFactory.createLiteral("foo", "en", null))); Assert.assertEquals("\"1\"^^" + PrettyPrinter.LIBRARY.shortForm(XSDDatatype.XSDint.getURI()), PrettyPrinter.toString(NodeFactory.createLiteral("1", null, XSDDatatype.XSDint))); Assert.assertEquals("\"1\"^^xsd:int", PrettyPrinter.toString(NodeFactory.createLiteral("1", null, XSDDatatype.XSDint), PrefixMapping.Standard)); Assert.assertEquals("_:foo", PrettyPrinter.toString(NodeFactory.createBlankNode("foo"))); Assert.assertEquals("<http: Assert.assertEquals("<" + RDF.type.getURI() + ">", PrettyPrinter.toString(RDF.type.asNode(), new PrefixMappingImpl())); Assert.assertEquals("rdf:type", PrettyPrinter.toString(RDF.type.asNode(), PrefixMapping.Standard)); Assert.assertEquals("?x", PrettyPrinter.toString(NodeFactory.createVariable("x"))); Assert.assertEquals("?ANY", PrettyPrinter.toString(Node.ANY)); } @Test public void testTriplePrettyPrinting() { Assert.assertEquals("<http: PrettyPrinter.toString(new Triple(NodeFactory.createURI("http: RDFS.label.asNode(), NodeFactory.createLiteral("Example", null, null)))); } @Test public void testTriplePrettyPrintingWithNodeANY() { Assert.assertEquals("?ANY ?ANY ?ANY .", PrettyPrinter.toString(Triple.ANY)); } @Test public void testTriplePrettyPrintingWithPrefixMapping() { PrefixMappingImpl prefixes = new PrefixMappingImpl(); prefixes.setNsPrefixes(PrefixMapping.Standard); prefixes.setNsPrefix("ex", "http: Assert.assertEquals("ex:a rdfs:label \"Example\" .", PrettyPrinter.toString(new Triple(NodeFactory.createURI("http: RDFS.label.asNode(), NodeFactory.createLiteral("Example", null, null)), prefixes)); } @Test public void testResourcePrettyPrinting() { Model m = ModelFactory.createDefaultModel(); Assert.assertEquals("\"foo\"", PrettyPrinter.toString(m.createLiteral("foo"))); Assert.assertEquals("<http: } @Test public void testUsePrefixMappingWhenPrintingURIResources() { Model m = ModelFactory.createDefaultModel(); m.setNsPrefix("ex", "http: Assert.assertEquals("ex:foo", PrettyPrinter.toString(m.createResource("http: } @Test public void testD2RQTermsHaveD2RQPrefix() { Assert.assertEquals("d2rq:ClassMap", PrettyPrinter.toString(D2RQ.ClassMap)); } @Test public void testSomeRDFDatatypeToString() { RDFDatatype someDatatype = TypeMapper.getInstance().getSafeTypeByName("http: Assert.assertEquals("<http: } @Test public void testXSDTypeToString() { Assert.assertEquals("xsd:string", PrettyPrinter.toString(XSDDatatype.XSDstring)); }
FilterParser { public Filter parseTableFilter(boolean matchParents) throws ParseException { List<Filter> result = new ArrayList<>(); for (List<IdentifierMatcher> list : parse()) { if (list.size() < 1 || list.size() > 2) { throw new ParseException("Syntax error in table filter list; expected list of comma- or newline-separated names in [schema.]table notation: '" + s + "'"); } if (list.size() == 1) { result.add(new FilterMatchTable(Filter.NULL_MATCHER, list.get(0), matchParents)); } else { result.add(new FilterMatchTable(list.get(0), list.get(1), matchParents)); } } return FilterMatchAny.create(result); } FilterParser(String filterSpec); Filter parseSchemaFilter(); Filter parseTableFilter(boolean matchParents); Filter parseColumnFilter(boolean matchParents); List<List<IdentifierMatcher>> parse(); }
@Test public void testParseAsTableFilter() throws ParseException { Filter result = new FilterParser("schema.table1,schema.table2,table3").parseTableFilter(false); Assert.assertTrue(result.matchesTable("schema", "table1")); Assert.assertTrue(result.matchesTable("schema", "table2")); Assert.assertTrue(result.matchesTable(null, "table3")); Assert.assertFalse(result.matchesTable("schema", "table3")); Assert.assertFalse(result.matchesTable("schema", "table4")); Assert.assertFalse(result.matchesTable("schema2", "table1")); Assert.assertFalse(result.matchesTable(null, "table1")); Assert.assertFalse(result.matchesTable(null, "table4")); } @Test public void testTableFilterTooMany() { try { new FilterParser("a.b.c").parseTableFilter(true); Assert.fail("Should have failed because not in schema notation"); } catch (ParseException ex) { } }
FilterParser { public Filter parseColumnFilter(boolean matchParents) throws ParseException { List<Filter> result = new ArrayList<>(); for (List<IdentifierMatcher> list : parse()) { if (list.size() < 2 || list.size() > 3) { throw new ParseException("Syntax error in column filter list; expected list of comma- or newline-separated names in [schema.]table.column notation: '" + s + "'"); } if (list.size() == 2) { result.add(new FilterMatchColumn(Filter.NULL_MATCHER, list.get(0), list.get(1), matchParents)); } else { result.add(new FilterMatchColumn(list.get(0), list.get(1), list.get(2), matchParents)); } } return FilterMatchAny.create(result); } FilterParser(String filterSpec); Filter parseSchemaFilter(); Filter parseTableFilter(boolean matchParents); Filter parseColumnFilter(boolean matchParents); List<List<IdentifierMatcher>> parse(); }
@Test public void testParseAsColumnFilter() throws ParseException { Filter result = new FilterParser("s.t1.c1,t2.c2,t2.c3").parseColumnFilter(false); Assert.assertTrue(result.matchesColumn("s", "t1", "c1")); Assert.assertTrue(result.matchesColumn(null, "t2", "c2")); Assert.assertTrue(result.matchesColumn(null, "t2", "c3")); Assert.assertFalse(result.matchesColumn(null, "t1", "c1")); Assert.assertFalse(result.matchesColumn("s", "t2", "c2")); Assert.assertFalse(result.matchesColumn(null, "t1", "c3")); }
IRIEncoder { public static String encode(String s) { StringBuilder sbuffer = new StringBuilder(s.length()); for (int i = 0; i < s.length(); i++) { char c = s.charAt(i); int cCode = (int) c; if (c == '-' || c == '_' || c == '~' || c == '.' || isDigit(cCode) || isLetter(cCode) || cCode >= 0x00A0) { sbuffer.append(c); } else { for (byte b : s.substring(i, i + 1).getBytes(StandardCharsets.UTF_8)) { sbuffer.append('%'); sbuffer.append(hexDigits[(b >> 4) & 0x0F]); sbuffer.append(hexDigits[b & 0x0F]); } } } return sbuffer.toString(); } static String encode(String s); }
@Test public void testDontEncodeAlphanumeric() { Assert.assertEquals("azAZ09", IRIEncoder.encode("azAZ09")); } @Test public void testDontEncodeSafePunctuation() { Assert.assertEquals("-_.~", IRIEncoder.encode("-_.~")); } @Test public void testDontEncodeUnicodeChars() { Assert.assertEquals("\u00E4", IRIEncoder.encode("\u00E4")); Assert.assertEquals("\u00A0", IRIEncoder.encode("\u00A0")); Assert.assertEquals("\uD7FF", IRIEncoder.encode("\uD7FF")); Assert.assertEquals("\uFFEF", IRIEncoder.encode("\uFFEF")); } @Test public void testEncodeGenDelims() { Assert.assertEquals("%3A%2F%3F%23%5B%5D%40", IRIEncoder.encode(":/?#[]@")); } @Test public void testEncodeSubDelims() { Assert.assertEquals("%21%24%26%27%28%29%2A%2B%2C%3B%3D", IRIEncoder.encode("!$&'()*+,;=")); } @Test public void testEncodePercentSign() { Assert.assertEquals("%25", IRIEncoder.encode("%")); } @Test public void testEncodeOtherASCIIChars() { Assert.assertEquals("%20%22%3C%3E%5C%5E%60%7B%7C%7D", IRIEncoder.encode(" \"<>\\^`{|}")); } @Test public void testEncodeASCIIControlChars() { Assert.assertEquals("%00%01%02%03%04%05%06%07%08%09%0A%0B%0C%0D%0E%0F", IRIEncoder.encode("\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\u0008\u0009\n\u000B\u000C\r\u000E\u000F")); Assert.assertEquals("%10%11%12%13%14%15%16%17%18%19%1A%1B%1C%1D%1E%1F", IRIEncoder.encode("\u0010\u0011\u0012\u0013\u0014\u0015\u0016\u0017\u0018\u0019\u001A\u001B\u001C\u001D\u001E\u001F")); Assert.assertEquals("%7F", IRIEncoder.encode("\u007F")); } @Test public void testEncodeUnicodeControlChars() { Assert.assertEquals("%C2%80", IRIEncoder.encode("\u0080")); Assert.assertEquals("%C2%9F", IRIEncoder.encode("\u009F")); }
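A minimal, hedged worked example of where escape sequences such as %C2%80 in the tests above come from: characters below the encoder's 0x00A0 cut-off that are not letters, digits, or safe punctuation are escaped byte by byte from their UTF-8 form. The class name Utf8EscapeSketch and the snippet itself are illustrative only, not part of the project.

import java.nio.charset.StandardCharsets;

public class Utf8EscapeSketch {
    public static void main(String[] args) {
        // U+0080 lies below the 0x00A0 threshold used by IRIEncoder, so it is
        // percent-escaped from its UTF-8 bytes (C2 80), yielding %C2%80.
        StringBuilder sb = new StringBuilder();
        for (byte b : "\u0080".getBytes(StandardCharsets.UTF_8)) {
            sb.append(String.format("%%%02X", b));
        }
        System.out.println(sb); // prints %C2%80
    }
}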
TranslationTableParser { public Collection<Row> parseTranslations() { try { List<Row> result = new ArrayList<>(); while (true) { String line = this.reader.readLine(); if (line == null) { break; } String[] fields = this.csvLineParser.parse(line); if (fields.length != 2) { LOGGER.warn("Skipping line with {} instead of 2 columns in CSV file {}", fields.length, url); continue; } result.add(new Row(fields[0], fields[1])); } return result; } catch (IOException iex) { throw new D2RQException(iex); } } TranslationTableParser(Reader reader); TranslationTableParser(String url); Collection<Row> parseTranslations(); }
@Test public void testSimple() { String csv = "key,value"; Collection<TranslationTableParser.Row> translations = new TranslationTableParser(new StringReader(csv)).parseTranslations(); Assert.assertEquals(1, translations.size()); TranslationTableParser.Row t = translations.iterator().next(); Assert.assertEquals("key", t.first()); Assert.assertEquals("value", t.second()); } @Test public void testTwoRows() { String csv = "db1,rdf1\ndb2,rdf2"; Collection<TranslationTableParser.Row> translations = new TranslationTableParser(new StringReader(csv)).parseTranslations(); Assert.assertEquals(2, translations.size()); Assert.assertEquals(this.simpleTranslations, new HashSet<>(translations)); } @Test public void testParseFromFile() { String file = JenaModelUtils.getRelativeResourcePath("/csv/translationtable.csv"); Collection<TranslationTableParser.Row> translations = new TranslationTableParser(file).parseTranslations(); Assert.assertEquals(this.simpleTranslations, new HashSet<>(translations)); } @Test public void testParseFromFileWithProtocol() { URL url = TranslationTableParser.class.getResource("/csv/translationtable.csv"); Collection<TranslationTableParser.Row> translations = new TranslationTableParser(url.toString()).parseTranslations(); Assert.assertEquals(this.simpleTranslations, new HashSet<>(translations)); } @Test public void testEmpty() { Collection<TranslationTableParser.Row> translations = new TranslationTableParser(new StringReader("")).parseTranslations(); Assert.assertTrue(translations.isEmpty()); }
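A hedged usage sketch of the parser above, mirroring what the tests do with an in-memory CSV. It assumes TranslationTableParser and its nested Row type are already in scope; their package is not shown in this excerpt, so the surrounding class is hypothetical.

import java.io.StringReader;
import java.util.Collection;

public class TranslationTableSketch {
    public static void main(String[] args) {
        // Two-column CSV: first column is the database value, second the RDF value.
        String csv = "db1,rdf1\ndb2,rdf2";
        Collection<TranslationTableParser.Row> rows =
                new TranslationTableParser(new StringReader(csv)).parseTranslations();
        for (TranslationTableParser.Row row : rows) {
            System.out.println(row.first() + " -> " + row.second());
        }
    }
}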
GraphPatternTranslator { public List<NodeRelation> translate() { if (triplePatterns.isEmpty()) { return Collections.singletonList(NodeRelation.TRUE); } Iterator<Triple> it = triplePatterns.iterator(); List<CandidateList> candidateLists = new ArrayList<>(triplePatterns.size()); int index = 1; while (it.hasNext()) { Triple triplePattern = it.next(); CandidateList candidates = new CandidateList( triplePattern, triplePatterns.size() > 1, index); if (candidates.isEmpty()) { return Collections.emptyList(); } candidateLists.add(candidates); index++; } Collections.sort(candidateLists); List<TripleRelationJoiner> joiners = new ArrayList<>(); joiners.add(TripleRelationJoiner.create(this.useAllOptimizations)); for (CandidateList candidates : candidateLists) { List<TripleRelationJoiner> nextJoiners = new ArrayList<>(); for (TripleRelationJoiner joiner : joiners) { nextJoiners.addAll(joiner.joinAll(candidates.triplePattern(), candidates.all())); } joiners = nextJoiners; } List<NodeRelation> results = new ArrayList<>(joiners.size()); for (TripleRelationJoiner joiner : joiners) { NodeRelation nodeRelation = joiner.toNodeRelation(); if (!nodeRelation.baseRelation().equals(Relation.EMPTY) || !useAllOptimizations) results.add(nodeRelation); } return results; } GraphPatternTranslator(List<Triple> triplePatterns, Collection<TripleRelation> tripleRelations, boolean useAllOptimizations); List<NodeRelation> translate(); }
@Test public void testReturnMultipleMatchesForSingleTriplePattern() { NodeRelation[] rels = translate("?s ?p ?o", "engine/simple.n3"); Assert.assertEquals(2, rels.length); }
URIMakerRule implements Comparator<TripleRelation> { @Override public int compare(TripleRelation o1, TripleRelation o2) { int priority1 = priority(o1); int priority2 = priority(o2); return Integer.compare(priority2, priority1); } List<TripleRelation> sortRDFRelations(Collection<TripleRelation> tripleRelations); URIMakerRuleChecker createRuleChecker(Node node); @Override int compare(TripleRelation o1, TripleRelation o2); }
@Test public void testComparator() { URIMakerRule u = new URIMakerRule(); Assert.assertEquals(0, u.compare(this.withURIPatternSubject, this.withURIPatternSubject)); Assert.assertEquals(1, u.compare(this.withURIPatternSubject, this.withURIPatternSubjectAndObject)); Assert.assertEquals(-1, u.compare(this.withURIPatternSubject, this.withURIColumnSubject)); Assert.assertEquals(-1, u.compare(this.withURIPatternSubject, this.withURIPatternSubjectAndURIColumnObject)); Assert.assertEquals(-1, u.compare(this.withURIPatternSubjectAndObject, this.withURIPatternSubject)); Assert.assertEquals(0, u.compare(this.withURIPatternSubjectAndObject, this.withURIPatternSubjectAndObject)); Assert.assertEquals(-1, u.compare(this.withURIPatternSubjectAndObject, this.withURIColumnSubject)); Assert.assertEquals(-1, u.compare(this.withURIPatternSubjectAndObject, this.withURIPatternSubjectAndURIColumnObject)); Assert.assertEquals(1, u.compare(this.withURIColumnSubject, this.withURIPatternSubject)); Assert.assertEquals(1, u.compare(this.withURIColumnSubject, this.withURIPatternSubjectAndObject)); Assert.assertEquals(0, u.compare(this.withURIColumnSubject, this.withURIColumnSubject)); Assert.assertEquals(1, u.compare(this.withURIColumnSubject, this.withURIPatternSubjectAndURIColumnObject)); Assert.assertEquals(1, u.compare(this.withURIPatternSubjectAndURIColumnObject, this.withURIPatternSubject)); Assert.assertEquals(1, u.compare(this.withURIPatternSubjectAndURIColumnObject, this.withURIPatternSubjectAndObject)); Assert.assertEquals(-1, u.compare(this.withURIPatternSubjectAndURIColumnObject, this.withURIColumnSubject)); Assert.assertEquals(0, u.compare(this.withURIPatternSubjectAndURIColumnObject, this.withURIPatternSubjectAndURIColumnObject)); }
URIMakerRule implements Comparator<TripleRelation> { public List<TripleRelation> sortRDFRelations(Collection<TripleRelation> tripleRelations) { ArrayList<TripleRelation> results = new ArrayList<>(tripleRelations); results.sort(this); return results; } List<TripleRelation> sortRDFRelations(Collection<TripleRelation> tripleRelations); URIMakerRuleChecker createRuleChecker(Node node); @Override int compare(TripleRelation o1, TripleRelation o2); }
@Test public void testSort() { Collection<TripleRelation> unsorted = new ArrayList<>(Arrays.asList(this.withURIColumnSubject, this.withURIPatternSubject, this.withURIPatternSubjectAndObject, this.withURIPatternSubjectAndURIColumnObject)); Collection<TripleRelation> sorted = new ArrayList<>(Arrays.asList(this.withURIPatternSubjectAndObject, this.withURIPatternSubject, this.withURIPatternSubjectAndURIColumnObject, this.withURIColumnSubject)); Assert.assertEquals(sorted, new URIMakerRule().sortRDFRelations(unsorted)); }
VocabularySummarizer { public Set<Property> getAllProperties() { return properties; } VocabularySummarizer(Class<?> vocabularyJavaClass); static Set<Resource> getStandardResources(); static Set<Property> getStandardProperties(); static Set<X> getResources(Class<X> type, Class<?>... classes); static Stream<? extends Resource> resources(Class<?>... classes); static Stream<X> resources(Class<X> type, Class<?>... classes); @SuppressWarnings("unchecked") Set<X> get(Class<X> type); Set<Property> getAllProperties(); Set<Resource> getAllClasses(); String getNamespace(); Collection<Resource> getUndefinedClasses(Model model); Collection<Property> getUndefinedProperties(Model model); void assertNoUndefinedTerms(Model model, int undefinedPropertyErrorCode, int undefinedClassErrorCode); }
@Test public void testAllPropertiesEmpty() { VocabularySummarizer vocab = new VocabularySummarizer(Object.class); Assert.assertTrue(vocab.getAllProperties().isEmpty()); } @Test public void testAllPropertiesContainsProperty() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getAllProperties().contains(D2RQ.column)); Assert.assertTrue(vocab.getAllProperties().contains(D2RQ.belongsToClassMap)); } @Test public void testAllPropertiesDoesNotContainClass() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertFalse(vocab.getAllProperties().contains(D2RQ.Database)); } @Test public void testAllPropertiesDoesNotContainTermFromOtherNamespace() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertFalse(vocab.getAllProperties().contains(RDF.type)); }
VocabularySummarizer { public Set<Resource> getAllClasses() { return classes; } VocabularySummarizer(Class<?> vocabularyJavaClass); static Set<Resource> getStandardResources(); static Set<Property> getStandardProperties(); static Set<X> getResources(Class<X> type, Class<?>... classes); static Stream<? extends Resource> resources(Class<?>... classes); static Stream<X> resources(Class<X> type, Class<?>... classes); @SuppressWarnings("unchecked") Set<X> get(Class<X> type); Set<Property> getAllProperties(); Set<Resource> getAllClasses(); String getNamespace(); Collection<Resource> getUndefinedClasses(Model model); Collection<Property> getUndefinedProperties(Model model); void assertNoUndefinedTerms(Model model, int undefinedPropertyErrorCode, int undefinedClassErrorCode); }
@Test public void testAllClassesEmpty() { VocabularySummarizer vocab = new VocabularySummarizer(Object.class); Assert.assertTrue(vocab.getAllClasses().isEmpty()); } @Test public void testAllClassesContainsClass() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getAllClasses().contains(D2RQ.Database)); } @Test public void testAllClassesDoesNotContainProperty() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertFalse(vocab.getAllClasses().contains(D2RQ.column)); } @Test public void testAllClassesDoesNotContainTermFromOtherNamespace() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertFalse(vocab.getAllClasses().contains(D2RConfig.Server)); }
VocabularySummarizer { public String getNamespace() { return namespace; } VocabularySummarizer(Class<?> vocabularyJavaClass); static Set<Resource> getStandardResources(); static Set<Property> getStandardProperties(); static Set<X> getResources(Class<X> type, Class<?>... classes); static Stream<? extends Resource> resources(Class<?>... classes); static Stream<X> resources(Class<X> type, Class<?>... classes); @SuppressWarnings("unchecked") Set<X> get(Class<X> type); Set<Property> getAllProperties(); Set<Resource> getAllClasses(); String getNamespace(); Collection<Resource> getUndefinedClasses(Model model); Collection<Property> getUndefinedProperties(Model model); void assertNoUndefinedTerms(Model model, int undefinedPropertyErrorCode, int undefinedClassErrorCode); }
@Test public void testGetNamespaceEmpty() { Assert.assertNull(new VocabularySummarizer(Object.class).getNamespace()); } @Test public void testGetNamespaceD2RQ() { Assert.assertEquals(D2RQ.NS, new VocabularySummarizer(D2RQ.class).getNamespace()); } @Test public void testGetNamespaceD2RConfig() { Assert.assertEquals(D2RConfig.NS, new VocabularySummarizer(D2RConfig.class).getNamespace()); }
VocabularySummarizer { public Collection<Resource> getUndefinedClasses(Model model) { Set<Resource> result = new HashSet<>(); StmtIterator it = model.listStatements(null, RDF.type, (RDFNode) null); while (it.hasNext()) { Statement stmt = it.nextStatement(); if (stmt.getObject().isURIResource() && stmt.getResource().getURI().startsWith(namespace) && !classes.contains(stmt.getResource())) { result.add(stmt.getResource()); } } return result; } VocabularySummarizer(Class<?> vocabularyJavaClass); static Set<Resource> getStandardResources(); static Set<Property> getStandardProperties(); static Set<X> getResources(Class<X> type, Class<?>... classes); static Stream<? extends Resource> resources(Class<?>... classes); static Stream<X> resources(Class<X> type, Class<?>... classes); @SuppressWarnings("unchecked") Set<X> get(Class<X> type); Set<Property> getAllProperties(); Set<Resource> getAllClasses(); String getNamespace(); Collection<Resource> getUndefinedClasses(Model model); Collection<Property> getUndefinedProperties(Model model); void assertNoUndefinedTerms(Model model, int undefinedPropertyErrorCode, int undefinedClassErrorCode); }
@Test public void testNoUndefinedClassesForEmptyModel() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedClasses(ModelFactory.createDefaultModel()).isEmpty()); } @Test public void testNoUndefinedClassesWithoutTypeStatement() { Model m = JenaModelUtils.loadTurtle("/vocab/no-type.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedClasses(m).isEmpty()); } @Test public void testNoUndefinedClassesIfAllClassesDefined() { Model m = JenaModelUtils.loadTurtle("/vocab/defined-types.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedClasses(m).isEmpty()); } @Test public void testNoUndefinedClassesIfAllInOtherNamespace() { Model m = JenaModelUtils.loadTurtle("/vocab/other-namespace-types.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedClasses(m).isEmpty()); } @Test public void testFindOneUndefinedClass() { final Model m = JenaModelUtils.loadTurtle("/vocab/one-undefined-type.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Collection<Resource> expected = new HashSet<Resource>() {{ this.add(m.createResource(D2RQ.NS + "Pint")); }}; Assert.assertEquals(expected, vocab.getUndefinedClasses(m)); } @Test public void testFindTwoUndefinedClasses() { final Model m = JenaModelUtils.loadTurtle("/vocab/two-undefined-types.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Collection<Resource> expected = new HashSet<Resource>() {{ this.add(m.createResource(D2RQ.NS + "Pint")); this.add(m.createResource(D2RQ.NS + "Shot")); }}; Assert.assertEquals(expected, vocab.getUndefinedClasses(m)); }
VocabularySummarizer { public Collection<Property> getUndefinedProperties(Model model) { Set<Property> result = new HashSet<>(); StmtIterator it = model.listStatements(); while (it.hasNext()) { Statement stmt = it.nextStatement(); if (stmt.getPredicate().getURI().startsWith(namespace) && !properties.contains(stmt.getPredicate())) { result.add(stmt.getPredicate()); } } return result; } VocabularySummarizer(Class<?> vocabularyJavaClass); static Set<Resource> getStandardResources(); static Set<Property> getStandardProperties(); static Set<X> getResources(Class<X> type, Class<?>... classes); static Stream<? extends Resource> resources(Class<?>... classes); static Stream<X> resources(Class<X> type, Class<?>... classes); @SuppressWarnings("unchecked") Set<X> get(Class<X> type); Set<Property> getAllProperties(); Set<Resource> getAllClasses(); String getNamespace(); Collection<Resource> getUndefinedClasses(Model model); Collection<Property> getUndefinedProperties(Model model); void assertNoUndefinedTerms(Model model, int undefinedPropertyErrorCode, int undefinedClassErrorCode); }
@Test public void testNoUndefinedPropertiesForEmptyModel() { VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedProperties(ModelFactory.createDefaultModel()).isEmpty()); } @Test public void testNoUndefinedPropertiesIfAllPropertiesDefined() { Model m = JenaModelUtils.loadTurtle("/vocab/defined-properties.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedProperties(m).isEmpty()); } @Test public void testNoUndefinedPropertiesIfAllInOtherNamespace() { Model m = JenaModelUtils.loadTurtle("/vocab/other-namespace-properties.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Assert.assertTrue(vocab.getUndefinedProperties(m).isEmpty()); } @Test public void testFindOneUndefinedProperty() { final Model m = JenaModelUtils.loadTurtle("/vocab/one-undefined-property.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Collection<Property> expected = new HashSet<Property>() {{ this.add(m.createProperty(D2RQ.NS + "price")); }}; Assert.assertEquals(expected, vocab.getUndefinedProperties(m)); } @Test public void testFindTwoUndefinedProperties() { final Model m = JenaModelUtils.loadTurtle("/vocab/two-undefined-properties.ttl"); VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class); Collection<Property> expected = new HashSet<Property>() {{ this.add(m.createProperty(D2RQ.NS + "price")); this.add(m.createProperty(D2RQ.NS + "parallelUniverse")); }}; Assert.assertEquals(expected, vocab.getUndefinedProperties(m)); }
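An illustrative, hedged sketch of the same check done programmatically rather than from a Turtle resource: a property in the D2RQ namespace that is not part of the vocabulary should be reported by getUndefinedProperties. Only the Jena imports are shown; VocabularySummarizer and the D2RQ vocabulary class are assumed to be in scope, and d2rq:price is simply the example term the tests use.

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Resource;

public class UndefinedTermSketch {
    public static void main(String[] args) {
        Model m = ModelFactory.createDefaultModel();
        Resource bridge = m.createResource(D2RQ.NS + "someBridge");
        // d2rq:price is not a defined D2RQ property, so it should show up as undefined.
        bridge.addProperty(m.createProperty(D2RQ.NS + "price"), "2.50");
        VocabularySummarizer vocab = new VocabularySummarizer(D2RQ.class);
        System.out.println(vocab.getUndefinedProperties(m));
    }
}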
MapParser { public static void fixLegacyPropertyBridges(Model m) { Iter.peek(m.listResourcesWithProperty(RDF.type, LegacyD2RQ.ObjectPropertyBridge), r -> { replace(r, D2RQ.column, D2RQ.uriColumn); replace(r, D2RQ.pattern, D2RQ.uriPattern); }).andThen(m.listResourcesWithProperty(RDF.type, LegacyD2RQ.DataPropertyBridge)) .toSet() .forEach(r -> m.add(r, RDF.type, D2RQ.PropertyBridge) .remove(r, RDF.type, LegacyD2RQ.DataPropertyBridge) .remove(r, RDF.type, LegacyD2RQ.ObjectPropertyBridge)); } static String absolutizeURI(String uri); static void insertBase(Model model, String baseURI); static void validate(Model model); static void fixLegacy(Model model); static void fixLegacyReferences(Model m); static void fixLegacyAdditionalProperty(Model m); static void fixLegacyPropertyBridges(Model m); static void checkVocabulary(Model model); static void checkDistinctMapObjects(Model model); }
@Test public void testFixLegacyPropertyBridges() { String ns = "http: String column = "Persons.URI"; String pattern = ns + "@@Persons.Type@@"; Model m = ModelFactory.createDefaultModel() .setNsPrefixes(MappingFactory.MAPPING).setNsPrefix("test", ns); Resource op = m.createResource(ns + "op", MapParser.LegacyD2RQ.ObjectPropertyBridge) .addProperty(D2RQ.column, column).addProperty(D2RQ.pattern, pattern); Resource dp = m.createResource(ns + "dp", MapParser.LegacyD2RQ.DataPropertyBridge); JenaModelUtils.print(m); Mapping map = MappingFactory.wrap(m); Assert.assertEquals(0, map.propertyBridges().count()); long size = m.size(); MapParser.fixLegacyPropertyBridges(m); JenaModelUtils.print(map.asModel()); Assert.assertEquals(size, m.size()); Assert.assertFalse(m.containsResource(MapParser.LegacyD2RQ.DataPropertyBridge)); Assert.assertFalse(m.containsResource(MapParser.LegacyD2RQ.ObjectPropertyBridge)); Assert.assertEquals(2, map.propertyBridges().count()); PropertyBridge opb = MappingUtils.findPropertyBridge(map, op); PropertyBridge dpb = MappingUtils.findPropertyBridge(map, dp); Assert.assertNull(opb.getPattern()); Assert.assertNull(opb.getColumn()); Assert.assertNull(dpb.getPattern()); Assert.assertNull(dpb.getColumn()); Assert.assertEquals(column, opb.getURIColumn()); Assert.assertEquals(pattern, opb.getURIPattern()); Assert.assertNull(dpb.getURIColumn()); Assert.assertNull(dpb.getURIPattern()); MapParser.fixLegacyPropertyBridges(m); Assert.assertEquals(size, m.size()); }
MapParser { public static void fixLegacyAdditionalProperty(Model m) { m.listStatements(null, LegacyD2RQ.additionalProperty, (RDFNode) null) .filterKeep(s -> s.getObject().isResource()) .toSet() .forEach(s -> { Resource classMap = s.getSubject(); Resource additional = s.getResource(); Resource p = additional.getProperty(D2RQ.propertyName).getResource(); RDFNode v = additional.getProperty(D2RQ.propertyValue).getObject(); m.createResource(null, D2RQ.PropertyBridge) .addProperty(D2RQ.belongsToClassMap, classMap) .addProperty(D2RQ.property, p) .addProperty(D2RQ.constantValue, v); classMap.removeAll(LegacyD2RQ.additionalProperty); }); } static String absolutizeURI(String uri); static void insertBase(Model model, String baseURI); static void validate(Model model); static void fixLegacy(Model model); static void fixLegacyReferences(Model m); static void fixLegacyAdditionalProperty(Model m); static void fixLegacyPropertyBridges(Model m); static void checkVocabulary(Model model); static void checkDistinctMapObjects(Model model); }
@Test public void testFixLegacyAdditionalProperties() { String schemaNS = "http: String mapNS = "http: Resource iri = ResourceFactory.createResource("http: Model m = ModelFactory.createDefaultModel() .setNsPrefixes(MappingFactory.MAPPING) .setNsPrefix("map", mapNS) .setNsPrefix("", schemaNS); Resource a = m.createResource(mapNS + "SeeAlsoStatement", D2RQ.AdditionalProperty) .addProperty(D2RQ.propertyName, RDFS.seeAlso) .addProperty(D2RQ.propertyValue, iri); Resource c = m.createResource(mapNS + "PersonsClassMap", D2RQ.ClassMap) .addProperty(D2RQ.clazz, m.createResource(schemaNS + "Person")) .addProperty(MapParser.LegacyD2RQ.additionalProperty, a); JenaModelUtils.print(m); Mapping map = MappingFactory.wrap(m); Assert.assertEquals(1, map.classMaps().count()); Assert.assertEquals(1, map.additionalProperties().count()); Assert.assertEquals(0, map.propertyBridges().count()); long size = m.size(); MapParser.fixLegacyAdditionalProperty(m); Assert.assertEquals(size + 3, m.size()); Assert.assertFalse(m.containsResource(MapParser.LegacyD2RQ.additionalProperty)); JenaModelUtils.print(m); Assert.assertEquals(1, map.classMaps().count()); Assert.assertEquals(1, map.additionalProperties().count()); Assert.assertEquals(1, map.propertyBridges().count()); ClassMap cm = MappingUtils.findClassMap(map, c); Assert.assertEquals(1, cm.propertyBridges().count()); PropertyBridge p = map.propertyBridges().findFirst().orElseThrow(AssertionError::new); Assert.assertEquals(p, cm.propertyBridges().findFirst().orElseThrow(AssertionError::new)); Assert.assertEquals(iri, p.getConstantValue()); Assert.assertEquals(RDFS.seeAlso, p.properties().findFirst().orElseThrow(AbstractMethodError::new)); MapParser.fixLegacyAdditionalProperty(m); Assert.assertEquals(size + 3, m.size()); }
MapParser { public static void fixLegacyReferences(Model m) { inverse(m, LegacyD2RQ.classMap, D2RQ.clazz); inverse(m, LegacyD2RQ.propertyBridge, D2RQ.property); } static String absolutizeURI(String uri); static void insertBase(Model model, String baseURI); static void validate(Model model); static void fixLegacy(Model model); static void fixLegacyReferences(Model m); static void fixLegacyAdditionalProperty(Model m); static void fixLegacyPropertyBridges(Model m); static void checkVocabulary(Model model); static void checkDistinctMapObjects(Model model); }
@Test public void testFixLegacyReferences() { String schemaNS = "http: String mapNS = "http: Model m = ModelFactory.createDefaultModel() .setNsPrefixes(MappingFactory.MAPPING) .setNsPrefix("owl", OWL.NS) .setNsPrefix("map", mapNS) .setNsPrefix("", schemaNS); Resource mc = m.createResource(mapNS + "PersonsClassMap", D2RQ.ClassMap); Resource mp = m.createResource(mapNS + "SeeAlsoBridge", D2RQ.PropertyBridge); Resource sc = m.createResource(schemaNS + "SomeClass", OWL.Class) .addProperty(MapParser.LegacyD2RQ.classMap, mc); Resource sp = m.createResource(schemaNS + "SomeProperty", OWL.DatatypeProperty) .addProperty(MapParser.LegacyD2RQ.propertyBridge, mp); JenaModelUtils.print(m); Mapping map = MappingFactory.wrap(m); ClassMap cm = MappingUtils.findClassMap(map, mc); PropertyBridge pb = MappingUtils.findPropertyBridge(map, mp); Assert.assertEquals(1, map.classMaps().count()); Assert.assertEquals(1, map.propertyBridges().count()); Assert.assertEquals(0, cm.classes().count()); Assert.assertEquals(0, pb.properties().count()); long size = m.size(); MapParser.fixLegacyReferences(m); Assert.assertEquals(size, m.size()); Assert.assertFalse(m.containsResource(MapParser.LegacyD2RQ.classMap)); Assert.assertFalse(m.containsResource(MapParser.LegacyD2RQ.propertyBridge)); JenaModelUtils.print(m); Assert.assertEquals(1, map.classMaps().count()); Assert.assertEquals(1, map.propertyBridges().count()); Assert.assertEquals(1, cm.classes().count()); Assert.assertEquals(1, pb.properties().count()); Assert.assertEquals(sc, cm.classes().findFirst().orElseThrow(AssertionError::new)); Assert.assertEquals(sp, pb.properties().findFirst().orElseThrow(AssertionError::new)); MapParser.fixLegacyReferences(m); Assert.assertEquals(size, m.size()); }
MapParser { public static void checkDistinctMapObjects(Model model) throws D2RQException { ensureAllDistinct(model, D2RQ.Database, D2RQ.ClassMap, D2RQ.PropertyBridge, D2RQ.TranslationTable, D2RQ.Translation); } static String absolutizeURI(String uri); static void insertBase(Model model, String baseURI); static void validate(Model model); static void fixLegacy(Model model); static void fixLegacyReferences(Model m); static void fixLegacyAdditionalProperty(Model m); static void fixLegacyPropertyBridges(Model m); static void checkVocabulary(Model model); static void checkDistinctMapObjects(Model model); }
@Test(expected = D2RQException.class) public void validateDistinctMembers() { Mapping m = MappingFactory.create() .createDatabase("x").getMapping() .createClassMap("x").getMapping(); MapParser.checkDistinctMapObjects(m.asModel()); }
MapParser { public static void checkVocabulary(Model model) throws D2RQException { new VocabularySummarizer(D2RQ.class).assertNoUndefinedTerms(model, D2RQException.MAPPING_UNKNOWN_D2RQ_PROPERTY, D2RQException.MAPPING_UNKNOWN_D2RQ_CLASS); } static String absolutizeURI(String uri); static void insertBase(Model model, String baseURI); static void validate(Model model); static void fixLegacy(Model model); static void fixLegacyReferences(Model m); static void fixLegacyAdditionalProperty(Model m); static void fixLegacyPropertyBridges(Model m); static void checkVocabulary(Model model); static void checkDistinctMapObjects(Model model); }
@Test(expected = D2RQException.class) public void validateExternalResources() { Model m = ModelFactory.createDefaultModel(); m.createResource("x", m.createResource(D2RQ.NS + "XClass")); MapParser.checkVocabulary(m); }
Pattern implements ValueMaker { @Override public String makeValue(ResultRow row) { int index = 0; StringBuilder result = new StringBuilder(this.firstLiteralPart); while (index < this.columns.size()) { Attribute column = columns.get(index); ColumnFunction function = columnFunctions.get(index); String value = row.get(column); if (value == null) { return null; } value = function.encode(value); if (value == null) { return null; } result.append(value); result.append(this.literalParts.get(index)); index++; } return result.toString(); } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testSimple() { Pattern pattern = new Pattern("foo@@table.col1@@baz"); Assert.assertEquals("foo1baz", pattern.makeValue(row("1"))); } @Test public void testNull() { Pattern pattern = new Pattern("foo@@table.col1@@bar@@table.col2@@baz"); Assert.assertNull(pattern.makeValue(row("123"))); } @Test public void testPatternURLEncode() { Pattern p = new Pattern("aaa@@table.col1|urlencode@@bbb"); assertPattern("aaax+ybbb", p.makeValue(row("x y"))); assertPatternValues(p, "aaax+ybbb", Collections.singletonMap("table.col1", "x y")); } @Test public void testPatternEncode() { Pattern p = new Pattern("aaa@@table.col1|encode@@bbb"); assertPattern("aaahello%20world%21bbb", p.makeValue(row("hello world!"))); assertPattern("aaa%3A%3B%3C%3D%3E%3F%40bbb", p.makeValue(row(":;<=>?@"))); assertPattern("aaa%5B%5C%5D%5E%60bbb", p.makeValue(row("[\\]^`"))); assertPatternValues(p, "aaa%7B%7C%7Dbbb", Collections.singletonMap("table.col1", "{|}")); } @Test public void testPatternURLify() { Pattern p = new Pattern("aaa@@table.col1|urlify@@bbb"); assertPattern("aaax_ybbb", p.makeValue(row("x y"))); assertPatternValues(p, "aaax_ybbb", Collections.singletonMap("table.col1", "x y")); } @Test public void testPatternURLifyEscapeUnderscore() { Pattern p = new Pattern("aaa@@table.col1|urlify@@bbb"); assertPattern("aaax%5Fybbb", p.makeValue(row("x_y"))); assertPatternValues(p, "aaax%5Fybbb", Collections.singletonMap("table.col1", "x_y")); }
Pattern implements ValueMaker { public boolean matches(String value) { return !valueExpression(value).isFalse(); } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testMatches() { Pattern p = new Pattern("http: Assert.assertTrue(matches(p, "http: } @Test public void testMatchesTrivialPattern() { Pattern p = new Pattern("foobar"); assertPatternValues(p, "foobar", new HashMap<>()); Assert.assertFalse(matches(p, "fooba")); Assert.assertFalse(matches(p, "foobarb")); Assert.assertFalse(matches(p, "oobar")); Assert.assertFalse(matches(p, "ffoobar")); Assert.assertFalse(matches(p, null)); } @Test public void testMatchesMiniPattern() { Pattern p = new Pattern("@@table.col1@@"); Map<String, String> map = new HashMap<>(); map.put("table.col1", ""); assertPatternValues(p, "", map); map.put("table.col1", "a"); assertPatternValues(p, "a", map); map.put("table.col1", "xyz"); assertPatternValues(p, "xyz", map); Assert.assertFalse(matches(p, null)); } @Test public void testMagicRegexCharactersCauseNoProblems() { Pattern p = new Pattern("(foo|bar)@@table.col1@@"); Map<String, String> map = new HashMap<>(); map.put("table.col1", "1"); assertPatternValues(p, "(foo|bar)1", map); Assert.assertFalse(matches(p, "foo1")); } @Test public void testMatchesOneColumnPattern() { Pattern p = new Pattern("foo@@table.col1@@bar"); Map<String, String> map = new HashMap<>(); map.put("table.col1", "1"); assertPatternValues(p, "foo1bar", map); map.put("table.col1", ""); assertPatternValues(p, "foobar", map); map.put("table.col1", "foofoobarbar"); assertPatternValues(p, "foofoofoobarbarbar", map); Assert.assertFalse(matches(p, "fooba")); Assert.assertFalse(matches(p, "barfoo")); Assert.assertFalse(matches(p, "fobar")); } @Test public void testMatchesTwoColumnPattern() { Pattern p = new Pattern("foo@@table.col1@@-@@table.col2@@baz"); Map<String, String> map = new HashMap<>(); map.put("table.col1", ""); map.put("table.col2", ""); assertPatternValues(p, "foo-baz", map); map.put("table.col1", "1"); map.put("table.col2", "2"); assertPatternValues(p, "foo1-2baz", map); map.put("table.col1", "baz"); map.put("table.col2", "foo"); assertPatternValues(p, "foobaz-foobaz", map); map.put("table.col1", "XYZ"); map.put("table.col2", "XYZ-2"); assertPatternValues(p, "fooXYZ-XYZ-2baz", map); Assert.assertFalse(matches(p, "foo1-")); Assert.assertFalse(matches(p, "foobaz-")); Assert.assertFalse(matches(p, "foo1-2baz3")); } @Test public void testMatchesPatternStartingWithColumn() { Pattern p = new Pattern("@@table.col1@@bar@@table.col2@@baz"); Map<String, String> map = new HashMap<>(); map.put("table.col1", ""); map.put("table.col2", ""); assertPatternValues(p, "barbaz", map); map.put("table.col1", "1"); map.put("table.col2", "2"); assertPatternValues(p, "1bar2baz", map); map.put("table.col1", "baz"); map.put("table.col2", "foo"); assertPatternValues(p, "bazbarfoobaz", map); Assert.assertFalse(matches(p, "1bar")); Assert.assertFalse(matches(p, "bazbar")); Assert.assertFalse(matches(p, "1bar2baz3")); } @Test public void testPatternURLEncodeIllegal() { Pattern p = new Pattern("@@table.col1|urlencode@@"); Assert.assertFalse(matches(p, "%")); }
Conjunction extends Expression { public static Expression create(Collection<Expression> expressions) { Set<Expression> elements = new HashSet<>(expressions.size()); for (Expression expression : expressions) { if (expression.isFalse()) { return Expression.FALSE; } if (expression.isTrue()) { continue; } if (expression instanceof Conjunction) { elements.addAll(((Conjunction) expression).expressions); } else { elements.add(expression); } } if (elements.isEmpty()) { return Expression.TRUE; } if (elements.size() == 1) { return elements.iterator().next(); } return new Conjunction(elements); } private Conjunction(Set<Expression> expressions); static Expression create(Collection<Expression> expressions); @Override boolean isTrue(); @Override boolean isFalse(); @Override Set<Attribute> attributes(); @Override Expression renameAttributes(ColumnRenamer columnRenamer); @Override String toSQL(ConnectedDB database, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); }
@Test public void testEmptyConjunctionIsTrue() { Assert.assertEquals(Expression.TRUE, Conjunction.create(Collections.emptySet())); } @Test public void testSingletonConjunctionIsSelf() { Expression e = SQLExpression.create("foo"); Assert.assertEquals(e, Conjunction.create(Collections.singleton(e))); } @Test public void testTrueExpressionsAreSkipped() { Assert.assertEquals(Expression.TRUE, Conjunction.create(Arrays.asList(Expression.TRUE, Expression.TRUE))); Assert.assertEquals(expr1, Conjunction.create( Arrays.asList(Expression.TRUE, expr1, Expression.TRUE))); Assert.assertEquals(conjunction12, Conjunction.create(Arrays.asList(Expression.TRUE, expr1, Expression.TRUE, expr2))); } @Test public void testFalseCausesFailure() { Assert.assertEquals(Expression.FALSE, Conjunction.create(Collections.singleton(Expression.FALSE))); Assert.assertEquals(Expression.FALSE, Conjunction.create(Arrays.asList(expr1, Expression.FALSE))); } @Test public void testRemoveDuplicates() { Assert.assertEquals(expr1, Conjunction.create(Arrays.asList(expr1, expr1))); } @Test public void testFlatten() { Assert.assertEquals(conjunction123, Conjunction.create(Arrays.asList(conjunction12, expr3))); }
Pattern implements ValueMaker { public Iterator<Object> partsIterator() { return new Iterator<Object>() { private int i = 0; @Override public boolean hasNext() { return i < columns.size() + literalParts.size() + 1; } @Override public Object next() { i++; if (i == 1) { return firstLiteralPart; } else if (i % 2 == 0) { return columns.get(i / 2 - 1); } return literalParts.get(i / 2 - 1); } @Override public void remove() { throw new UnsupportedOperationException(); } }; } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testPartsIteratorSingleLiteral() { Iterator<Object> it = new Pattern("foo").partsIterator(); Assert.assertTrue(it.hasNext()); Assert.assertEquals("foo", it.next()); Assert.assertFalse(it.hasNext()); } @Test public void testPartsIteratorFirstLiteralThenColumn() { Iterator<Object> it = new Pattern("foo@@table.col1@@").partsIterator(); Assert.assertTrue(it.hasNext()); Assert.assertEquals("foo", it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals(col1, it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals("", it.next()); Assert.assertFalse(it.hasNext()); } @Test public void testPartsIteratorFirstColumnThenLiteral() { Iterator<Object> it = new Pattern("@@table.col1@@foo").partsIterator(); Assert.assertTrue(it.hasNext()); Assert.assertEquals("", it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals(col1, it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals("foo", it.next()); Assert.assertFalse(it.hasNext()); } @Test public void testPartsIteratorSeveralColumns() { Iterator<Object> it = new Pattern("foo@@table.col1@@bar@@table.col2@@").partsIterator(); Assert.assertTrue(it.hasNext()); Assert.assertEquals("foo", it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals(col1, it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals("bar", it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals(col2, it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals("", it.next()); Assert.assertFalse(it.hasNext()); } @Test public void testPartsIteratorAdjacentColumns() { Iterator<Object> it = new Pattern("@@table.col1@@@@table.col2@@").partsIterator(); Assert.assertTrue(it.hasNext()); Assert.assertEquals("", it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals(col1, it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals("", it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals(col2, it.next()); Assert.assertTrue(it.hasNext()); Assert.assertEquals("", it.next()); Assert.assertFalse(it.hasNext()); }
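The partsIterator tests enumerate the alternating literal/column decomposition that results from splitting a pattern on the '@@' DELIMITER: literal parts at even positions, column references (optionally carrying a '|function' suffix) at odd positions, and empty literals where two delimiters are adjacent. A standalone sketch of that split, assuming nothing about the Pattern parser beyond the delimiter itself:

import java.util.Arrays;
import java.util.List;

public class PatternSplitSketch {
    public static void main(String[] args) {
        String spec = "foo@@table.col1@@-@@table.col2|urlencode@@baz";
        // Keep trailing empty strings (limit -1) so a pattern ending in "@@"
        // still yields an empty final literal part, as lastLiteralPart() does.
        List<String> parts = Arrays.asList(spec.split("@@", -1));
        for (int i = 0; i < parts.size(); i++) {
            String kind = (i % 2 == 0) ? "literal" : "column";
            System.out.println(kind + ": '" + parts.get(i) + "'");
        }
    }
}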
Pattern implements ValueMaker { @Override public String toString() { return "Pattern(" + this.pattern + ")"; } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testToString() { Assert.assertEquals("Pattern(foo@@table.col1@@)", new Pattern("foo@@table.col1@@").toString()); }
Pattern implements ValueMaker { @Override public int hashCode() { return this.pattern.hashCode(); } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testSamePatternsAreEqual() { Pattern p1 = new Pattern("foo@@table.col1@@"); Pattern p2 = new Pattern("foo@@table.col1@@"); Assert.assertEquals(p1, p2); Assert.assertEquals(p2, p1); Assert.assertEquals(p1.hashCode(), p2.hashCode()); } @Test public void testPatternsWithDifferentColumnsAreNotEqual() { Pattern p1 = new Pattern("foo@@table.col1@@"); Pattern p2 = new Pattern("foo@@table.col2@@"); Assert.assertNotEquals(p1, p2); Assert.assertNotEquals(p2, p1); Assert.assertNotEquals(p1.hashCode(), p2.hashCode()); } @Test public void testPatternsWithDifferentLiteralPartsAreNotEqual() { Pattern p1 = new Pattern("foo@@table.col1@@"); Pattern p2 = new Pattern("bar@@table.col1@@"); Assert.assertNotEquals(p1, p2); Assert.assertNotEquals(p2, p1); Assert.assertNotEquals(p1.hashCode(), p2.hashCode()); }
Pattern implements ValueMaker { public boolean isEquivalentTo(Pattern p) { return this.firstLiteralPart.equals(p.firstLiteralPart) && this.literalParts.equals(p.literalParts) && this.columnFunctions.equals(p.columnFunctions); } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testIdenticalPatternsAreCompatible() { Pattern p1 = new Pattern("foo@@table.col1@@"); Pattern p2 = new Pattern("foo@@table.col1@@"); Assert.assertTrue(p1.isEquivalentTo(p2)); Assert.assertTrue(p2.isEquivalentTo(p1)); } @Test public void testPatternsWithDifferentColumnNamesAreCompatible() { Pattern p1 = new Pattern("foo@@table.col1@@"); Pattern p2 = new Pattern("foo@@table.col2@@"); Assert.assertTrue(p1.isEquivalentTo(p2)); Assert.assertTrue(p2.isEquivalentTo(p1)); } @Test public void testPatternsWithDifferentLiteralPartsAreNotCompatible() { Pattern p1 = new Pattern("foo@@table.col1@@"); Pattern p2 = new Pattern("bar@@table.col1@@"); Assert.assertFalse(p1.isEquivalentTo(p2)); Assert.assertFalse(p2.isEquivalentTo(p1)); } @Test public void testMultiColumnPatternsWithDifferentLiteralPartsAreNotCompatible() { Pattern p1 = new Pattern("foo@@table.col1@@bar@@table.col2@@abc"); Pattern p2 = new Pattern("foo@@table.col1@@bar@@table.col2@@xyz"); Assert.assertFalse(p1.isEquivalentTo(p2)); Assert.assertFalse(p2.isEquivalentTo(p1)); }
Pattern implements ValueMaker { public boolean literalPartsMatchRegex(String regex) { if (!this.firstLiteralPart.matches(regex)) { return false; } for (String literalPart : literalParts) { if (!literalPart.matches(regex)) { return false; } } return true; } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testLiteralPatternsMatchTrivialRegex() { Assert.assertTrue(new Pattern("asdf").literalPartsMatchRegex(".*")); } @Test public void testLiteralPatternsDontMatchTrivialRegex() { Assert.assertFalse(new Pattern("asdf").literalPartsMatchRegex("foo")); } @Test public void testLiteralPatternRegexIsAnchored() { Assert.assertFalse(new Pattern("aaa").literalPartsMatchRegex("b*")); } @Test public void testLiteralPatternRegexMultipleParts() { Assert.assertTrue(new Pattern("aaa@@aaa.aaa@@aaa").literalPartsMatchRegex("aaa")); } @Test public void testLiteralPatternRegexMatchesOnlyLiteralParts() { Assert.assertTrue(new Pattern("aaa@@bbb.ccc@@aaa").literalPartsMatchRegex("a+")); }
Conjunction extends Expression { @Override public String toString() { List<String> fragments = new ArrayList<>(this.expressions.size()); for (Expression expression : expressions) { fragments.add(expression.toString()); } Collections.sort(fragments); StringBuilder result = new StringBuilder("Conjunction("); Iterator<String> it = fragments.iterator(); while (it.hasNext()) { String fragment = it.next(); result.append(fragment); if (it.hasNext()) { result.append(", "); } } result.append(")"); return result.toString(); } private Conjunction(Set<Expression> expressions); static Expression create(Collection<Expression> expressions); @Override boolean isTrue(); @Override boolean isFalse(); @Override Set<Attribute> attributes(); @Override Expression renameAttributes(ColumnRenamer columnRenamer); @Override String toSQL(ConnectedDB database, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); }
@Test public void testToString() { Assert.assertEquals("Conjunction(SQL(papers.publish = 1), SQL(papers.rating > 4))", conjunction12.toString()); }
Pattern implements ValueMaker { public String firstLiteralPart() { return firstLiteralPart; } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testTrivialPatternFirstPart() { Assert.assertEquals("aaa", new Pattern("aaa").firstLiteralPart()); } @Test public void testEmptyFirstPart() { Assert.assertEquals("", new Pattern("@@table.col1@@aaa").firstLiteralPart()); }
Pattern implements ValueMaker { public String lastLiteralPart() { if (literalParts.isEmpty()) { return firstLiteralPart; } return literalParts.get(literalParts.size() - 1); } Pattern(String pattern); String firstLiteralPart(); String lastLiteralPart(); boolean literalPartsMatchRegex(String regex); List<Attribute> attributes(); @Override void describeSelf(NodeSetFilter c); boolean matches(String value); @Override Expression valueExpression(String value); @Override Set<ProjectionSpec> projectionSpecs(); @Override String makeValue(ResultRow row); @Override List<OrderSpec> orderSpecs(boolean ascending); @Override String toString(); @Override boolean equals(Object otherObject); @Override int hashCode(); boolean isEquivalentTo(Pattern p); @Override ValueMaker renameAttributes(ColumnRenamer renames); Iterator<Object> partsIterator(); Expression toExpression(); boolean usesColumnFunctions(); final static String DELIMITER; }
@Test public void testTrivialPatternLastPart() { Assert.assertEquals("aaa", new Pattern("aaa").lastLiteralPart()); } @Test public void testEmptyLastPart() { Assert.assertEquals("", new Pattern("aaa@@table.col1@@").lastLiteralPart()); }
Conjunction extends Expression { @Override public int hashCode() { return this.expressions.hashCode(); } private Conjunction(Set<Expression> expressions); static Expression create(Collection<Expression> expressions); @Override boolean isTrue(); @Override boolean isFalse(); @Override Set<Attribute> attributes(); @Override Expression renameAttributes(ColumnRenamer columnRenamer); @Override String toSQL(ConnectedDB database, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); }
@Test public void testOrderDoesNotAffectEquality() { Assert.assertEquals(conjunction12, conjunction21); Assert.assertEquals(conjunction12.hashCode(), conjunction21.hashCode()); }
Concatenation extends Expression { public static Expression create(List<Expression> expressions) { List<Expression> nonEmpty = new ArrayList<>(expressions.size()); for (Expression expression : expressions) { if (expression instanceof Constant && "".equals(((Constant) expression).value())) { continue; } nonEmpty.add(expression); } if (nonEmpty.isEmpty()) { return new Constant(""); } if (nonEmpty.size() == 1) { return nonEmpty.get(0); } return new Concatenation(nonEmpty); } private Concatenation(List<Expression> parts); static Expression create(List<Expression> expressions); @Override Set<Attribute> attributes(); @Override boolean isFalse(); @Override boolean isTrue(); @Override Expression renameAttributes(ColumnRenamer columnRenamer); @Override String toSQL(ConnectedDB database, AliasMap aliases); @Override boolean equals(Object other); @Override int hashCode(); @Override String toString(); }
@Test public void testCreateEmpty() { Assert.assertEquals(new Constant(""), Concatenation.create(Collections.emptyList())); } @Test public void testCreateOnePart() { Expression expr = new AttributeExpr(new Attribute(null, "table", "col")); Assert.assertEquals(expr, Concatenation.create(Collections.singletonList(expr))); } @Test public void testFilterEmptyParts() { Expression empty = new Constant(""); Expression expr1 = new Constant("aaa"); Assert.assertEquals(expr1, Concatenation.create(Arrays.asList( empty, empty, expr1, empty))); }
SQLExpression extends Expression { public static Expression create(String sql) { sql = sql.trim(); if ("1".equals(sql)) { return Expression.TRUE; } if ("0".equals(sql)) { return Expression.FALSE; } return new SQLExpression(sql); } private SQLExpression(String expression); static Expression create(String sql); @Override boolean isTrue(); @Override boolean isFalse(); @Override Set<Attribute> attributes(); @Override Expression renameAttributes(ColumnRenamer columnRenamer); @Override String toSQL(ConnectedDB database, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); String getExpression(); }
@Test public void testCreate() { Expression e = SQLExpression.create("papers.publish = 1"); Assert.assertEquals("SQL(papers.publish = 1)", e.toString()); Assert.assertFalse(e.isTrue()); Assert.assertFalse(e.isFalse()); }
SQLExpression extends Expression { @Override public String toString() { return "SQL(" + this.expression + ")"; } private SQLExpression(String expression); static Expression create(String sql); @Override boolean isTrue(); @Override boolean isFalse(); @Override Set<Attribute> attributes(); @Override Expression renameAttributes(ColumnRenamer columnRenamer); @Override String toSQL(ConnectedDB database, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); String getExpression(); }
@Test public void testToString() { Expression e = SQLExpression.create("papers.publish = 1"); Assert.assertEquals("SQL(papers.publish = 1)", e.toString()); }
Expression { public abstract String toSQL(ConnectedDB database, AliasMap aliases); abstract boolean isTrue(); abstract boolean isFalse(); abstract Set<Attribute> attributes(); abstract Expression renameAttributes(ColumnRenamer columnRenamer); abstract String toSQL(ConnectedDB database, AliasMap aliases); Expression and(Expression other); Expression or(Expression other); static final Expression TRUE; static final Expression FALSE; }
@Test public void testConstantToSQL() { Assert.assertEquals("'foo'", new Constant("foo").toSQL(DummyDB.create(), AliasMap.NO_ALIASES)); } @Test public void testConstantToSQLWithType() { Attribute attribute = SQL.parseAttribute("table.col1"); ConnectedDB db = DummyDB.create(Collections.singletonMap("table.col1", GenericType.NUMERIC)); Assert.assertEquals("42", new Constant("42", attribute).toSQL(db, AliasMap.NO_ALIASES)); } @Test public void testConstantToSQLWithTypeAndAlias() { Attribute aliasedAttribute = SQL.parseAttribute("alias.col1"); ConnectedDB db = DummyDB.create(Collections.singletonMap("table.col1", GenericType.NUMERIC)); Assert.assertEquals("42", new Constant("42", aliasedAttribute).toSQL(db, aliases)); }
Expression { public abstract Expression renameAttributes(ColumnRenamer columnRenamer); abstract boolean isTrue(); abstract boolean isFalse(); abstract Set<Attribute> attributes(); abstract Expression renameAttributes(ColumnRenamer columnRenamer); abstract String toSQL(ConnectedDB database, AliasMap aliases); Expression and(Expression other); Expression or(Expression other); static final Expression TRUE; static final Expression FALSE; }
@Test public void testConstantTypeAttributeIsRenamed() { Attribute attribute = SQL.parseAttribute("table.col1"); Assert.assertEquals("Constant(42@alias.col1)", new Constant("42", attribute).renameAttributes(aliases).toString()); }
Relation implements RelationalOperators { public boolean isTrivial() { return projections().isEmpty() && condition().isTrue() && joinConditions().isEmpty(); } static Relation createSimpleRelation( ConnectedDB database, Attribute[] attributes); abstract ConnectedDB database(); abstract AliasMap aliases(); abstract Set<Join> joinConditions(); abstract Expression condition(); abstract Expression softCondition(); abstract Set<ProjectionSpec> projections(); abstract boolean isUnique(); abstract List<OrderSpec> orderSpecs(); abstract int limit(); abstract int limitInverse(); Set<Attribute> allKnownAttributes(); Set<RelationName> tables(); boolean isTrivial(); static int combineLimits(int limit1, int limit2); final static int NO_LIMIT; static Relation EMPTY; static Relation TRUE; }
@Test public void testTrueRelationIsTrivial() { Assert.assertTrue(Relation.TRUE.isTrivial()); } @Test public void testQueryWithSelectColumnsIsNotTrivial() { Assert.assertFalse(rel1.isTrivial()); }
Attribute implements ProjectionSpec { @Override public String toString() { return "@@" + this.qualifiedName + "@@"; } Attribute(String schemaName, String tableName, String attributeName); Attribute(RelationName relationName, String attributeName); String qualifiedName(); @Override String toSQL(ConnectedDB database, AliasMap aliases); String attributeName(); String tableName(); RelationName relationName(); String schemaName(); @Override Set<Attribute> requiredAttributes(); Expression selectValue(String value); @Override ProjectionSpec renameAttributes(ColumnRenamer renamer); @Override Expression toExpression(); @Override Expression notNullExpression(ConnectedDB db, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); @Override int compareTo(ProjectionSpec other); }
@Test public void testAttributeToString() { Assert.assertEquals("@@foo.bar@@", new Attribute(null, "foo", "bar").toString()); Assert.assertEquals("@@schema.foo.bar@@", new Attribute("schema", "foo", "bar").toString()); } @Test public void testRelationNameToString() { Assert.assertEquals("table", new RelationName(null, "table").toString()); Assert.assertEquals("schema.table", new RelationName("schema", "table").toString()); }
Attribute implements ProjectionSpec { @Override public int compareTo(ProjectionSpec other) { if (!(other instanceof Attribute)) { return -1; } Attribute otherAttribute = (Attribute) other; int i = this.relationName.compareTo(otherAttribute.relationName); if (i != 0) { return i; } return this.attributeName.compareTo(otherAttribute.attributeName); } Attribute(String schemaName, String tableName, String attributeName); Attribute(RelationName relationName, String attributeName); String qualifiedName(); @Override String toSQL(ConnectedDB database, AliasMap aliases); String attributeName(); String tableName(); RelationName relationName(); String schemaName(); @Override Set<Attribute> requiredAttributes(); Expression selectValue(String value); @Override ProjectionSpec renameAttributes(ColumnRenamer renamer); @Override Expression toExpression(); @Override Expression notNullExpression(ConnectedDB db, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); @Override int compareTo(ProjectionSpec other); }
@Test public void testCompareSameAttribute() { Assert.assertEquals(0, fooCol1.compareTo(fooCol1)); } @Test public void testCompareSameTableDifferentAttribute() { Assert.assertTrue(fooCol1.compareTo(fooCol2) < 0); Assert.assertTrue(fooCol2.compareTo(fooCol1) > 0); } @Test public void testCompareSameAttributeDifferentTable() { Assert.assertTrue(barCol1.compareTo(fooCol1) < 0); Assert.assertTrue(fooCol1.compareTo(barCol2) > 0); } @Test public void testCompareDifferentAttributeDifferentTable() { Assert.assertTrue(barCol2.compareTo(fooCol1) < 0); Assert.assertTrue(fooCol1.compareTo(barCol2) > 0); } @Test public void testNoSchemaAttributeSmallerThanSchemaAttribute() { Attribute noSchema = new Attribute(null, "z", "col"); Attribute schema = new Attribute("schema", "a", "col"); Assert.assertTrue(noSchema.compareTo(schema) < 0); Assert.assertTrue(schema.compareTo(noSchema) > 0); } @Test public void testCompareRelationNamesDifferentSchema() { Assert.assertTrue(xTable1.compareTo(yTable1) < 0); Assert.assertTrue(yTable1.compareTo(xTable1) > 0); } @Test public void testCompareRelationNamesSameSchema() { Assert.assertTrue(table1.compareTo(table2) < 0); Assert.assertTrue(table2.compareTo(table1) > 0); Assert.assertTrue(xTable1.compareTo(xTable2) < 0); Assert.assertTrue(xTable2.compareTo(xTable1) > 0); } @Test public void testNoSchemaRelationNameSmallerSchemaRelationName() { RelationName noSchema = new RelationName(null, "z"); RelationName schema = new RelationName("schema", "a"); Assert.assertTrue(noSchema.compareTo(schema) < 0); Assert.assertTrue(schema.compareTo(noSchema) > 0); } @Test public void testCompareSameRelationName() { Assert.assertEquals(0, table1.compareTo(table1)); Assert.assertEquals(0, xTable1.compareTo(xTable1)); }
Attribute implements ProjectionSpec { @Override public int hashCode() { return this.qualifiedName.hashCode(); } Attribute(String schemaName, String tableName, String attributeName); Attribute(RelationName relationName, String attributeName); String qualifiedName(); @Override String toSQL(ConnectedDB database, AliasMap aliases); String attributeName(); String tableName(); RelationName relationName(); String schemaName(); @Override Set<Attribute> requiredAttributes(); Expression selectValue(String value); @Override ProjectionSpec renameAttributes(ColumnRenamer renamer); @Override Expression toExpression(); @Override Expression notNullExpression(ConnectedDB db, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); @Override int compareTo(ProjectionSpec other); }
@Test public void testSameRelationNameIsEqual() { Assert.assertEquals(table1, table1b); Assert.assertEquals(table1b, table1); Assert.assertEquals(table1.hashCode(), table1b.hashCode()); } @Test public void testDifferentRelationNamesAreNotEqual() { Assert.assertNotEquals(table1, table2); Assert.assertNotEquals(table2, table1); Assert.assertNotEquals(table1.hashCode(), table2.hashCode()); } @Test public void testSameRelationAndSchemaNameIsEqual() { Assert.assertEquals(table1, table1b); Assert.assertEquals(table1b, table1); Assert.assertEquals(table1.hashCode(), table1b.hashCode()); } @Test public void testDifferentSchemaNamesAreNotEqual() { Assert.assertNotEquals(xTable1, yTable1); Assert.assertNotEquals(yTable1, xTable1); Assert.assertNotEquals(xTable1.hashCode(), yTable1.hashCode()); } @Test public void testSchemaAndNoSchemaAreNotEqual() { Assert.assertNotEquals(xTable1, table1); Assert.assertNotEquals(table1, xTable1); Assert.assertNotEquals(table1.hashCode(), xTable1.hashCode()); }
Attribute implements ProjectionSpec { public String qualifiedName() { return this.qualifiedName; } Attribute(String schemaName, String tableName, String attributeName); Attribute(RelationName relationName, String attributeName); String qualifiedName(); @Override String toSQL(ConnectedDB database, AliasMap aliases); String attributeName(); String tableName(); RelationName relationName(); String schemaName(); @Override Set<Attribute> requiredAttributes(); Expression selectValue(String value); @Override ProjectionSpec renameAttributes(ColumnRenamer renamer); @Override Expression toExpression(); @Override Expression notNullExpression(ConnectedDB db, AliasMap aliases); @Override String toString(); @Override boolean equals(Object other); @Override int hashCode(); @Override int compareTo(ProjectionSpec other); }
@Test public void testRelationNameWithPrefixNoSchema() { Assert.assertEquals("T42_table1", table1.withPrefix(42).qualifiedName()); } @Test public void testRelationNameWithPrefixWithSchema() { Assert.assertEquals("T42_x_table1", xTable1.withPrefix(42).qualifiedName()); }
ColumnRenamer { public abstract Attribute applyTo(Attribute original); abstract Attribute applyTo(Attribute original); Join applyTo(Join original); Expression applyTo(Expression original); Set<Join> applyToJoinSet(Set<Join> joins); ProjectionSpec applyTo(ProjectionSpec original); Set<ProjectionSpec> applyToProjectionSet(Set<ProjectionSpec> projections); List<OrderSpec> applyTo(List<OrderSpec> orderSpecs); abstract AliasMap applyTo(AliasMap aliases); final static ColumnRenamer NULL; }
@Test public void testApplyToUnmappedColumnReturnsSameColumn() { Assert.assertEquals(col3, this.col1ToCol2.applyTo(col3)); } @Test public void testApplyToMappedColumnReturnsNewName() { Assert.assertEquals(col2, this.col1ToCol2.applyTo(col1)); } @Test public void testApplyToNewNameReturnsNewName() { Assert.assertEquals(col2, this.col1ToCol2.applyTo(col2)); } @Test public void testApplyToExpressionReplacesMappedColumns() { Expression e = SQLExpression.create("foo.col1=foo.col3"); Assert.assertEquals(SQLExpression.create("foo.col2=foo.col3"), this.col1ToCol2.applyTo(e)); } @Test public void testApplyToAliasMapReturnsOriginal() { AliasMap aliases = new AliasMap(Collections.singleton(new Alias( new RelationName(null, "foo"), new RelationName(null, "bar")))); Assert.assertEquals(aliases, this.col1ToCol2.applyTo(aliases)); } @Test public void testRenameWithSchema() { Attribute foo_c1 = new Attribute("schema", "foo", "col1"); Attribute bar_c2 = new Attribute("schema", "bar", "col2"); ColumnRenamer renamer = new ColumnRenamerMap(Collections.singletonMap(foo_c1, bar_c2)); Assert.assertEquals(bar_c2, renamer.applyTo(foo_c1)); Assert.assertEquals(col1, renamer.applyTo(col1)); }
AliasMap extends ColumnRenamer { public RelationName applyTo(RelationName original) { if (!hasAlias(original)) { return original; } Alias alias = this.byOriginal.get(original); return alias.alias(); } AliasMap(Collection<Alias> aliases); static AliasMap create1(RelationName original, RelationName alias); boolean isAlias(RelationName name); boolean hasAlias(RelationName original); RelationName applyTo(RelationName original); RelationName originalOf(RelationName name); Attribute applyTo(Attribute attribute); Attribute originalOf(Attribute attribute); Alias applyTo(Alias alias); Alias originalOf(Alias alias); @Override Join applyTo(Join join); @Override AliasMap applyTo(AliasMap other); @Override boolean equals(Object other); @Override int hashCode(); @Override String toString(); static final AliasMap NO_ALIASES; }
@Test public void testApplyToColumn() { Assert.assertEquals(baz_col1, this.fooAsBarMap.applyTo(baz_col1)); Assert.assertEquals(bar_col1, this.fooAsBarMap.applyTo(foo_col1)); Assert.assertEquals(bar_col1, this.fooAsBarMap.applyTo(bar_col1)); } @Test public void testApplyToSQLExpression() { Assert.assertEquals(SQLExpression.create("bar.col1 = 1"), fooAsBarMap.applyTo(SQLExpression.create("foo.col1 = 1"))); } @Test public void testApplyToAliasEmpty() { Assert.assertEquals(fooAsBar, AliasMap.NO_ALIASES.applyTo(fooAsBar)); } @Test public void testApplyToAlias() { Assert.assertEquals(new Alias(baz, bar), fooAsBarMap.applyTo(new Alias(baz, foo))); } @Test public void testWithSchema() { RelationName table = new RelationName(null, "table"); RelationName schema_table = new RelationName("schema", "table"); RelationName schema_alias = new RelationName("schema", "alias"); AliasMap m = new AliasMap(Collections.singleton(new Alias(schema_table, schema_alias))); Assert.assertEquals(schema_alias, m.applyTo(schema_table)); Assert.assertEquals(table, m.applyTo(table)); }
AliasMap extends ColumnRenamer { public RelationName originalOf(RelationName name) { if (!isAlias(name)) { return name; } Alias alias = this.byAlias.get(name); return alias.original(); } AliasMap(Collection<Alias> aliases); static AliasMap create1(RelationName original, RelationName alias); boolean isAlias(RelationName name); boolean hasAlias(RelationName original); RelationName applyTo(RelationName original); RelationName originalOf(RelationName name); Attribute applyTo(Attribute attribute); Attribute originalOf(Attribute attribute); Alias applyTo(Alias alias); Alias originalOf(Alias alias); @Override Join applyTo(Join join); @Override AliasMap applyTo(AliasMap other); @Override boolean equals(Object other); @Override int hashCode(); @Override String toString(); static final AliasMap NO_ALIASES; }
@Test public void testOriginalOfColumn() { Assert.assertEquals(baz_col1, this.fooAsBarMap.originalOf(baz_col1)); Assert.assertEquals(foo_col1, this.fooAsBarMap.originalOf(foo_col1)); Assert.assertEquals(foo_col1, this.fooAsBarMap.originalOf(bar_col1)); } @Test public void testOriginalOfAliasEmpty() { Assert.assertEquals(fooAsBar, AliasMap.NO_ALIASES.originalOf(fooAsBar)); } @Test public void testOriginalOfAlias() { Assert.assertEquals(fooAsBaz, fooAsBarMap.originalOf(new Alias(bar, baz))); }
AliasMap extends ColumnRenamer { @Override public int hashCode() { return this.byAlias.hashCode(); } AliasMap(Collection<Alias> aliases); static AliasMap create1(RelationName original, RelationName alias); boolean isAlias(RelationName name); boolean hasAlias(RelationName original); RelationName applyTo(RelationName original); RelationName originalOf(RelationName name); Attribute applyTo(Attribute attribute); Attribute originalOf(Attribute attribute); Alias applyTo(Alias alias); Alias originalOf(Alias alias); @Override Join applyTo(Join join); @Override AliasMap applyTo(AliasMap other); @Override boolean equals(Object other); @Override int hashCode(); @Override String toString(); static final AliasMap NO_ALIASES; }
@Test public void testEqualMapsHaveSameHashCode() { AliasMap m1 = new AliasMap(new ArrayList<>()); AliasMap m2 = new AliasMap(new ArrayList<>()); Assert.assertEquals(m1.hashCode(), m2.hashCode()); } @Test public void testAliasEquals() { Alias fooAsBar2 = new Alias(foo, bar); Assert.assertEquals(fooAsBar, fooAsBar2); Assert.assertEquals(fooAsBar2, fooAsBar); Assert.assertEquals(fooAsBar.hashCode(), fooAsBar2.hashCode()); } @Test public void testAliasNotEquals() { Assert.assertNotEquals(fooAsBar, fooAsBaz); Assert.assertNotEquals(fooAsBaz, fooAsBar); Assert.assertNotEquals(fooAsBar, bazAsBar); Assert.assertNotEquals(bazAsBar, fooAsBar); Assert.assertNotEquals(fooAsBar.hashCode(), fooAsBaz.hashCode()); Assert.assertNotEquals(fooAsBar.hashCode(), bazAsBar.hashCode()); }
AliasMap extends ColumnRenamer { @Override public String toString() { StringBuilder result = new StringBuilder(); result.append("AliasMap("); List<RelationName> tables = new ArrayList<>(this.byAlias.keySet()); Collections.sort(tables); Iterator<RelationName> it = tables.iterator(); while (it.hasNext()) { result.append(this.byAlias.get(it.next())); if (it.hasNext()) { result.append(", "); } } result.append(")"); return result.toString(); } AliasMap(Collection<Alias> aliases); static AliasMap create1(RelationName original, RelationName alias); boolean isAlias(RelationName name); boolean hasAlias(RelationName original); RelationName applyTo(RelationName original); RelationName originalOf(RelationName name); Attribute applyTo(Attribute attribute); Attribute originalOf(Attribute attribute); Alias applyTo(Alias alias); Alias originalOf(Alias alias); @Override Join applyTo(Join join); @Override AliasMap applyTo(AliasMap other); @Override boolean equals(Object other); @Override int hashCode(); @Override String toString(); static final AliasMap NO_ALIASES; }
@Test public void testAliasToString() { Assert.assertEquals("foo AS bar", fooAsBar.toString()); } @Test public void testToStringEmpty() { Assert.assertEquals("AliasMap()", AliasMap.NO_ALIASES.toString()); } @Test public void testToStringOneAlias() { Assert.assertEquals("AliasMap(foo AS bar)", fooAsBarMap.toString()); } @Test public void testToStringTwoAliases() { Collection<Alias> aliases = new ArrayList<>(); aliases.add(fooAsBar); aliases.add(new Alias(new RelationName(null, "abc"), new RelationName(null, "xyz"))); Assert.assertEquals("AliasMap(foo AS bar, abc AS xyz)", new AliasMap(aliases).toString()); }
Join { @Override public String toString() { StringBuilder result = new StringBuilder("Join("); Iterator<Attribute> it = this.attributes1.iterator(); while (it.hasNext()) { Attribute attribute = it.next(); result.append(attribute.qualifiedName()); if (it.hasNext()) { result.append(", "); } } result.append(joinDirection == DIRECTION_UNDIRECTED ? " <=> " : (joinDirection == DIRECTION_RIGHT ? " => " : " <= ")); it = this.attributes2.iterator(); while (it.hasNext()) { Attribute attribute = it.next(); result.append(attribute.qualifiedName()); if (it.hasNext()) { result.append(", "); } } result.append(")"); return result.toString(); } Join(Attribute oneSide, Attribute otherSide, int joinDirection); Join(List<Attribute> oneSideAttributes, List<Attribute> otherSideAttributes, int joinDirection); boolean isSameTable(); boolean containsColumn(Attribute column); RelationName table1(); RelationName table2(); List<Attribute> attributes1(); List<Attribute> attributes2(); int joinDirection(); Attribute equalAttribute(Attribute column); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object otherObject); Join renameColumns(ColumnRenamer columnRenamer); static final int DIRECTION_UNDIRECTED; static final int DIRECTION_LEFT; static final int DIRECTION_RIGHT; static final String[] joinOperators; }
@Test public void testToString() { Join join = new Join(table1foo, table2foo, Join.DIRECTION_UNDIRECTED); Assert.assertEquals("Join(table1.foo <=> table2.foo)", join.toString()); } @Test public void testToStringRetainsTableOrder() { Join join = new Join(table2foo, table1foo, Join.DIRECTION_RIGHT); Assert.assertEquals("Join(table2.foo => table1.foo)", join.toString()); } @Test public void testToStringRetainsAttributeOrder() { Join join = new Join( Arrays.asList(table1foo, table1bar), Arrays.asList(table2bar, table2foo), Join.DIRECTION_RIGHT); Assert.assertEquals("Join(table1.foo, table1.bar => table2.bar, table2.foo)", join.toString()); }
Join { public Join renameColumns(ColumnRenamer columnRenamer) { List<Attribute> oneSide = new ArrayList<>(); List<Attribute> otherSide = new ArrayList<>(); for (Attribute column : attributes1) { oneSide.add(columnRenamer.applyTo(column)); otherSide.add(columnRenamer.applyTo(equalAttribute(column))); } return new Join(oneSide, otherSide, joinDirection); } Join(Attribute oneSide, Attribute otherSide, int joinDirection); Join(List<Attribute> oneSideAttributes, List<Attribute> otherSideAttributes, int joinDirection); boolean isSameTable(); boolean containsColumn(Attribute column); RelationName table1(); RelationName table2(); List<Attribute> attributes1(); List<Attribute> attributes2(); int joinDirection(); Attribute equalAttribute(Attribute column); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object otherObject); Join renameColumns(ColumnRenamer columnRenamer); static final int DIRECTION_UNDIRECTED; static final int DIRECTION_LEFT; static final int DIRECTION_RIGHT; static final String[] joinOperators; }
@Test public void testRenameColumns() { ColumnRenamer renamer = new ColumnRenamerMap(Collections.singletonMap(table1foo, table1bar)); Join join = new Join(table1foo, table2foo, Join.DIRECTION_RIGHT); Assert.assertEquals("Join(table1.bar => table2.foo)", join.renameColumns(renamer).toString()); }
Join { public RelationName table1() { return this.table1; } Join(Attribute oneSide, Attribute otherSide, int joinDirection); Join(List<Attribute> oneSideAttributes, List<Attribute> otherSideAttributes, int joinDirection); boolean isSameTable(); boolean containsColumn(Attribute column); RelationName table1(); RelationName table2(); List<Attribute> attributes1(); List<Attribute> attributes2(); int joinDirection(); Attribute equalAttribute(Attribute column); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object otherObject); Join renameColumns(ColumnRenamer columnRenamer); static final int DIRECTION_UNDIRECTED; static final int DIRECTION_LEFT; static final int DIRECTION_RIGHT; static final String[] joinOperators; }
@Test public void testTableOrderIsRetained() { Assert.assertEquals(table1, new Join(table1foo, table2foo, Join.DIRECTION_RIGHT).table1()); Assert.assertEquals(table2, new Join(table2foo, table1foo, Join.DIRECTION_RIGHT).table1()); }
Join { @Override public int hashCode() { switch (this.joinDirection) { case DIRECTION_RIGHT: return 31 * (this.attributes1.hashCode() ^ this.attributes2.hashCode()); case DIRECTION_LEFT: return 31 * (this.attributes2.hashCode() ^ this.attributes1.hashCode()); case DIRECTION_UNDIRECTED: default: return 31 * (this.attributes1.hashCode() ^ this.attributes2.hashCode()) + 1; } } Join(Attribute oneSide, Attribute otherSide, int joinDirection); Join(List<Attribute> oneSideAttributes, List<Attribute> otherSideAttributes, int joinDirection); boolean isSameTable(); boolean containsColumn(Attribute column); RelationName table1(); RelationName table2(); List<Attribute> attributes1(); List<Attribute> attributes2(); int joinDirection(); Attribute equalAttribute(Attribute column); @Override String toString(); @Override int hashCode(); @Override boolean equals(Object otherObject); Join renameColumns(ColumnRenamer columnRenamer); static final int DIRECTION_UNDIRECTED; static final int DIRECTION_LEFT; static final int DIRECTION_RIGHT; static final String[] joinOperators; }
@Test public void testJoinOverSameAttributesIsEqual() { Join j1 = new Join(table1foo, table2foo, Join.DIRECTION_RIGHT); Join j2 = new Join(table1foo, table2foo, Join.DIRECTION_RIGHT); Assert.assertEquals(j1, j2); Assert.assertEquals(j2, j1); Assert.assertEquals(j1.hashCode(), j2.hashCode()); } @Test public void testSideOrderDoesNotAffectEquality1() { Join j1 = new Join(table1foo, table2foo, Join.DIRECTION_RIGHT); Join j2 = new Join(table2foo, table1foo, Join.DIRECTION_LEFT); Assert.assertEquals(j1, j2); Assert.assertEquals(j2, j1); Assert.assertEquals(j1.hashCode(), j2.hashCode()); } @Test public void testSideOrderDoesNotAffectEquality2() { Join j1 = new Join(table1foo, table2foo, Join.DIRECTION_UNDIRECTED); Join j2 = new Join(table2foo, table1foo, Join.DIRECTION_UNDIRECTED); Assert.assertEquals(j1, j2); Assert.assertEquals(j2, j1); Assert.assertEquals(j1.hashCode(), j2.hashCode()); } @Test public void testDifferentAttributesNotEqual() { Join j1 = new Join(table1foo, table2foo, Join.DIRECTION_RIGHT); Join j2 = new Join(table1foo, table2bar, Join.DIRECTION_RIGHT); Assert.assertNotEquals(j1, j2); Assert.assertNotEquals(j2, j1); Assert.assertNotEquals(j1.hashCode(), j2.hashCode()); } @Test public void testDifferentDirectionsNotEqual() { Join j1 = new Join(table1foo, table2foo, Join.DIRECTION_RIGHT); Join j2 = new Join(table1foo, table2foo, Join.DIRECTION_UNDIRECTED); Assert.assertNotEquals(j1, j2); Assert.assertNotEquals(j2, j1); Assert.assertNotEquals(j1.hashCode(), j2.hashCode()); }
FilterParser { public List<List<IdentifierMatcher>> parse() throws ParseException { eatSeparators(); while (!atEnd()) { List<IdentifierMatcher> list = new ArrayList<>(); while (!atEnd()) { if (current() == '/') { list.add(parseRegex()); } else { list.add(parseIdentifier()); } if (!atEnd() && atFilterTerminator()) { break; } index++; } result.add(list); eatSeparators(); } return result; } FilterParser(String filterSpec); Filter parseSchemaFilter(); Filter parseTableFilter(boolean matchParents); Filter parseColumnFilter(boolean matchParents); List<List<IdentifierMatcher>> parse(); }
@Test public void testEmpty() throws ParseException { Assert.assertEquals("", toString(new FilterParser("").parse())); } @Test public void testSimple() throws ParseException { Assert.assertEquals("'foo'", toString(new FilterParser("foo").parse())); } @Test public void testMultipleStrings() throws ParseException { Assert.assertEquals("'foo'.'bar'", toString(new FilterParser("foo.bar").parse())); } @Test public void testMultipleFilters() throws ParseException { Assert.assertEquals("'foo','bar'", toString(new FilterParser("foo,bar").parse())); } @Test public void testMultipleFiltersNewline() throws ParseException { Assert.assertEquals("'foo','bar'", toString(new FilterParser("foo\n\rbar").parse())); } @Test public void testRegex() throws ParseException { Assert.assertEquals("/foo/0", toString(new FilterParser("/foo/").parse())); } @Test public void testRegexWithFlag() throws ParseException { Assert.assertEquals("/foo/2", toString(new FilterParser("/foo/i").parse())); } @Test public void testMutlipleRegexes() throws ParseException { Assert.assertEquals("/foo/0./bar/0", toString(new FilterParser("/foo/./bar/").parse())); } @Test public void testMutlipleRegexFilters() throws ParseException { Assert.assertEquals("/foo/0,/bar/0", toString(new FilterParser("/foo/,/bar/").parse())); } @Test public void testDotInRegex() throws ParseException { Assert.assertEquals("/foo.bar/0", toString(new FilterParser("/foo.bar/").parse())); } @Test public void testEscapedDotInRegex() throws ParseException { Assert.assertEquals("/foo\\.bar/0", toString(new FilterParser("/foo\\.bar/").parse())); } @Test public void testCommaInRegex() throws ParseException { Assert.assertEquals("/foo,bar/0", toString(new FilterParser("/foo,bar/").parse())); } @Test public void testIncompleteRegex() { try { new FilterParser("/foo").parse(); Assert.fail("Should have thrown ParseException because of unterminated regex"); } catch (ParseException ex) { } } @Test public void testIncompleteRegexNewline() { try { new FilterParser("/foo\nbar/").parse(); Assert.fail("Should have thrown ParseException because of unterminated regex"); } catch (ParseException ex) { } } @Test public void testComplex() throws ParseException { Assert.assertEquals("/.*/0.'CHECKSUM','USER'.'PASSWORD'", toString(new FilterParser("/.*/.CHECKSUM,USER.PASSWORD").parse())); }
FilterParser { public Filter parseSchemaFilter() throws ParseException { List<Filter> result = new ArrayList<>(); for (List<IdentifierMatcher> list : parse()) { if (list.size() != 1) { throw new ParseException("Syntax error in schema filter list; expected list of comma- or newline-separated schema names: '" + s + "'"); } result.add(new FilterMatchSchema(list.get(0))); } return FilterMatchAny.create(result); } FilterParser(String filterSpec); Filter parseSchemaFilter(); Filter parseTableFilter(boolean matchParents); Filter parseColumnFilter(boolean matchParents); List<List<IdentifierMatcher>> parse(); }
@Test public void testParseAsSchemaFilter() throws ParseException { Filter result = new FilterParser("schema1,schema2").parseSchemaFilter(); Assert.assertTrue(result.matchesSchema("schema1")); Assert.assertTrue(result.matchesSchema("schema2")); Assert.assertFalse(result.matchesSchema("schema3")); Assert.assertFalse(result.matchesSchema(null)); } @Test public void testParseAsSchemaFilterWithRegex() throws ParseException { Filter result = new FilterParser("/schema[12]/i").parseSchemaFilter(); Assert.assertTrue(result.matchesSchema("schema1")); Assert.assertTrue(result.matchesSchema("SCHEMA2")); Assert.assertFalse(result.matchesSchema("schema3")); Assert.assertFalse(result.matchesSchema(null)); } @Test public void testParseAsSchemaFilterFail() { try { new FilterParser("schema.table").parseSchemaFilter(); Assert.fail("Should have failed because schema.table is not in schema notation"); } catch (ParseException ex) { } }
NiFIAtlasHook extends AtlasHook { public void createLineageToKafkaTopic() throws Exception { final List<HookNotification.HookNotificationMessage> messages = new ArrayList<>(); final Referenceable topic = new Referenceable("kafka_topic"); topic.set(ATTR_NAME, "notification"); topic.set("topic", "notification"); topic.set(ATTR_QUALIFIED_NAME, "notification@HDPF"); topic.set(ATTR_DESCRIPTION, "Description"); topic.set("uri", "0.hdpf.aws.mine"); final HookNotification.EntityCreateRequest createTopic = new HookNotification.EntityCreateRequest("nifi", topic); final Referenceable path5 = new Referenceable(TYPE_NIFI_FLOW_PATH); path5.set(ATTR_QUALIFIED_NAME, "path5"); final ArrayList<Object> path5Outputs = new ArrayList<>(); path5Outputs.add(new Referenceable(topic)); path5.set(ATTR_OUTPUTS, path5Outputs); messages.add(createTopic); messages.add(new HookNotification.EntityPartialUpdateRequest("nifi", TYPE_NIFI_FLOW_PATH, ATTR_QUALIFIED_NAME, "path5", path5)); notifyEntities(messages); } void sendMessage(); void createLineageFromKafkaTopic(); void createLineageToKafkaTopic(); void addDataSetRefs(DataSetRefs dataSetRefs, Referenceable flowPathRef); void addDataSetRefs(DataSetRefs dataSetRefs, Referenceable flowPathRef, boolean create); void addCreateReferenceable(Collection<Referenceable> ins, Referenceable ref); void addUpdateReferenceable(Referenceable ref); void commitMessages(); }
@Test public void test() throws Exception { final NiFIAtlasHook hook = new NiFIAtlasHook(); hook.createLineageToKafkaTopic(); }
HBaseTable extends AbstractNiFiProvenanceEventAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final Matcher uriMatcher = URI_PATTERN.matcher(event.getTransitUri()); if (!uriMatcher.matches()) { logger.warn("Unexpected transit URI: {}", new Object[]{event.getTransitUri()}); return null; } final Referenceable ref = new Referenceable(TYPE); String clusterName = null; for (String zkHost : uriMatcher.group(1).split(",")) { final String zkHostName = zkHost.split(":")[0].trim(); clusterName = context.getClusterResolver().fromHostname(zkHostName); if (clusterName != null && !clusterName.isEmpty()) { break; } } final String tableName = uriMatcher.group(2); ref.set(ATTR_NAME, tableName); ref.set(ATTR_QUALIFIED_NAME, toQualifiedName(clusterName, tableName)); return singleDataSetRef(event.getComponentId(), event.getEventType(), ref); } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetTransitUriPattern(); }
@Test public void testHBaseTable() { final String processorName = "FetchHBaseRow"; final String transitUri = "hbase: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.FETCH); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("hbase_table", ref.getTypeName()); assertEquals("tableA", ref.get(ATTR_NAME)); assertEquals("tableA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testHBaseTableWithMultipleZkHosts() { final String processorName = "FetchHBaseRow"; final String transitUri = "hbase: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.FETCH); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("hbase_table", ref.getTypeName()); assertEquals("tableA", ref.get(ATTR_NAME)); assertEquals("tableA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); }
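HBaseTable.analyze depends on URI_PATTERN, which is not shown, and the transit URI literals in these tests are truncated. From the way the groups are used (group 1 is a comma-separated ZooKeeper host list, group 2 the table name), a plausible shape is hbase://zk1,zk2/tableName. The regex and sample URI below are assumptions for illustration, not the NiFi implementation:

import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class HBaseTransitUriSketch {
    // Hypothetical pattern: "hbase://<comma-separated zk hosts>/<table>".
    private static final Pattern URI_PATTERN = Pattern.compile("^hbase://([^/]+)/(.+)$");

    public static void main(String[] args) {
        String transitUri = "hbase://zk0.example.com,zk1.example.com/tableA"; // assumed sample
        Matcher m = URI_PATTERN.matcher(transitUri);
        if (m.matches()) {
            // Group 1: the ZooKeeper quorum; each host would be passed to the ClusterResolver.
            for (String zkHost : m.group(1).split(",")) {
                System.out.println("zk host: " + zkHost.split(":")[0].trim());
            }
            // Group 2: the HBase table name used for ATTR_NAME and the qualified name.
            System.out.println("table: " + m.group(2));
        }
    }
}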
CreateObscureInputDataSet extends AbstractNiFiProvenanceEventAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final String componentId = event.getComponentId(); final List<ConnectionStatus> incomingConnections = context.findConnectionTo(componentId); if (incomingConnections != null && !incomingConnections.isEmpty()) { return null; } final Referenceable ref = new Referenceable(TYPE); ref.set(ATTR_NAME, event.getComponentType()); ref.set(ATTR_QUALIFIED_NAME, componentId); final DataSetRefs refs = new DataSetRefs(componentId); refs.addInput(ref); return refs; } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override ProvenanceEventType targetProvenanceEventType(); }
@Test public void testGenerateFlowFile() { final String processorName = "GenerateFlowFile"; final String processorId = "processor-1234"; final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getComponentId()).thenReturn(processorId); when(record.getEventType()).thenReturn(ProvenanceEventType.CREATE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final List<ConnectionStatus> connections = new ArrayList<>(); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); when(context.findConnectionTo(processorId)).thenReturn(connections); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, null, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("nifi_data", ref.getTypeName()); assertEquals("GenerateFlowFile", ref.get(ATTR_NAME)); assertEquals("processor-1234", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testSomethingHavingIncomingConnection() { final String processorName = "SomeProcessor"; final String processorId = "processor-1234"; final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getComponentId()).thenReturn(processorId); when(record.getEventType()).thenReturn(ProvenanceEventType.CREATE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final List<ConnectionStatus> connections = new ArrayList<>(); connections.add(new ConnectionStatus()); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); when(context.findConnectionTo(processorId)).thenReturn(connections); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, null, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertNull("If the processor has incoming connections, no refs should be created", refs); }
HDFSPath extends AbstractNiFiProvenanceEventAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final Referenceable ref = new Referenceable(TYPE); final URI uri = parseUri(event.getTransitUri()); final String clusterName = context.getClusterResolver().fromHostname(uri.getHost()); final String path = uri.getPath(); ref.set(ATTR_NAME, path); ref.set(ATTR_PATH, path); ref.set(ATTR_CLUSTER_NAME, clusterName); ref.set(ATTR_QUALIFIED_NAME, toQualifiedName(clusterName, path)); return singleDataSetRef(event.getComponentId(), event.getEventType(), ref); } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetTransitUriPattern(); }
@Test public void testHDFSPath() { final String processorName = "PutHDFS"; final String transitUri = "hdfs: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(0, refs.getInputs().size()); assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("hdfs_path", ref.getTypeName()); assertEquals("/user/nifi/fileA", ref.get(ATTR_NAME)); assertEquals("/user/nifi/fileA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); }
KafkaTopic extends AbstractNiFiProvenanceEventAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final Referenceable ref = new Referenceable(TYPE); final String transitUri = event.getTransitUri(); final Matcher uriMatcher = URI_PATTERN.matcher(transitUri); if (!uriMatcher.matches()) { logger.warn("Unexpected transit URI: {}", new Object[]{transitUri}); return null; } String clusterName = null; for (String broker : uriMatcher.group(1).split(",")) { final String brokerHostname = broker.split(":")[0].trim(); clusterName = context.getClusterResolver().fromHostname(brokerHostname); if (clusterName != null && !clusterName.isEmpty()) { break; } } final String topicName = uriMatcher.group(2); ref.set(ATTR_NAME, topicName); ref.set(ATTR_TOPIC, topicName); ref.set(ATTR_QUALIFIED_NAME, toQualifiedName(clusterName, topicName)); ref.set(ATTR_URI, transitUri); return singleDataSetRef(event.getComponentId(), event.getEventType(), ref); } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetComponentTypePattern(); }
@Test public void testPublishKafka() { final String processorName = "PublishKafka"; final String transitUri = "PLAINTEXT: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(0, refs.getInputs().size()); assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("topicA", ref.get(ATTR_NAME)); assertEquals("topicA", ref.get("topic")); assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testPublishKafkaMultipleBrokers() { final String processorName = "PublishKafka"; final String transitUri = "PLAINTEXT: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(0, refs.getInputs().size()); assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("topicA", ref.get(ATTR_NAME)); assertEquals("topicA", ref.get("topic")); assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testConsumeKafka() { final String processorName = "ConsumeKafka"; final String transitUri = "PLAINTEXT: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.RECEIVE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("kafka_topic", ref.getTypeName()); assertEquals("topicA", ref.get(ATTR_NAME)); assertEquals("topicA", ref.get("topic")); assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testConsumeKafkaRecord_0_10() { final String processorName = "ConsumeKafkaRecord_0_10"; final String transitUri = "PLAINTEXT: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.RECEIVE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("kafka_topic", ref.getTypeName()); assertEquals("topicA", ref.get(ATTR_NAME)); assertEquals("topicA", ref.get("topic")); assertEquals("topicA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); }
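KafkaTopic declares targetComponentTypePattern(), but its return value is not part of this record; the tests above imply it must match PublishKafka, ConsumeKafka and ConsumeKafkaRecord_0_10. A hypothetical pattern that would satisfy those cases (an assumption for illustration, not the project's actual value):

import java.util.regex.Pattern;

public class KafkaComponentTypePatternSketch {
    // Hypothetical component-type pattern covering the processors exercised above.
    private static final Pattern COMPONENT_TYPE = Pattern.compile("^(Publish|Consume)Kafka.*$");

    public static void main(String[] args) {
        System.out.println(COMPONENT_TYPE.matcher("PublishKafka").matches());            // true
        System.out.println(COMPONENT_TYPE.matcher("ConsumeKafkaRecord_0_10").matches()); // true
        System.out.println(COMPONENT_TYPE.matcher("PublishJMS").matches());              // false
    }
}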
InputPort extends AbstractFileAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final List<ConnectionStatus> connections = context.findConnectionFrom(event.getComponentId()); if (connections == null || connections.isEmpty()) { logger.warn("Connection was not found: {}", new Object[]{event}); return null; } String sourceFlowFileUuid = event.getSourceSystemFlowFileIdentifier().substring("urn:nifi:".length()); String componentName = context.lookupInputPortName(event.getComponentId()); final Referenceable ref = new Referenceable(TYPE_NIFI_INPUT_PORT); ref.set(ATTR_NAME, componentName); ref.set(ATTR_QUALIFIED_NAME, sourceFlowFileUuid); final Set<String> connectedComponentIds = connections.stream() .map(c -> c.getDestinationId()).collect(Collectors.toSet()); final DataSetRefs refs = new DataSetRefs(connectedComponentIds); refs.addInput(ref); return refs; } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetComponentTypePattern(); }
@Test public void testInputPort() { final String processorName = "Input Port"; ConnectionStatus con1 = Mockito.mock(ConnectionStatus.class); when(con1.getDestinationId()).thenReturn("101"); List<ConnectionStatus> connectionStatuses = new ArrayList<>(); connectionStatuses.add(con1); final String componentId = "100"; final String transitUri = "http: final String sourceSystemFlowFileIdentifier = "urn:nifi:7ce27bc3-b128-4128-aba2-3a366435fd05"; final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getComponentId()).thenReturn(componentId); when(record.getSourceSystemFlowFileIdentifier()).thenReturn(sourceSystemFlowFileIdentifier); when(record.getEventType()).thenReturn(ProvenanceEventType.RECEIVE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); when(context.lookupInputPortName(componentId)).thenReturn("IN_PORT"); when(context.findConnectionFrom(componentId)).thenReturn(connectionStatuses); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getComponentIds().size()); assertEquals("101", refs.getComponentIds().iterator().next()); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("nifi_input_port", ref.getTypeName()); assertEquals("IN_PORT", ref.get(ATTR_NAME)); assertEquals("7ce27bc3-b128-4128-aba2-3a366435fd05", ref.get(ATTR_QUALIFIED_NAME)); }
ByFileLineageStrategy implements LineageEventProcessor { public void processEvent (ProvenanceEventRecord event, NiFiFlow nifiFlow, AnalysisContext analysisContext) { try { if (event.getEventType() == ProvenanceEventType.CLONE) { createParentProcess (event, nifiFlow); ProcessorStatus pr = nifiFlow.getProcessors().get(event.getComponentId()); Collection<Referenceable> inputs = getNifiDataRefs(event); for ( String childUuid : event.getChildUuids()) { createProcessForEachChild(event, nifiFlow, pr, childUuid, inputs); } } else { final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(event.getComponentType(), event.getTransitUri(), event.getEventType()); if (getLogger().isDebugEnabled()) { getLogger().debug("Analyzer {} is found for event: {}", new Object[]{analyzer, event}); } if (analyzer == null) { getLogger().warn("No analyzer for {}", new Object[]{event.getComponentType()}); return; } final DataSetRefs refs = analyzer.analyze(analysisContext, event); if (refs == null || (refs.isEmpty())) { return; } final Set<NiFiFlowPath> flowPaths = refs.getComponentIds().stream() .map(componentId -> { final NiFiFlowPath flowPath = nifiFlow.findPath(componentId); if (flowPath == null) { getLogger().warn("FlowPath for {} was not found.", new Object[]{event.getComponentId()}); } return flowPath; }) .filter(Objects::nonNull) .collect(Collectors.toSet()); for (NiFiFlowPath flowPath : flowPaths) { final Referenceable flowRef = new Referenceable(TYPE_NIFI_FLOW); flowRef.set(ATTR_QUALIFIED_NAME, nifiFlow.getId().getUniqueAttributes().get(ATTR_QUALIFIED_NAME)); String cid = refs.getComponentIds().iterator().next(); ProcessorStatus pr = nifiFlow.getProcessors().get(cid); final Referenceable flowPathRef = new Referenceable(TYPE_NIFI_FLOW_PATH); flowPathRef.set(ATTR_NAME, (pr == null ? "UNKNOWN" : pr.getName())); flowPathRef.set(ATTR_DESCRIPTION, event.getAttribute("filename") + " : " + flowPath.getName()); flowPathRef.set(ATTR_QUALIFIED_NAME, event.getFlowFileUuid()); flowPathRef.set(ATTR_URL, nifiFlow.getUrl()); if (event.getAttribute("nifi.params") != null) { flowPathRef.set(ATTR_NIFI_FLOW_PARAMS, event.getAttribute("nifi.params")); } addProcessorToFlowPath(flowPathRef, pr); Collection<Referenceable> addedFlowPath = new ArrayList<>(); addedFlowPath.add(flowPathRef); flowRef.set(ATTR_FLOW_PATHS, addedFlowPath); nifiAtlasHook.addDataSetRefs(refs, flowPathRef, (event.getEventType() == ProvenanceEventType.RECEIVE)); } } } catch (Exception e) { getLogger().error("Skipping failed analyzing event {} due to {}.", new Object[]{event, e}, e); } } ByFileLineageStrategy(ComponentLog logger, NiFIAtlasHook atlasHook); void processEvent(ProvenanceEventRecord event, NiFiFlow nifiFlow, AnalysisContext analysisContext); }
@Test public void testClone() throws IOException { ProcessGroupStatus root = new ProcessGroupStatus(); MockComponentLog logger = new MockComponentLog("0", this); NiFIAtlasHook atlasHook = new NiFIAtlasHook() { protected void notifyEntities(List<HookNotification.HookNotificationMessage> messages) { assertEquals(4, messages.size()); assertEquals(1, ((HookNotification.EntityCreateRequest)messages.get(0)).getEntities().size()); assertEquals(1, ((HookNotification.EntityCreateRequest)messages.get(1)).getEntities().size()); assertEquals(1, ((HookNotification.EntityCreateRequest)messages.get(2)).getEntities().size()); assertEquals(1, ((HookNotification.EntityCreateRequest)messages.get(3)).getEntities().size()); assertEquals("nifi_data", ((HookNotification.EntityCreateRequest)messages.get(0)).getEntities().get(0).getTypeName()); assertEquals("nifi_flow_path", ((HookNotification.EntityCreateRequest)messages.get(1)).getEntities().get(0).getTypeName()); assertEquals("nifi_data", ((HookNotification.EntityCreateRequest)messages.get(2)).getEntities().get(0).getTypeName()); assertEquals("nifi_flow_path", ((HookNotification.EntityCreateRequest)messages.get(3)).getEntities().get(0).getTypeName()); Referenceable nifiData0 = ((HookNotification.EntityCreateRequest)messages.get(0)).getEntities().get(0); assertEquals("child-guid-1", nifiData0.get(ATTR_QUALIFIED_NAME)); Referenceable nifiData1 = ((HookNotification.EntityCreateRequest)messages.get(1)).getEntities().get(0); assertEquals("file-guid", nifiData1.get(ATTR_QUALIFIED_NAME)); Referenceable nifiData2 = ((HookNotification.EntityCreateRequest)messages.get(2)).getEntities().get(0); assertEquals("child-guid-1", nifiData2.get(ATTR_QUALIFIED_NAME)); Referenceable nifiData3 = ((HookNotification.EntityCreateRequest)messages.get(3)).getEntities().get(0); assertEquals("child-guid-1", nifiData3.get(ATTR_QUALIFIED_NAME)); } }; final ByFileLineageStrategy strategy = new ByFileLineageStrategy(logger, atlasHook); final ProvenanceEventRecord event = Mockito.mock(ProvenanceEventRecord.class); when(event.getComponentId()).thenReturn("comp-id"); when(event.getComponentType()).thenReturn("InferAvroSchema"); when(event.getFlowFileUuid()).thenReturn("file-guid"); when(event.getChildUuids()).thenReturn(Arrays.asList("child-guid-1")); when(event.getAttribute("filename")).thenReturn("sample_z"); when(event.getEventType()).thenReturn(ProvenanceEventType.CLONE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiFlowAnalyzer analyzer = new NiFiFlowAnalyzer(); final AtlasVariables atlasVariables = new AtlasVariables(); final MockEventAccess eventAccess = Mockito.mock(MockEventAccess.class); when (eventAccess.getGroupStatus("root")).thenReturn(root); ReportingContext reportingContext = Mockito.mock(ReportingContext.class); when(reportingContext.getEventAccess()).thenReturn(eventAccess); final NiFiFlow nifiFlow = analyzer.analyzeProcessGroup(atlasVariables, reportingContext); strategy.processEvent(event, nifiFlow, context); atlasHook.commitMessages(); }
NiFiFlowAnalyzer { public NiFiFlow analyzeProcessGroup(AtlasVariables atlasVariables, ReportingContext context) throws IOException { final ProcessGroupStatus rootProcessGroup = context.getEventAccess().getGroupStatus("root"); final String flowName = rootProcessGroup.getName(); final String nifiUrlForAtlasMetadata = atlasVariables.getNifiUrl(); final String nifiUrl = nifiUrlForAtlasMetadata; final NiFiFlow nifiFlow = new NiFiFlow(flowName, rootProcessGroup.getId(), nifiUrl); analyzeProcessGroup(rootProcessGroup, nifiFlow); analyzeRootGroupPorts(nifiFlow, rootProcessGroup); return nifiFlow; } NiFiFlow analyzeProcessGroup(AtlasVariables atlasVariables, ReportingContext context); void analyzePaths(NiFiFlow nifiFlow); }
@Test public void testEmptyFlow() throws Exception { ReportingContext reportingContext = Mockito.mock(ReportingContext.class); EventAccess eventAccess = Mockito.mock(EventAccess.class); ProcessGroupStatus rootPG = createEmptyProcessGroupStatus(); when(reportingContext.getEventAccess()).thenReturn(eventAccess); when(eventAccess.getGroupStatus(matches("root"))).thenReturn(rootPG); final NiFiFlowAnalyzer analyzer = new NiFiFlowAnalyzer(); final NiFiFlow nifiFlow = analyzer.analyzeProcessGroup(atlasVariables, reportingContext); assertEquals("Flow name", nifiFlow.getFlowName()); }
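The testEmptyFlow test above calls a createEmptyProcessGroupStatus() helper and an atlasVariables field that are not shown in this record; for the final assertion to hold, the helper must at least set the root group's name to "Flow name". A hypothetical sketch of such a helper, intended to live inside the test class (the field values are assumptions chosen only to satisfy the shown stubbing and assertion):

// requires: import org.apache.nifi.controller.status.ProcessGroupStatus;
// Hypothetical helper used by testEmptyFlow(): builds an empty root group status.
private ProcessGroupStatus createEmptyProcessGroupStatus() {
    final ProcessGroupStatus rootPG = new ProcessGroupStatus();
    rootPG.setId("root");        // matches eventAccess.getGroupStatus(matches("root")) stubbing
    rootPG.setName("Flow name"); // matches assertEquals("Flow name", nifiFlow.getFlowName())
    return rootPG;
}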
PutHiveStreaming extends AbstractHiveAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final URI uri = parseUri(event.getTransitUri()); final String clusterName = context.getClusterResolver().fromHostname(uri.getHost()); final Set<Tuple<String, String>> outputTables = parseTableNames(null, event.getAttribute(ATTR_OUTPUT_TABLES)); if (outputTables.isEmpty()) { return null; } final DataSetRefs refs = new DataSetRefs(event.getComponentId()); outputTables.forEach(tableName -> { final Referenceable ref = createTableRef(clusterName, tableName); refs.addOutput(ref); }); return refs; } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetComponentTypePattern(); }
@Test public void testTableLineage() { final String processorName = "PutHiveStreaming"; final String transitUri = "thrift: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); when(record.getAttribute(ATTR_OUTPUT_TABLES)).thenReturn("databaseA.tableA"); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(0, refs.getInputs().size()); assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("hive_table", ref.getTypeName()); assertEquals("tableA", ref.get(ATTR_NAME)); assertEquals("databaseA.tableA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); }
Hive2JDBC extends AbstractHiveAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final String transitUri = event.getTransitUri().replaceFirst("^jdbc:hive2", "jdbc-hive2"); final URI uri = parseUri(transitUri); final String clusterName = context.getClusterResolver().fromHostname(uri.getHost()); final String connectedDatabaseName = uri.getPath().substring(1); final Set<Tuple<String, String>> inputTables = parseTableNames(connectedDatabaseName, event.getAttribute(ATTR_INPUT_TABLES)); final Set<Tuple<String, String>> outputTables = parseTableNames(connectedDatabaseName, event.getAttribute(ATTR_OUTPUT_TABLES)); if (inputTables.isEmpty() && outputTables.isEmpty()) { return getDatabaseRef(event.getComponentId(), event.getEventType(), clusterName, connectedDatabaseName); } final DataSetRefs refs = new DataSetRefs(event.getComponentId()); addRefs(refs, true, clusterName, inputTables); addRefs(refs, false, clusterName, outputTables); return refs; } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetTransitUriPattern(); }
@Test public void testDatabaseLineage() { final String processorName = "PutHiveQL"; final String transitUri = "jdbc:hive2: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(0, refs.getInputs().size()); assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("hive_db", ref.getTypeName()); assertEquals("databaseA", ref.get(ATTR_NAME)); assertEquals("databaseA@cluster1", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testTableLineage() { final String processorName = "PutHiveQL"; final String transitUri = "jdbc:hive2: final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); when(record.getAttribute(ATTR_INPUT_TABLES)).thenReturn("tableA1, tableA2"); when(record.getAttribute(ATTR_OUTPUT_TABLES)).thenReturn("databaseB.tableB1"); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(2, refs.getInputs().size()); final Map<String, String> expectedInputRefs = new HashMap<>(); expectedInputRefs.put("databaseA.tableA1@cluster1", "tableA1"); expectedInputRefs.put("databaseA.tableA2@cluster1", "tableA2"); for (Referenceable ref : refs.getInputs()) { final String qName = (String) ref.get(ATTR_QUALIFIED_NAME); assertTrue(expectedInputRefs.containsKey(qName)); assertEquals(expectedInputRefs.get(qName), ref.get(ATTR_NAME)); } assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("hive_table", ref.getTypeName()); assertEquals("tableB1", ref.get(ATTR_NAME)); assertEquals("databaseB.tableB1@cluster1", ref.get(ATTR_QUALIFIED_NAME)); }
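Hive2JDBC.analyze() above rewrites the "jdbc:hive2" prefix to "jdbc-hive2" before parsing with java.net.URI. The rewrite matters because a raw JDBC URL has two colons before the authority, so URI treats everything after "jdbc:" as an opaque scheme-specific part and getHost() returns null. A small self-contained demonstration of that behavior (the host name below is a placeholder, since the transit URIs in the tests above are truncated):

import java.net.URI;

public class Hive2JdbcUriDemo {
    public static void main(String[] args) {
        // Raw JDBC URL: the scheme is "jdbc", the remainder is opaque, so no host or path is parsed.
        final URI raw = URI.create("jdbc:hive2://hive.example.com:10000/databaseA");
        System.out.println(raw.getHost()); // null

        // After the prefix rewrite done in Hive2JDBC.analyze(), the authority parses normally.
        final URI rewritten = URI.create("jdbc-hive2://hive.example.com:10000/databaseA");
        System.out.println(rewritten.getHost()); // hive.example.com
        System.out.println(rewritten.getPath()); // /databaseA
    }
}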
GetPutFile extends AbstractFileAnalyzer { @Override public DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event) { final URI uri = parseUri(event.getTransitUri()); final String clusterName = context.getClusterResolver().fromHostname(uri.getHost()); final DataSetRefs refs = new DataSetRefs(event.getComponentId()); final Referenceable ref = createFileRef(clusterName, uri); ref.set(ATTR_NAME, event.getAttribute("filename")); ref.set(ATTR_PATH, event.getAttribute("path")); ref.set(ATTR_IS_FILE, true); ref.set(ATTR_FILE_SIZE, event.getFileSize()); ref.set(ATTR_FILE_GROUP, event.getAttribute("file.group")); ref.set(ATTR_FILE_OWNER, event.getAttribute("file.owner")); if (event.getComponentType().equals("GetFile")) { refs.addInput(ref); } else { refs.addOutput(ref); } return refs; } @Override DataSetRefs analyze(AnalysisContext context, ProvenanceEventRecord event); @Override String targetComponentTypePattern(); }
@Test public void testGetFile() { final String processorName = "GetFile"; final String transitUri = "file:/var/data/landing/sample_z"; final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getAttribute("filename")).thenReturn("sample_z"); when(record.getEventType()).thenReturn(ProvenanceEventType.RECEIVE); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(1, refs.getInputs().size()); assertEquals(0, refs.getOutputs().size()); Referenceable ref = refs.getInputs().iterator().next(); assertEquals("fs_path", ref.getTypeName()); assertEquals("sample_z", ref.get(ATTR_NAME)); assertEquals("/var/data/landing/sample_z@null", ref.get(ATTR_QUALIFIED_NAME)); } @Test public void testPutFile() { final String processorName = "PutFile"; final String transitUri = "file:/var/data/landing/sample_z"; final ProvenanceEventRecord record = Mockito.mock(ProvenanceEventRecord.class); when(record.getComponentType()).thenReturn(processorName); when(record.getTransitUri()).thenReturn(transitUri); when(record.getAttribute("filename")).thenReturn("sample_z"); when(record.getEventType()).thenReturn(ProvenanceEventType.SEND); final ClusterResolvers clusterResolvers = Mockito.mock(ClusterResolvers.class); when(clusterResolvers.fromHostname(matches(".+\\.example\\.com"))).thenReturn("cluster1"); final AnalysisContext context = Mockito.mock(AnalysisContext.class); when(context.getClusterResolver()).thenReturn(clusterResolvers); final NiFiProvenanceEventAnalyzer analyzer = NiFiProvenanceEventAnalyzerFactory.getAnalyzer(processorName, transitUri, record.getEventType()); assertNotNull(analyzer); final DataSetRefs refs = analyzer.analyze(context, record); assertEquals(0, refs.getInputs().size()); assertEquals(1, refs.getOutputs().size()); Referenceable ref = refs.getOutputs().iterator().next(); assertEquals("fs_path", ref.getTypeName()); assertEquals("sample_z", ref.get(ATTR_NAME)); assertEquals("/var/data/landing/sample_z@null", ref.get(ATTR_QUALIFIED_NAME)); }
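Both GetFile and PutFile tests above expect a qualified name ending in "@null": the transit URI "file:/var/data/landing/sample_z" carries no authority, so uri.getHost() is null, the stubbed ClusterResolver is never consulted with a matching hostname, and clusterName stays null. The base analyzer's toQualifiedName() helper is not shown in this record; a hypothetical one-line implementation inferred from the "<name>@<cluster>" values asserted throughout these tests:

// Hypothetical helper on the abstract analyzer; inferred from the asserted values, not copied from source.
protected String toQualifiedName(String clusterName, String dataSetName) {
    // Plain string concatenation turns a null cluster into the literal "null",
    // which is why these tests expect "/var/data/landing/sample_z@null".
    return dataSetName + "@" + clusterName;
}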