Columns: src_fm_fc_ms_ff (stringlengths 43 to 86.8k), target (stringlengths 20 to 276k)
WxCpTagServiceImpl implements WxCpTagService { @Override public String create(String tagName) throws WxErrorException { String url = "https: JsonObject o = new JsonObject(); o.addProperty("tagname", tagName); String responseContent = this.mainService.post(url, o.toString()); JsonElement tmpJsonElement = new JsonParser().parse(responseContent); return tmpJsonElement.getAsJsonObject().get("tagid").getAsString(); } WxCpTagServiceImpl(WxCpService mainService); @Override String create(String tagName); @Override void update(String tagId, String tagName); @Override void delete(String tagId); @Override List<WxCpTag> listAll(); @Override List<WxCpUser> listUsersByTagId(String tagId); @Override WxCpTagAddOrRemoveUsersResult addUsers2Tag(String tagId, List<String> userIds, List<String> partyIds); @Override WxCpTagAddOrRemoveUsersResult removeUsersFromTag(String tagId, List<String> userIds); }
@Test public void testCreate() throws Exception { this.tagId = this.wxService.getTagService().create("测试标签" + System.currentTimeMillis()); System.out.println(this.tagId); }
WxMpTemplateMsgServiceImpl implements WxMpTemplateMsgService { @Override public WxMpTemplateIndustry getIndustry() throws WxErrorException { String url = API_URL_PREFIX + "/get_industry"; String responseContent = this.wxMpService.get(url, null); return WxMpTemplateIndustry.fromJson(responseContent); } WxMpTemplateMsgServiceImpl(WxMpService wxMpService); @Override String sendTemplateMsg(WxMpTemplateMessage templateMessage); @Override boolean setIndustry(WxMpTemplateIndustry wxMpIndustry); @Override WxMpTemplateIndustry getIndustry(); @Override String addTemplate(String shortTemplateId); @Override List<WxMpTemplate> getAllPrivateTemplate(); @Override boolean delPrivateTemplate(String templateId); static final String API_URL_PREFIX; }
@Test public void testGetIndustry() throws Exception { final WxMpTemplateIndustry industry = this.wxService.getTemplateMsgService().getIndustry(); Assert.assertNotNull(industry); System.out.println(industry); }
WxMpTemplateMsgServiceImpl implements WxMpTemplateMsgService { @Override public boolean setIndustry(WxMpTemplateIndustry wxMpIndustry) throws WxErrorException { if (null == wxMpIndustry.getPrimaryIndustry() || null == wxMpIndustry.getPrimaryIndustry().getId() || null == wxMpIndustry.getSecondIndustry() || null == wxMpIndustry.getSecondIndustry().getId()) { throw new IllegalArgumentException("行业Id不能为空,请核实"); } String url = API_URL_PREFIX + "/api_set_industry"; this.wxMpService.post(url, wxMpIndustry.toJson()); return true; } WxMpTemplateMsgServiceImpl(WxMpService wxMpService); @Override String sendTemplateMsg(WxMpTemplateMessage templateMessage); @Override boolean setIndustry(WxMpTemplateIndustry wxMpIndustry); @Override WxMpTemplateIndustry getIndustry(); @Override String addTemplate(String shortTemplateId); @Override List<WxMpTemplate> getAllPrivateTemplate(); @Override boolean delPrivateTemplate(String templateId); static final String API_URL_PREFIX; }
@Test public void testSetIndustry() throws Exception { WxMpTemplateIndustry industry = new WxMpTemplateIndustry(new WxMpTemplateIndustry.Industry("1"), new WxMpTemplateIndustry.Industry("04")); boolean result = this.wxService.getTemplateMsgService().setIndustry(industry); Assert.assertTrue(result); }
WxMpTemplateMsgServiceImpl implements WxMpTemplateMsgService { @Override public String addTemplate(String shortTemplateId) throws WxErrorException { String url = API_URL_PREFIX + "/api_add_template"; JsonObject jsonObject = new JsonObject(); jsonObject.addProperty("template_id_short", shortTemplateId); String responseContent = this.wxMpService.post(url, jsonObject.toString()); final JsonObject result = JSON_PARSER.parse(responseContent).getAsJsonObject(); if (result.get("errcode").getAsInt() == 0) { return result.get("template_id").getAsString(); } throw new WxErrorException(WxError.fromJson(responseContent)); } WxMpTemplateMsgServiceImpl(WxMpService wxMpService); @Override String sendTemplateMsg(WxMpTemplateMessage templateMessage); @Override boolean setIndustry(WxMpTemplateIndustry wxMpIndustry); @Override WxMpTemplateIndustry getIndustry(); @Override String addTemplate(String shortTemplateId); @Override List<WxMpTemplate> getAllPrivateTemplate(); @Override boolean delPrivateTemplate(String templateId); static final String API_URL_PREFIX; }
@Test public void testAddTemplate() throws Exception { String result = this.wxService.getTemplateMsgService().addTemplate("TM00015"); Assert.assertNotNull(result); System.err.println(result); }
WxMpTemplateMsgServiceImpl implements WxMpTemplateMsgService { @Override public List<WxMpTemplate> getAllPrivateTemplate() throws WxErrorException { String url = API_URL_PREFIX + "/get_all_private_template"; return WxMpTemplate.fromJson(this.wxMpService.get(url, null)); } WxMpTemplateMsgServiceImpl(WxMpService wxMpService); @Override String sendTemplateMsg(WxMpTemplateMessage templateMessage); @Override boolean setIndustry(WxMpTemplateIndustry wxMpIndustry); @Override WxMpTemplateIndustry getIndustry(); @Override String addTemplate(String shortTemplateId); @Override List<WxMpTemplate> getAllPrivateTemplate(); @Override boolean delPrivateTemplate(String templateId); static final String API_URL_PREFIX; }
@Test public void testGetAllPrivateTemplate() throws Exception { List<WxMpTemplate> result = this.wxService.getTemplateMsgService().getAllPrivateTemplate(); Assert.assertNotNull(result); System.err.println(result); }
WxMpTemplateMsgServiceImpl implements WxMpTemplateMsgService { @Override public boolean delPrivateTemplate(String templateId) throws WxErrorException { String url = API_URL_PREFIX + "/del_private_template"; JsonObject jsonObject = new JsonObject(); jsonObject.addProperty("template_id", templateId); String responseContent = this.wxMpService.post(url, jsonObject.toString()); WxError error = WxError.fromJson(responseContent); if (error.getErrorCode() == 0) { return true; } throw new WxErrorException(error); } WxMpTemplateMsgServiceImpl(WxMpService wxMpService); @Override String sendTemplateMsg(WxMpTemplateMessage templateMessage); @Override boolean setIndustry(WxMpTemplateIndustry wxMpIndustry); @Override WxMpTemplateIndustry getIndustry(); @Override String addTemplate(String shortTemplateId); @Override List<WxMpTemplate> getAllPrivateTemplate(); @Override boolean delPrivateTemplate(String templateId); static final String API_URL_PREFIX; }
@Test public void testDelPrivateTemplate() throws Exception { String templateId = "RPcTe7-4BkU5A2J3imC6W0b4JbjEERcJg0whOMKJKIc"; boolean result = this.wxService.getTemplateMsgService().delPrivateTemplate(templateId); Assert.assertTrue(result); }
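Taken together, addTemplate, getAllPrivateTemplate and delPrivateTemplate form a small lifecycle around private templates. A minimal sketch of that flow, using only methods declared above and assuming the same wxService fixture as the surrounding tests (the short template id mirrors the one used in testAddTemplate):

// Hedged sketch (not from the source): exercise the private-template lifecycle end to end.
@Test
public void templateLifecycleSketch() throws Exception {
    WxMpTemplateMsgService service = this.wxService.getTemplateMsgService();
    String templateId = service.addTemplate("TM00015");        // registers by short id; returns template_id from the JSON response
    List<WxMpTemplate> all = service.getAllPrivateTemplate();  // list everything registered on the account
    Assert.assertNotNull(all);
    Assert.assertTrue(service.delPrivateTemplate(templateId)); // true when errcode == 0, otherwise WxErrorException
}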
WxCpTagServiceImpl implements WxCpTagService { @Override public void update(String tagId, String tagName) throws WxErrorException { String url = "https: JsonObject o = new JsonObject(); o.addProperty("tagid", tagId); o.addProperty("tagname", tagName); this.mainService.post(url, o.toString()); } WxCpTagServiceImpl(WxCpService mainService); @Override String create(String tagName); @Override void update(String tagId, String tagName); @Override void delete(String tagId); @Override List<WxCpTag> listAll(); @Override List<WxCpUser> listUsersByTagId(String tagId); @Override WxCpTagAddOrRemoveUsersResult addUsers2Tag(String tagId, List<String> userIds, List<String> partyIds); @Override WxCpTagAddOrRemoveUsersResult removeUsersFromTag(String tagId, List<String> userIds); }
@Test(dependsOnMethods = "testCreate") public void testUpdate() throws Exception { this.wxService.getTagService().update(this.tagId, "测试标签-改名" + System.currentTimeMillis()); }
WxMpKefuServiceImpl implements WxMpKefuService { @Override public WxMpKfMsgList kfMsgList(Date startTime, Date endTime, Long msgId, Integer number) throws WxErrorException { if (number > 10000) { throw new WxErrorException(WxError.newBuilder().setErrorMsg("非法参数请求,每次最多查询10000条记录!").build()); } if (startTime.after(endTime)) { throw new WxErrorException(WxError.newBuilder().setErrorMsg("起始时间不能晚于结束时间!").build()); } JsonObject param = new JsonObject(); param.addProperty("starttime", startTime.getTime() / 1000); param.addProperty("endtime", endTime.getTime() / 1000); param.addProperty("msgid", msgId); param.addProperty("number", number); String responseContent = this.wxMpService.post(MSGRECORD_GET_MSG_LIST, param.toString()); return WxMpKfMsgList.fromJson(responseContent); } WxMpKefuServiceImpl(WxMpService wxMpService); @Override boolean sendKefuMessage(WxMpKefuMessage message); @Override WxMpKfList kfList(); @Override WxMpKfOnlineList kfOnlineList(); @Override boolean kfAccountAdd(WxMpKfAccountRequest request); @Override boolean kfAccountUpdate(WxMpKfAccountRequest request); @Override boolean kfAccountInviteWorker(WxMpKfAccountRequest request); @Override boolean kfAccountUploadHeadImg(String kfAccount, File imgFile); @Override boolean kfAccountDel(String kfAccount); @Override boolean kfSessionCreate(String openid, String kfAccount); @Override boolean kfSessionClose(String openid, String kfAccount); @Override WxMpKfSessionGetResult kfSessionGet(String openid); @Override WxMpKfSessionList kfSessionList(String kfAccount); @Override WxMpKfSessionWaitCaseList kfSessionGetWaitCase(); @Override WxMpKfMsgList kfMsgList(Date startTime, Date endTime, Long msgId, Integer number); @Override WxMpKfMsgList kfMsgList(Date startTime, Date endTime); }
@Test public void testKfMsgList() throws WxErrorException { Date startTime = DateTime.now().minusDays(1).toDate(); Date endTime = DateTime.now().minusDays(0).toDate(); WxMpKfMsgList result = this.wxService.getKefuService().kfMsgList(startTime, endTime, 1L, 50); Assert.assertNotNull(result); System.err.println(result); } @Test public void testKfMsgListAll() throws WxErrorException { Date startTime = DateTime.now().minusDays(1).toDate(); Date endTime = DateTime.now().minusDays(0).toDate(); WxMpKfMsgList result = this.wxService.getKefuService().kfMsgList(startTime, endTime); Assert.assertNotNull(result); System.err.println(result); }
WxMpDataCubeServiceImpl implements WxMpDataCubeService { @Override public List<WxDataCubeUserSummary> getUserSummary(Date beginDate, Date endDate) throws WxErrorException { String responseContent = this.wxMpService.post(GET_USER_SUMMARY, buildParams(beginDate, endDate)); return WxDataCubeUserSummary.fromJson(responseContent); } WxMpDataCubeServiceImpl(WxMpService wxMpService); @Override List<WxDataCubeUserSummary> getUserSummary(Date beginDate, Date endDate); @Override List<WxDataCubeUserCumulate> getUserCumulate(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getArticleSummary(Date beginDate, Date endDate); @Override List<WxDataCubeArticleTotal> getArticleTotal(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserRead(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserReadHour(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserShare(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserShareHour(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsg(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgHour(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgWeek(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgMonth(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDist(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDistWeek(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDistMonth(Date beginDate, Date endDate); @Override List<WxDataCubeInterfaceResult> getInterfaceSummary(Date beginDate, Date endDate); @Override List<WxDataCubeInterfaceResult> getInterfaceSummaryHour(Date beginDate, Date endDate); }
@Test(dataProvider = "sevenDays") public void testGetUserSummary(Date beginDate, Date endDate) throws WxErrorException { List<WxDataCubeUserSummary> summaries = this.wxService.getDataCubeService() .getUserSummary(beginDate, endDate); Assert.assertNotNull(summaries); System.out.println(summaries); }
WxMpDataCubeServiceImpl implements WxMpDataCubeService { @Override public List<WxDataCubeUserCumulate> getUserCumulate(Date beginDate, Date endDate) throws WxErrorException { String responseContent = this.wxMpService.post(GET_USER_CUMULATE, buildParams(beginDate, endDate)); return WxDataCubeUserCumulate.fromJson(responseContent); } WxMpDataCubeServiceImpl(WxMpService wxMpService); @Override List<WxDataCubeUserSummary> getUserSummary(Date beginDate, Date endDate); @Override List<WxDataCubeUserCumulate> getUserCumulate(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getArticleSummary(Date beginDate, Date endDate); @Override List<WxDataCubeArticleTotal> getArticleTotal(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserRead(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserReadHour(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserShare(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserShareHour(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsg(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgHour(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgWeek(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgMonth(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDist(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDistWeek(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDistMonth(Date beginDate, Date endDate); @Override List<WxDataCubeInterfaceResult> getInterfaceSummary(Date beginDate, Date endDate); @Override List<WxDataCubeInterfaceResult> getInterfaceSummaryHour(Date beginDate, Date endDate); }
@Test(dataProvider = "sevenDays") public void testGetUserCumulate(Date beginDate, Date endDate) throws WxErrorException { List<WxDataCubeUserCumulate> result = this.wxService.getDataCubeService() .getUserCumulate(beginDate, endDate); Assert.assertNotNull(result); System.out.println(result); }
WxMpDataCubeServiceImpl implements WxMpDataCubeService { @Override public List<WxDataCubeArticleResult> getArticleSummary(Date beginDate, Date endDate) throws WxErrorException { return this.getArticleResults(GET_ARTICLE_SUMMARY, beginDate, endDate); } WxMpDataCubeServiceImpl(WxMpService wxMpService); @Override List<WxDataCubeUserSummary> getUserSummary(Date beginDate, Date endDate); @Override List<WxDataCubeUserCumulate> getUserCumulate(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getArticleSummary(Date beginDate, Date endDate); @Override List<WxDataCubeArticleTotal> getArticleTotal(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserRead(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserReadHour(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserShare(Date beginDate, Date endDate); @Override List<WxDataCubeArticleResult> getUserShareHour(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsg(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgHour(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgWeek(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgMonth(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDist(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDistWeek(Date beginDate, Date endDate); @Override List<WxDataCubeMsgResult> getUpstreamMsgDistMonth(Date beginDate, Date endDate); @Override List<WxDataCubeInterfaceResult> getInterfaceSummary(Date beginDate, Date endDate); @Override List<WxDataCubeInterfaceResult> getInterfaceSummaryHour(Date beginDate, Date endDate); }
@Test(dataProvider = "oneDay") public void testGetArticleSummary(Date date) throws WxErrorException { List<WxDataCubeArticleResult> results = this.wxService.getDataCubeService() .getArticleSummary(date, date); Assert.assertNotNull(results); System.out.println(results); }
WxCpTagServiceImpl implements WxCpTagService { @Override public List<WxCpTag> listAll() throws WxErrorException { String url = "https: String responseContent = this.mainService.get(url, null); JsonElement tmpJsonElement = new JsonParser().parse(responseContent); return WxCpGsonBuilder.INSTANCE.create() .fromJson( tmpJsonElement.getAsJsonObject().get("taglist"), new TypeToken<List<WxCpTag>>() { }.getType() ); } WxCpTagServiceImpl(WxCpService mainService); @Override String create(String tagName); @Override void update(String tagId, String tagName); @Override void delete(String tagId); @Override List<WxCpTag> listAll(); @Override List<WxCpUser> listUsersByTagId(String tagId); @Override WxCpTagAddOrRemoveUsersResult addUsers2Tag(String tagId, List<String> userIds, List<String> partyIds); @Override WxCpTagAddOrRemoveUsersResult removeUsersFromTag(String tagId, List<String> userIds); }
@Test(dependsOnMethods = {"testUpdate", "testCreate"}) public void testListAll() throws Exception { List<WxCpTag> tags = this.wxService.getTagService().listAll(); assertNotEquals(tags.size(), 0); }
VerticaBulkLoader extends BaseStep implements StepInterface { @VisibleForTesting VerticaConnection getVerticaConnection() throws SQLException { Connection conn = data.db.getConnection(); if ( conn != null ) { if ( conn instanceof VerticaConnection ) { return (VerticaConnection) conn; } else { Connection underlyingConn = null; if ( conn instanceof DelegatingConnection ) { DelegatingConnection pooledConn = (DelegatingConnection) conn; underlyingConn = pooledConn.getInnermostDelegate(); } else if ( conn instanceof javax.sql.PooledConnection ) { PooledConnection pooledConn = (PooledConnection) conn; underlyingConn = pooledConn.getConnection(); } else { try { if ( conn.isWrapperFor( VerticaConnection.class ) ) { VerticaConnection vc = conn.unwrap( VerticaConnection.class ); return vc; } } catch ( SQLException Ignored ) { } } if ( ( underlyingConn != null ) && ( underlyingConn instanceof VerticaConnection ) ) { return (VerticaConnection) underlyingConn; } } throw new IllegalStateException( "Could not retrieve a VerticaConnection from " + conn.getClass().getName() ); } else { throw new IllegalStateException( "Could not retrieve a VerticaConnection from null" ); } } VerticaBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void markStop(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetConnection() throws Exception { Connection connection1 = mock( VerticaConnection.class ); DelegatingConnection connection2 = mock( DelegatingConnection.class ); when( connection2.getInnermostDelegate() ).thenReturn( connection1 ); DelegatingConnection connection3 = mock( DelegatingConnection.class ); when( connection3.getInnermostDelegate() ).thenReturn( mock( java.sql.Connection.class ) ); Connection connection4 = mock( java.sql.Connection.class ); loaderData.db.setConnection( connection1 ); Connection rtn = loader.getVerticaConnection(); assertTrue( connection1 == rtn ); loaderData.db.setConnection( connection2 ); rtn = loader.getVerticaConnection(); assertTrue( connection1 == rtn ); loaderData.db.setConnection( connection3 ); try { rtn = loader.getVerticaConnection(); fail( "Expected IllegalStateException" ); } catch ( IllegalStateException expected ) { } loaderData.db.setConnection( connection4 ); try { rtn = loader.getVerticaConnection(); fail( "Expected IllegalStateException" ); } catch ( IllegalStateException expected ) { } }
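getVerticaConnection() combines three unwrapping strategies: DBCP's DelegatingConnection.getInnermostDelegate(), javax.sql.PooledConnection.getConnection(), and the standard java.sql.Wrapper contract. A minimal sketch of the Wrapper-based branch on its own; the helper name is hypothetical and this is not the step's actual code:

// Hedged sketch: ask the driver/pool to unwrap the vendor connection via the JDBC 4
// Wrapper contract instead of casting through pool-specific classes.
static VerticaConnection unwrapVertica(java.sql.Connection conn) throws SQLException {
    if (conn instanceof VerticaConnection) {
        return (VerticaConnection) conn;                  // already the native connection
    }
    if (conn.isWrapperFor(VerticaConnection.class)) {     // standard java.sql.Wrapper check
        return conn.unwrap(VerticaConnection.class);
    }
    throw new IllegalStateException(
        "Could not retrieve a VerticaConnection from " + conn.getClass().getName());
}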
VerticaBulkLoaderMeta extends BaseStepMeta implements StepMetaInterface, ProvidesModelerMeta { public String getXML() { StringBuilder retval = new StringBuilder(); retval.append( " " + XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) ); retval.append( " " + XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " + XMLHandler.addTagValue( "table", tablename ) ); retval.append( " " + XMLHandler.addTagValue( "specify_fields", specifyFields ) ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < fieldDatabase.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "column_name", fieldDatabase[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldStream[i] ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " " + XMLHandler.addTagValue( "exceptions_filename", exceptionsFileName ) ); retval.append( " " + XMLHandler.addTagValue( "rejected_data_filename", rejectedDataFileName ) ); retval.append( " " + XMLHandler.addTagValue( "abort_on_error", abortOnError ) ); retval.append( " " + XMLHandler.addTagValue( "direct", direct ) ); retval.append( " " + XMLHandler.addTagValue( "stream_name", streamName ) ); return retval.toString(); } VerticaBulkLoaderMeta(); @Injection( name = "CONNECTIONNAME" ) void setConnection( String connectionName ); void allocate( int nrRows ); void loadXML( Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters ); Object clone(); DatabaseMeta getDatabaseMeta(); void setDatabaseMeta( DatabaseMeta database ); String getTablename(); String getTableName(); void setTablename( String tablename ); void setSpecifyFields( boolean specifyFields ); boolean specifyFields(); boolean isDirect(); void setDirect( boolean direct ); boolean isAbortOnError(); void setAbortOnError( boolean abortOnError ); String getExceptionsFileName(); void setExceptionsFileName( String exceptionsFileName ); String getRejectedDataFileName(); void setRejectedDataFileName( String rejectedDataFileName ); String getStreamName(); void setStreamName( String streamName ); boolean isSpecifyFields(); void setDefault(); String getXML(); void readRep( Repository rep, ObjectId id_step, List<DatabaseMeta> databases, Map<String, Counter> counters ); void saveRep( Repository rep, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ); SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev ); RowMetaInterface getRequiredFields( VariableSpace space ); DatabaseMeta[] getUsedDatabaseConnections(); String[] getFieldStream(); void setFieldStream( String[] fieldStream ); String[] getFieldDatabase(); void setFieldDatabase( String[] fieldDatabase ); String getSchemaName(); void setSchemaName( String schemaName ); boolean supportsErrorHandling(); RowMetaInterface getTableRowMetaInterface(); @Override String getMissingDatabaseConnectionInformationMessage(); @Override RowMeta 
getRowMeta( StepDataInterface stepData ); @Override List<String> getDatabaseFields(); @Override List<String> getStreamFields(); @AfterInjection void afterInjectionSynchronization(); }
@Test public void testGetXml() throws Exception { VerticaBulkLoaderMeta vbl = new VerticaBulkLoaderMeta(); vbl.setDefault(); vbl.setFieldDatabase( new String[] { "fieldDB1", "fieldDB2", "fieldDB3", "fieldDB4", "fieldDB5" } ); vbl.setFieldStream( new String[] { "fieldStr1", "fieldStr2", "fieldStr3" } ); try { vbl.getXML(); fail( "Before calling afterInjectionSynchronization, should have thrown an ArrayIndexOOB" ); } catch ( Exception expected ) { } vbl.afterInjectionSynchronization(); vbl.getXML(); int targetSz = vbl.getFieldDatabase().length; assertEquals( targetSz, vbl.getFieldStream().length ); }
StreamEncoder { public ByteBuffer getBuffer() { return this.buffer; } StreamEncoder( List<ColumnSpec> columns, PipedInputStream inputStream ); void close(); void writeHeader(); void writeRow( RowMetaInterface rowMeta, Object[] row ); ByteBuffer getBuffer(); static final int NUM_ROWS_TO_BUFFER; }
@Test public void testStreamEncoderConstructor_NoException_ByteBufferIsPositiveInt() throws Exception { int maxTypeLength = 500; ColumnSpec cs = new ColumnSpec( ColumnSpec.VariableWidthType.VARCHAR, maxTypeLength ); columns.add( cs ); try { StreamEncoder stEncoder = new StreamEncoder( columns, inputStream ); long expectedBufferSize = getExpectedBufferSize( maxTypeLength, columns.size() ); expectedBufferSize += columns.size() * 4 * maxTypeLength; assertEquals( expectedBufferSize, stEncoder.getBuffer().capacity() ); } catch ( Exception e ) { fail( "No exception expected, but got: " + e ); } }
StreamEncoder { int countMainByteBufferSize() { long bufferSize = (long) getRowMaxSize() * NUM_ROWS_TO_BUFFER; return (int) ( bufferSize > 0 && bufferSize < MAXIMUM_BUFFER_SIZE ? bufferSize : MAXIMUM_BUFFER_SIZE ); } StreamEncoder( List<ColumnSpec> columns, PipedInputStream inputStream ); void close(); void writeHeader(); void writeRow( RowMetaInterface rowMeta, Object[] row ); ByteBuffer getBuffer(); static final int NUM_ROWS_TO_BUFFER; }
@Test public void testCountedBufferSizeIsInt_WhenToBufferAllRowMaxSizeRequiresMoreThanInt() throws Exception { try { assertEquals( MAXIMUM_BUFFER_SIZE, getSpyStreamEncoder().countMainByteBufferSize() ); } catch ( Exception e ) { fail( "No exception expected, but got: " + e ); } }
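countMainByteBufferSize() clamps the product rowMaxSize * NUM_ROWS_TO_BUFFER into an int: the long intermediate guards against overflow, and anything non-positive or above MAXIMUM_BUFFER_SIZE falls back to MAXIMUM_BUFFER_SIZE. The same clamp in isolation, with the constants passed as parameters so the values here are illustrative rather than the real ones:

// Illustrative clamp; the real constants live in StreamEncoder.
static int clampBufferSize(long rowMaxSize, int numRowsToBuffer, int maximumBufferSize) {
    long bufferSize = rowMaxSize * numRowsToBuffer;                  // may exceed Integer.MAX_VALUE
    return (int) (bufferSize > 0 && bufferSize < maximumBufferSize
        ? bufferSize
        : maximumBufferSize);                                        // overflow or oversize -> cap at the maximum
}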
GreeterService extends GreeterGrpc.GreeterImplBase { @Override public void sayHello(HelloRequest request, StreamObserver<HelloReply> responseObserver) { log.info("get request - {}", request.getName()); HelloReply helloReply = HelloReply .newBuilder() .setMessage(String.format("hello %s", request.getName())) .build(); log.info("reply with {}", helloReply); responseObserver.onNext(helloReply); responseObserver.onCompleted(); } @Override void sayHello(HelloRequest request, StreamObserver<HelloReply> responseObserver); }
@Test public void sayHello() { String name = faker.superhero().name(); HelloRequest request = HelloRequest.newBuilder().setName(name).build(); HelloReply helloReply = stub.sayHello(request); assertThat(helloReply.getMessage()).isEqualTo("hello " + name); }
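The test calls a blocking stub against GreeterService, but the wiring of stub is outside this snippet. A common setup for such tests is gRPC's in-process transport; the following is an assumption about the fixture (the server name, stub field and GreeterGrpc generated class naming are not taken from the source):

// Hedged sketch of how `stub` might be wired: in-process server plus blocking stub.
// start() throws IOException, so this would normally live in a @Before method.
String serverName = "greeter-test";
Server server = InProcessServerBuilder.forName(serverName)
    .directExecutor()
    .addService(new GreeterService())   // the service under test
    .build()
    .start();
ManagedChannel channel = InProcessChannelBuilder.forName(serverName)
    .directExecutor()
    .build();
GreeterGrpc.GreeterBlockingStub stub = GreeterGrpc.newBlockingStub(channel);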
TransportClientProvider implements Provider<TransportClient> { public TransportClient get() { final String hostCsv = configuration.getString(CONFIG_ES_CLUSTER_HOST); final List<String> hosts = Splitter.on(",").splitToList(hostCsv); Preconditions.checkState(!hosts.isEmpty()); final TransportClient transportClient = new PreBuiltTransportClient(esSettings()); final Integer esTransportPort = configuration.getInteger(CONFIG_ES_CLUSTER_PORT, 9300); log.info("connect to elastic search {} on port {} ", hostCsv, esTransportPort); hosts.forEach( host -> transportClient.addTransportAddress( new InetSocketTransportAddress(new InetSocketAddress(host, esTransportPort)) ) ); return transportClient; } TransportClient get(); }
@Test public void get() throws Exception { TransportClient transportClient = transportClientProvider.get(); assertThat(transportClient).isNotNull(); }
ProductReadService extends ProductReadServiceGrpc.ProductReadServiceImplBase { @Override public void searchProducts(SearchProductsRequest request, StreamObserver<SearchProductsResponse> responseObserver) { try { responseObserver.onNext(productDao.searchProducts(request)); responseObserver.onCompleted(); counter.labels("searchProducts", "success"); } catch (Exception e) { log.error(" error on search product with request - {}", request, e); responseObserver.onError(e); counter.labels("searchProducts", "failed"); } } @Inject ProductReadService(); @Override void searchProducts(SearchProductsRequest request, StreamObserver<SearchProductsResponse> responseObserver); @Override void downloadProducts(DownloadProductsRequest request, StreamObserver<Product> responseObserver); @Override StreamObserver<Product> calculateProductScore(StreamObserver<CalculateProductScoreResponse> responseObserver); @Override void downloadProductImage(DownloadProductImageRequest request, StreamObserver<DataChunk> responseObserver); }
@Test public void searchProducts() throws Exception { SearchProductsResponse searchProductsResponse = SearchProductsResponse.newBuilder() .addProducts( Product.newBuilder() .setProductId(faker.number().randomNumber()) .setProductName(faker.name().fullName()) .setCategory(faker.numerify("category-###")) .setProductStatus(ProductStatus.InStock) ).build(); when(productDao.searchProducts(any())).thenReturn(searchProductsResponse); SearchProductsResponse result = blockingStub.searchProducts(SearchProductsRequest.getDefaultInstance()); assertThat(result).isEqualTo(searchProductsResponse); } @Test(expected = StatusRuntimeException.class) public void searchProducts_with_exception() throws Exception { when(productDao.searchProducts(any())).thenThrow(new IllegalStateException()); blockingStub.searchProducts(SearchProductsRequest.getDefaultInstance()); }
ProductReadService extends ProductReadServiceGrpc.ProductReadServiceImplBase { @Override public void downloadProducts(DownloadProductsRequest request, StreamObserver<Product> responseObserver) { PublishSubject<Product> productPublishSubject = PublishSubject.create(); productPublishSubject .doOnNext(product -> { responseObserver.onNext(product); counter.labels("downloadProducts", "success"); }) .doOnComplete(() -> responseObserver.onCompleted()) .doOnError(t -> { responseObserver.onError(t); counter.labels("downloadProducts", "failed"); }) .subscribe(); productDao.downloadProducts(request, productPublishSubject); } @Inject ProductReadService(); @Override void searchProducts(SearchProductsRequest request, StreamObserver<SearchProductsResponse> responseObserver); @Override void downloadProducts(DownloadProductsRequest request, StreamObserver<Product> responseObserver); @Override StreamObserver<Product> calculateProductScore(StreamObserver<CalculateProductScoreResponse> responseObserver); @Override void downloadProductImage(DownloadProductImageRequest request, StreamObserver<DataChunk> responseObserver); }
@Test public void downloadProducts() throws Exception { doAnswer(invocation -> { PublishSubject<Product> publishSubject = (PublishSubject<Product>) invocation.getArguments()[1]; publishSubject.onNext(Product.getDefaultInstance()); publishSubject.onComplete(); return null; }).when(productDao).downloadProducts(any(), any()); List<Product> downloadedProducts = Lists.newArrayList(); AtomicBoolean onCompletedCalled = new AtomicBoolean(false); StreamObserver<Product> downloadObserver = new StreamObserver<Product>() { @Override public void onNext(Product value) { downloadedProducts.add(value); } @Override public void onError(Throwable t) { fail("should not fail"); } @Override public void onCompleted() { onCompletedCalled.compareAndSet(false, true); } }; productReadService.downloadProducts(DownloadProductsRequest.getDefaultInstance(), downloadObserver); verify(productDao, times(1)).downloadProducts(any(), any()); assertThat(downloadedProducts).containsOnly(Product.getDefaultInstance()); assertThat(onCompletedCalled).isTrue(); }
ProductReadService extends ProductReadServiceGrpc.ProductReadServiceImplBase { @Override public StreamObserver<Product> calculateProductScore(StreamObserver<CalculateProductScoreResponse> responseObserver) { PublishSubject<CalculateProductScoreResponse> downloadStream = PublishSubject.create(); downloadStream .doOnNext(response -> { responseObserver.onNext(response); counter.labels("calculateProductScore_download", "success"); }) .doOnError(t -> { log.error("error on calculate product score response", t); responseObserver.onError(t); counter.labels("calculateProductScore_download", "failed"); }) .doOnComplete(() -> { log.info("calculate product score response done"); responseObserver.onCompleted(); }) .subscribe(); PublishSubject<Product> uploadStream = PublishSubject.create(); uploadStream .doOnNext(product -> { log.debug(" calculate product score - {}", product); productDao.calculateProductScore(product, downloadStream); counter.labels("calculateProductScore_upload", "success"); }) .doOnError(t -> { log.info("client upload got error", t); downloadStream.onError(t); counter.labels("calculateProductScore_upload", "failed"); }) .doOnComplete(() -> { log.info("client upload complete"); downloadStream.onComplete(); }) .subscribe(); return new RxStreamObserver<>(uploadStream); } @Inject ProductReadService(); @Override void searchProducts(SearchProductsRequest request, StreamObserver<SearchProductsResponse> responseObserver); @Override void downloadProducts(DownloadProductsRequest request, StreamObserver<Product> responseObserver); @Override StreamObserver<Product> calculateProductScore(StreamObserver<CalculateProductScoreResponse> responseObserver); @Override void downloadProductImage(DownloadProductImageRequest request, StreamObserver<DataChunk> responseObserver); }
@Test public void calculateProductScore() throws Exception { doAnswer(invocation -> { PublishSubject<CalculateProductScoreResponse> downloadStream = (PublishSubject<CalculateProductScoreResponse>) invocation.getArguments()[1]; downloadStream.onNext(CalculateProductScoreResponse.getDefaultInstance()); return null; }).when(productDao).calculateProductScore(any(), any()); List<CalculateProductScoreResponse> responses = Lists.newArrayList(); AtomicBoolean onErrorCalled = new AtomicBoolean(false); AtomicBoolean onCompleted = new AtomicBoolean(false); StreamObserver<Product> uploadStream = productReadService.calculateProductScore(new StreamObserver<CalculateProductScoreResponse>() { @Override public void onNext(CalculateProductScoreResponse value) { responses.add(value); } @Override public void onError(Throwable t) { onErrorCalled.compareAndSet(false, true); } @Override public void onCompleted() { onCompleted.compareAndSet(false, true); } }); List<Product> products = IntStream.range(1, 5) .mapToObj(index -> Product.getDefaultInstance()) .collect(Collectors.toList()); products.forEach(product -> uploadStream.onNext(product)); uploadStream.onCompleted(); assertThat(responses.size()).isEqualTo(4); assertThat(onCompleted).isTrue(); assertThat(onErrorCalled).isFalse(); }
ProductReadService extends ProductReadServiceGrpc.ProductReadServiceImplBase { @Override public void downloadProductImage(DownloadProductImageRequest request, StreamObserver<DataChunk> responseObserver) { try { BufferedInputStream imageStream = new BufferedInputStream( productImageSeeker.seekProductImage(request.getProductId()) ); int bufferSize = 256 * 1024; byte[] buffer = new byte[bufferSize]; int length; while ((length = imageStream.read(buffer, 0, bufferSize)) != -1) { responseObserver.onNext( DataChunk.newBuilder().setData(ByteString.copyFrom(buffer, 0, length)).build() ); } responseObserver.onCompleted(); imageStream.close(); counter.labels("downloadProductImage", "success"); } catch (Exception e) { counter.labels("downloadProductImage", "failed"); log.error("error on read product image", e); responseObserver.onError(e); } } @Inject ProductReadService(); @Override void searchProducts(SearchProductsRequest request, StreamObserver<SearchProductsResponse> responseObserver); @Override void downloadProducts(DownloadProductsRequest request, StreamObserver<Product> responseObserver); @Override StreamObserver<Product> calculateProductScore(StreamObserver<CalculateProductScoreResponse> responseObserver); @Override void downloadProductImage(DownloadProductImageRequest request, StreamObserver<DataChunk> responseObserver); }
@Test public void downloadProductImage() throws Exception { when(productImageSeeker.seekProductImage(anyLong())) .thenReturn(Resources.getResource("Large_Scaled_Forest_Lizard.jpg").openStream()); AtomicBoolean completed = new AtomicBoolean(false); AtomicBoolean error = new AtomicBoolean(false); File imageFile = File.createTempFile("image", ".jpg"); imageFile.deleteOnExit(); Files.touch(imageFile); ByteSink byteSink = Files.asByteSink(imageFile, FileWriteMode.APPEND); StreamObserver<DataChunk> streamObserver = new StreamObserver<DataChunk>() { @Override public void onNext(DataChunk dataChunk) { try { byteSink.write(dataChunk.getData().toByteArray()); } catch (IOException e) { log.error("error on write files", e); onError(e); } } @Override public void onError(Throwable t) { error.compareAndSet(false, true); } @Override public void onCompleted() { log.info("write image to {}", imageFile.getAbsoluteFile()); completed.compareAndSet(false, true); } }; stub.downloadProductImage(DownloadProductImageRequest.getDefaultInstance(), streamObserver); while (!completed.get() && !error.get()) { Thread.sleep(500); } assertThat(completed.get()).isTrue(); assertThat(error.get()).isFalse(); try (InputStream destImageStream = new FileInputStream(imageFile); InputStream origImageStream = Resources.getResource("Large_Scaled_Forest_Lizard.jpg").openStream()) { assertThat(DigestUtils.md5Hex(destImageStream)).isEqualTo( DigestUtils.md5Hex(origImageStream) ); } }
ProductUpdateService extends ProductUpdateServiceGrpc.ProductUpdateServiceImplBase { @Override public StreamObserver<Product> uploadProduct(StreamObserver<UploadProductResponse> responseObserver) { PublishSubject<Product> publishSubject = PublishSubject.create(); publishSubject .doOnNext(product -> { log.info("saving product - {} ", product); productDao.upsertProduct(product); }) .doOnError(t -> responseObserver.onError(t)) .doOnComplete(() -> { responseObserver.onNext(UploadProductResponse.newBuilder().build()); responseObserver.onCompleted(); }) .subscribe(); return new RxStreamObserver<>(publishSubject); } @Override StreamObserver<Product> uploadProduct(StreamObserver<UploadProductResponse> responseObserver); }
@Test public void uploadProduct() throws Exception { Set<UploadProductResponse> responseHolder = Sets.newHashSet(); Set<Throwable> exceptionHolder = Sets.newHashSet(); AtomicBoolean completed = new AtomicBoolean(false); StreamObserver<Product> uploadStream = stub.uploadProduct(new StreamObserver<UploadProductResponse>() { @Override public void onNext(UploadProductResponse value) { responseHolder.add(value); } @Override public void onError(Throwable t) { exceptionHolder.add(t); } @Override public void onCompleted() { completed.compareAndSet(false, true); } }); Product product = Product.newBuilder() .setProductId(faker.number().randomNumber()) .setProductName(faker.company().name()) .setProductPrice(faker.number().randomDouble(2, 10, 100)) .setProductStatus(ProductStatus.InStock) .build(); ImmutableList .of(product, product, product) .stream() .forEach(prod -> uploadStream.onNext(prod)); uploadStream.onCompleted(); while (!completed.get()) { Thread.sleep(200); } assertThat(responseHolder.size()).isEqualTo(1); assertThat(exceptionHolder).isEmpty(); assertThat(completed).isTrue(); verify(productDao, times(3)).upsertProduct(any()); }
RxStreamObserver implements StreamObserver<T> { @Override public void onError(Throwable t) { observer.onError(t); } RxStreamObserver(Observer<T> observer); @Override void onNext(T value); @Override void onError(Throwable t); @Override void onCompleted(); }
@Test public void clientStreaming_error_break_flow() throws Exception { PublishSubject<Integer> publishSubject = PublishSubject.create(); Set<Integer> resultsHolder = Sets.newConcurrentHashSet(); Set<Throwable> exceptionsHolder = Sets.newConcurrentHashSet(); AtomicBoolean complete = new AtomicBoolean(false); Disposable disposable = publishSubject .doOnNext(num->resultsHolder.add(num)) .doOnError(t->exceptionsHolder.add(t)) .doOnComplete(()->complete.compareAndSet(false,true)) .subscribe(); assertThat(disposable.isDisposed()).isFalse(); RxStreamObserver<Integer> rxStreamObserver = new RxStreamObserver<>(publishSubject); rxStreamObserver.onError(new IllegalStateException()); assertThat(disposable.isDisposed()).isTrue(); assertThat(resultsHolder).isEmpty(); assertThat(exceptionsHolder.size()).isEqualTo(1); assertThat(exceptionsHolder.iterator().next()).isInstanceOf(IllegalStateException.class); assertThat(complete).isFalse(); }
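Only the constructor and onError of RxStreamObserver are shown above; based on how the services feed PublishSubject instances, the adapter presumably forwards all three StreamObserver callbacks to the wrapped Observer. A hedged reconstruction, not the actual source:

// Hedged reconstruction: bridge gRPC's StreamObserver callbacks onto an RxJava Observer.
public class RxStreamObserver<T> implements StreamObserver<T> {
    private final Observer<T> observer;

    RxStreamObserver(Observer<T> observer) {
        this.observer = observer;
    }

    @Override
    public void onNext(T value) {
        observer.onNext(value);       // push each inbound gRPC message into the Rx stream
    }

    @Override
    public void onError(Throwable t) {
        observer.onError(t);          // matches the onError body shown in the source
    }

    @Override
    public void onCompleted() {
        observer.onComplete();        // RxJava 2 spelling (no trailing 'd')
    }
}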
EchoService extends EchoServiceGrpc.EchoServiceImplBase { @Override public void echo(EchoRequest request, StreamObserver<EchoResponse> responseObserver) { responseObserver.onNext(EchoResponse.newBuilder().setPong(request.getPing()).build()); responseObserver.onCompleted(); } @Override void echo(EchoRequest request, StreamObserver<EchoResponse> responseObserver); }
@Test public void echo() throws Exception { EchoRequest echoRequest = EchoRequest.newBuilder().setPing(faker.hacker().verb()).build(); EchoResponse echoResponse = stub.echo(echoRequest); assertThat(echoResponse.getPong()).isEqualTo(echoRequest.getPing()); }
ProductDao { public boolean initIndexIfNotExists() throws IOException { final IndicesExistsResponse existsResponse = esClient.admin().indices().prepareExists(INDEX).get(); if (existsResponse.isExists()) { return false; } final String settings = Resources.toString( getClass().getResource("/elasticsearch/product_settings.json"), Charset.defaultCharset() ); CreateIndexRequestBuilder createIndexRequestBuilder = esClient .admin() .indices() .prepareCreate(INDEX) .setSettings(settings); final String mapping = Resources.toString( getClass().getResource("/elasticsearch/product_mappings.json"), Charset.defaultCharset() ); createIndexRequestBuilder = createIndexRequestBuilder.addMapping(TYPE, mapping); return createIndexRequestBuilder.get().isShardsAcked(); } void upsertProduct(Product product); boolean initIndexIfNotExists(); SearchProductsResponse searchProducts(SearchProductsRequest request); void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject); void calculateProductScore(Product product, PublishSubject<CalculateProductScoreResponse> downloadStream); static final String INDEX; static final String TYPE; static final TimeValue DEFAULT_SCROLL_TIME_VALUE; }
@Test public void initIndexIfNotExists() throws Exception { final IndicesExistsResponse existsResponse = esClient.admin().indices().prepareExists(INDEX).get(); assertThat(existsResponse.isExists()).isTrue(); }
Metric { abstract Class service(); static Builder builder(); }
@Test public void service() throws Exception { assertThat(metric.service()).isAssignableFrom(MetricTest.class); }
ProductDao { public void upsertProduct(Product product) throws InvalidProtocolBufferException { log.debug("save product into ES"); final UpdateRequestBuilder updateRequestBuilder = esClient .prepareUpdate(INDEX, TYPE, String.valueOf(product.getProductId())) .setDoc( jsonPrinter .includingDefaultValueFields() .omittingInsignificantWhitespace() .print(product) ) .setDocAsUpsert(true); updateRequestBuilder.get(); } void upsertProduct(Product product); boolean initIndexIfNotExists(); SearchProductsResponse searchProducts(SearchProductsRequest request); void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject); void calculateProductScore(Product product, PublishSubject<CalculateProductScoreResponse> downloadStream); static final String INDEX; static final String TYPE; static final TimeValue DEFAULT_SCROLL_TIME_VALUE; }
@Test public void upsertProduct() throws Exception { Product product = Product.newBuilder() .setProductId(faker.number().randomNumber()) .setProductName(faker.company().name()) .setProductPrice(faker.number().randomDouble(2, 10, 100)) .setProductStatus(ProductStatus.InStock) .build(); productDao.upsertProduct(product); esClient.admin().indices().flush(Requests.flushRequest(INDEX)).actionGet(); GetResponse getResponse = esClient.prepareGet(INDEX, TYPE, String.valueOf(product.getProductId())).get(); JsonFormat.Parser jsonParser = injector.getInstance(JsonFormat.Parser.class); Product.Builder builder = Product.newBuilder(); jsonParser.merge(getResponse.getSourceAsString(), builder); assertThat(builder.build()).isEqualTo(product); }
ProductDao { public SearchProductsResponse searchProducts(SearchProductsRequest request) throws InvalidProtocolBufferException { QueryBuilder queryBuilder = QueryBuilders.boolQuery() .must(QueryBuilders.matchQuery("productName", request.getKeyWord())); SearchResponse response = esClient.prepareSearch(INDEX) .setTypes(TYPE) .setQuery(queryBuilder) .setSize(request.getLimit()) .execute() .actionGet(); SearchHits hits = response.getHits(); SearchProductsResponse.Builder responseBuilder = SearchProductsResponse.newBuilder(); for (SearchHit hit : hits) { Product.Builder builder = Product.newBuilder(); jsonParser.merge(hit.getSourceAsString(), builder); responseBuilder.addProducts(builder.build()); } return responseBuilder.build(); } void upsertProduct(Product product); boolean initIndexIfNotExists(); SearchProductsResponse searchProducts(SearchProductsRequest request); void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject); void calculateProductScore(Product product, PublishSubject<CalculateProductScoreResponse> downloadStream); static final String INDEX; static final String TYPE; static final TimeValue DEFAULT_SCROLL_TIME_VALUE; }
@Test public void searchProducts() throws Exception { Product product1 = Product.newBuilder() .setProductId(faker.number().randomNumber()) .setProductName("apple guice") .setProductPrice(faker.number().randomDouble(2, 10, 100)) .setProductStatus(ProductStatus.InStock) .build(); Product product2 = Product.newBuilder() .setProductId(faker.number().randomNumber()) .setProductName("cheese cake") .setProductPrice(faker.number().randomDouble(2, 10, 100)) .setProductStatus(ProductStatus.InStock) .build(); productDao.upsertProduct(product1); productDao.upsertProduct(product2); esClient.admin().indices().flush(Requests.flushRequest(INDEX)).actionGet(); SearchProductsResponse response = productDao.searchProducts( SearchProductsRequest .newBuilder() .setKeyWord("apple") .setLimit(5) .build() ); assertThat(response.getProductsList()).containsOnly(product1); }
ProductDao { public void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject) { QueryBuilder queryBuilder = QueryBuilders.termQuery("category", request.getCategory()); SearchResponse scrollResponse = esClient .prepareSearch(INDEX) .setScroll(DEFAULT_SCROLL_TIME_VALUE) .setTypes(TYPE) .setQuery(queryBuilder) .setSize(SCROLL_SIZE) .get(); do { scrollResponse.getHits().forEach(hit -> { try { Product.Builder builder = Product.newBuilder(); jsonParser.merge(hit.sourceAsString(), builder); productPublishSubject.onNext(builder.build()); } catch (IOException ioe) { log.error("Unable to read product record", ioe); productPublishSubject.onError(ioe); throw new IllegalStateException(ioe); } }); scrollResponse = esClient .prepareSearchScroll(scrollResponse.getScrollId()) .setScroll(DEFAULT_SCROLL_TIME_VALUE) .execute() .actionGet(); } while (scrollResponse.getHits().getHits().length != 0); productPublishSubject.onComplete(); } void upsertProduct(Product product); boolean initIndexIfNotExists(); SearchProductsResponse searchProducts(SearchProductsRequest request); void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject); void calculateProductScore(Product product, PublishSubject<CalculateProductScoreResponse> downloadStream); static final String INDEX; static final String TYPE; static final TimeValue DEFAULT_SCROLL_TIME_VALUE; }
@Test public void downloadProducts() throws Exception { String category = faker.numerify("category-##"); List<Product> sampleProducts = IntStream.range(1, 5).mapToObj(index -> { Product product = createProduct(category); try { productDao.upsertProduct(product); } catch (InvalidProtocolBufferException e) { log.error(" error on creating sample product for test downloadProducts", e); } return product; }).collect(Collectors.toList()); esClient.admin().indices().flush(Requests.flushRequest(INDEX)).actionGet(); PublishSubject<Product> productPublishSubject = PublishSubject.create(); List<Product> downloadedProducts = Lists.newArrayList(); Disposable disposable = productPublishSubject .doOnNext(product -> downloadedProducts.add(product)) .doOnError(t -> fail("should not fail", t)) .doOnComplete(() -> { Product[] downloadedProductArray = sampleProducts.toArray(new Product[]{}); assertThat(downloadedProducts).containsOnly(downloadedProductArray); }) .subscribe(); productDao.downloadProducts( DownloadProductsRequest.newBuilder() .setCategory(category) .build(), productPublishSubject ); disposable.dispose(); }
ProductDao { public void calculateProductScore(Product product, PublishSubject<CalculateProductScoreResponse> downloadStream) { downloadStream.onNext( CalculateProductScoreResponse .newBuilder() .setProduct(product) .setScore((long) product.getProductPrice()) .build() ); } void upsertProduct(Product product); boolean initIndexIfNotExists(); SearchProductsResponse searchProducts(SearchProductsRequest request); void downloadProducts(DownloadProductsRequest request, PublishSubject<Product> productPublishSubject); void calculateProductScore(Product product, PublishSubject<CalculateProductScoreResponse> downloadStream); static final String INDEX; static final String TYPE; static final TimeValue DEFAULT_SCROLL_TIME_VALUE; }
@Test public void calculateProductScore() throws Exception { PublishSubject<CalculateProductScoreResponse> publishSubject = PublishSubject.create(); List<CalculateProductScoreResponse> responses = Lists.newArrayList(); publishSubject .doOnNext(response -> responses.add(response)) .subscribe(); Product product = createProduct("category"); productDao.calculateProductScore(product, publishSubject); assertThat(responses.size()).isEqualTo(1); publishSubject.onComplete(); }
Metric { abstract Optional<String> action(); static Builder builder(); }
@Test public void action() throws Exception { assertThat(metric.action().get()).isEqualTo("test"); }
Metric { abstract Optional<List<String>> labels(); static Builder builder(); }
@Test public void labels() throws Exception { assertThat(metric.labels()).isNotEmpty(); assertThat(metric.labels().get()).contains("labela", "labelb"); }
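The labels test implies the AutoValue builder also carries the optional label list. A sketch of building such a Metric; the setLabels name follows the setService/setAction convention seen in the other tests but is an assumption, not taken from the source:

// Hedged sketch: build a Metric carrying labels. setLabels is a hypothetical setter name.
Metric metric = Metric.builder()
    .setService(MetricTest.class)
    .setAction("test")
    .setLabels(ImmutableList.of("labela", "labelb"))   // hypothetical, mirrors the assertion above
    .build();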
Metric { public static Builder builder() { return new AutoValue_Metric.Builder(); } static Builder builder(); }
@Test(expected = IllegalStateException.class) public void mandatory_field() throws Exception { metric = Metric.builder().build(); fail("should fail as mandatory field is missing"); }
CounterFactory { public static Counter create(final Metric metric) { return cachedCounter.getUnchecked(metric); } static Counter create(final Metric metric); }
@Test public void create() throws Exception { Counter counter1 = CounterFactory.create( Metric.builder() .setService(CounterFactoryTest.class) .setAction("test") .build() ); assertThat(counter1).isNotNull(); Counter counter2 = CounterFactory.create( Metric.builder() .setService(CounterFactoryTest.class) .setAction("test") .build() ); assertThat(counter1).isSameAs(counter2); }
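CounterFactory.create delegates to a cachedCounter, which is why the test expects the same Counter instance back for an equal Metric. One plausible shape for that cache, sketched as an assumption; the metric name and help strings below are made up and the real field may differ:

// Hedged sketch: a Metric-keyed Guava cache so equal Metric values map to one registered
// Prometheus Counter, consistent with the isSameAs assertion above.
private static final LoadingCache<Metric, Counter> cachedCounter = CacheBuilder.newBuilder()
    .build(new CacheLoader<Metric, Counter>() {
        @Override
        public Counter load(Metric metric) {
            return Counter.build()
                .name(metric.service().getSimpleName() + "_" + metric.action().orElse("none")) // illustrative naming scheme
                .help("auto-registered counter")                                               // illustrative help text
                .register();
        }
    });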
ConfigurationProvider implements Provider<Configuration> { @Override public Configuration get() { Configurations configs = new Configurations(); try { return configs.properties(new File(getPropertyFilePath())); } catch (ConfigurationException e) { log.error(" error on build configuration", e); throw new IllegalStateException(e); } } ConfigurationProvider(); @Override Configuration get(); String getPropertyFilePath(); }
@Test public void get() throws Exception { Configuration configuration = configurationProvider.get(); assertThat(configuration.getString("key1")).isEqualTo("value1"); } @Test(expected = IllegalStateException.class) public void properties_file_not_found() throws Exception { System.setProperty(Constants.CONFIG_PROP_FILE_PATH, "xxx"); configurationProvider = new ConfigurationProvider(); configurationProvider.get(); fail("should have thrown IllegalStateException"); }
ConfigurationProvider implements Provider<Configuration> { public String getPropertyFilePath() { return propertyFilePath; } ConfigurationProvider(); @Override Configuration get(); String getPropertyFilePath(); }
@Test public void getPropertyFilePath() throws Exception { assertThat(configurationProvider.getPropertyFilePath()).isEqualTo(path); }
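getPropertyFilePath() returns a field set in the constructor; given that the failing test above flips the Constants.CONFIG_PROP_FILE_PATH system property before constructing the provider, the constructor presumably reads that property. A hedged sketch of that constructor; the fallback literal is made up:

// Hedged sketch: resolve the properties file location from the system property used in the tests.
private final String propertyFilePath;

ConfigurationProvider() {
    this.propertyFilePath = System.getProperty(Constants.CONFIG_PROP_FILE_PATH, "config.properties"); // fallback is an assumption
}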
ElasticSearchModule extends AbstractModule { @Override protected void configure() { bind(Configuration.class).toProvider(ConfigurationProvider.class).in(Singleton.class); bind(TransportClient.class).toProvider(TransportClientProvider.class).in(Singleton.class); bind(JsonFormat.Printer.class).toInstance(JsonFormat.printer()); bind(JsonFormat.Parser.class).toInstance(JsonFormat.parser()); } }
@Test public void testConfigure() throws Exception { Injector injector = Guice.createInjector(new ElasticSearchModule()); Configuration configuration = injector.getInstance(Configuration.class); assertThat(configuration.getString("key1")).isEqualTo("value1"); }
LoginViewModel extends BaseViewModel<LoginNavigator> { public void login(String email, String password) { setIsLoading(true); getCompositeDisposable().add(getDataManager() .doServerLoginApiCall(new LoginRequest.ServerLoginRequest(email, password)) .doOnSuccess(response -> getDataManager() .updateUserInfo( response.getAccessToken(), response.getUserId(), DataManager.LoggedInMode.LOGGED_IN_MODE_SERVER, response.getUserName(), response.getUserEmail(), response.getGoogleProfilePicUrl())) .subscribeOn(getSchedulerProvider().io()) .observeOn(getSchedulerProvider().ui()) .subscribe(response -> { setIsLoading(false); getNavigator().openMainActivity(); }, throwable -> { setIsLoading(false); getNavigator().handleError(throwable); })); } LoginViewModel(DataManager dataManager, SchedulerProvider schedulerProvider); boolean isEmailAndPasswordValid(String email, String password); void login(String email, String password); void onFbLoginClick(); void onGoogleLoginClick(); void onServerLoginClick(); }
@Test public void testServerLoginSuccess() { String email = "[email protected]"; String password = "password"; LoginResponse loginResponse = new LoginResponse(); doReturn(Single.just(loginResponse)) .when(mMockDataManager) .doServerLoginApiCall(new LoginRequest.ServerLoginRequest(email, password)); mLoginViewModel.login(email, password); mTestScheduler.triggerActions(); verify(mLoginCallback).openMainActivity(); }
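testServerLoginSuccess relies on mTestScheduler.triggerActions(), which implies the SchedulerProvider handed to LoginViewModel returns a single RxJava TestScheduler for both io() and ui(). A sketch of that fixture, stated as an assumption (mocking the provider avoids guessing at any other factory methods the interface may declare; setNavigator is a hypothetical hook for the mLoginCallback used in verify):

// Hedged sketch of the test wiring implied above: one TestScheduler backs both io() and ui(),
// so triggerActions() drains the login chain synchronously inside the test.
TestScheduler mTestScheduler = new TestScheduler();
SchedulerProvider testSchedulerProvider = mock(SchedulerProvider.class);
when(testSchedulerProvider.io()).thenReturn(mTestScheduler);
when(testSchedulerProvider.ui()).thenReturn(mTestScheduler);
LoginViewModel mLoginViewModel = new LoginViewModel(mMockDataManager, testSchedulerProvider);
mLoginViewModel.setNavigator(mLoginCallback);   // hypothetical setter; the ViewModel calls getNavigator()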
RAMLtoSwagger implements Constants { @SuppressWarnings("WeakerAccess, unused") public String convertToSwagger(String raml) { return convertToSwagger(raml, null); } RAMLtoSwagger(); @SuppressWarnings("WeakerAccess, unused") String convertToSwagger(String raml); @SuppressWarnings("WeakerAccess, unused") String convertToSwagger(String raml, ResourceLoader resourceLoader); @SuppressWarnings("WeakerAccess, unused") String convertToSwagger(InputStream input); @SuppressWarnings("WeakerAccess, unused") String convertToSwagger(InputStream input, ResourceLoader resourceLoader); }
@Test @SuppressWarnings("unchecked") public void convertToSwaggerValidJson() throws Exception { String raml = IOUtils.resourceToString("/product-api.raml", Charset.forName("UTF-8")); Assert.assertNotNull(raml); String swagger = raml2Swagger.convertToSwagger(raml); Assert.assertNotNull(swagger); TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {}; Map<String, Object> json = objectMapper.readValue(swagger, typeRef); Assert.assertNotNull(json); } @Test @SuppressWarnings("unchecked") public void convertToSwaggerFromString() throws Exception { String raml = IOUtils.resourceToString("/product-api.raml", Charset.forName("UTF-8")); Assert.assertNotNull(raml); String swagger = raml2Swagger.convertToSwagger(raml); Assert.assertNotNull(swagger); TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {}; Map<String, Object> json = objectMapper.readValue(swagger, typeRef); Assert.assertNotNull(json); Assert.assertEquals("Swagger version should be 2.0", json.get("swagger").toString(), "2.0"); Assert.assertTrue("Info section should be a map", json.get("info") instanceof Map); Assert.assertEquals("Info section should have three keys", 3, ((Map<String, Object>)json.get("info")).size()); Assert.assertTrue("Schemes section should be a list", json.get("schemes") instanceof List); Assert.assertEquals("Info section should have two values", 2, ((List<String>)json.get("schemes")).size()); Assert.assertTrue("Paths section should be a map", json.get("paths") instanceof Map); Assert.assertEquals("Paths section should have 62 keys", 62, ((Map<String, Object>)json.get("paths")).size()); } @Test @SuppressWarnings("unchecked") public void checkQueryParameters() throws Exception { String raml = IOUtils.resourceToString("/product-api.raml", Charset.forName("UTF-8")); Assert.assertNotNull(raml); String swagger = raml2Swagger.convertToSwagger(raml); Assert.assertNotNull(swagger); TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {}; Map<String, Object> json = objectMapper.readValue(swagger, typeRef); Assert.assertNotNull(json); Map<String, Object> paths = (Map<String, Object>)json.get("paths"); Map<String, Object> path = (Map<String, Object>)paths.get("/{version}/cache/clear"); Map<String, Object> method = (Map<String, Object>)path.get("get"); List<Map<String, Object>> parameters = (List<Map<String, Object>>)method.get("parameters"); Assert.assertEquals("There should be 4 parameters", 4, parameters.size()); Assert.assertEquals("Parameter type should be 'query'", "query", parameters.get(0).get("in")); } @Test @SuppressWarnings("unchecked") public void checkPathParameters() throws Exception { String raml = IOUtils.resourceToString("/product-api.raml", Charset.forName("UTF-8")); Assert.assertNotNull(raml); String swagger = raml2Swagger.convertToSwagger(raml); Assert.assertNotNull(swagger); TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {}; Map<String, Object> json = objectMapper.readValue(swagger, typeRef); Assert.assertNotNull(json); Map<String, Object> paths = (Map<String, Object>)json.get("paths"); Map<String, Object> path = (Map<String, Object>)paths.get("/{version}/used-families/{familyName}/{modelName}/{type}"); Map<String, Object> method = (Map<String, Object>)path.get("get"); List<Map<String, Object>> parameters = (List<Map<String, Object>>)method.get("parameters"); Assert.assertEquals("There should be 3 parameters", 3, parameters.size()); 
Assert.assertEquals("Parameter type should be 'query'", "path", parameters.get(0).get("in")); } @Test @SuppressWarnings("unchecked") public void checkCorrectParametersResolution() throws Exception { String raml = IOUtils.resourceToString("/product-api.raml", Charset.forName("UTF-8")); Assert.assertNotNull(raml); String swagger = raml2Swagger.convertToSwagger(raml); Assert.assertNotNull(swagger); TypeReference<HashMap<String, Object>> typeRef = new TypeReference<HashMap<String, Object>>() {}; Map<String, Object> json = objectMapper.readValue(swagger, typeRef); Assert.assertNotNull(json); Map<String, Object> paths = (Map<String, Object>)json.get("paths"); Map<String, Object> path = (Map<String, Object>)paths.get("/{version}/models/{externalId}/derivativeFeatureCombos/{featureList}"); Map<String, Object> method = (Map<String, Object>)path.get("get"); List<Map<String, Object>> parameters = (List<Map<String, Object>>)method.get("parameters"); Assert.assertEquals("There should be 2 parameters", 2, parameters.size()); } @Test public void convertToSwaggerFromInputStream() throws Exception { String raml = IOUtils.resourceToString(PRODUCT_API_RAML, Charset.forName("UTF-8")); Assert.assertNotNull(raml); String swagger = raml2Swagger.convertToSwagger(raml); Assert.assertNotNull(swagger); String swagger2 = raml2Swagger.convertToSwagger(getClass().getResourceAsStream(PRODUCT_API_RAML)); Assert.assertNotNull(swagger2); Assert.assertEquals("The two swaggers output should be the same", swagger, swagger2); }
HoneycombDOMRpcService implements DOMRpcService { @Nonnull @Override public FluentFuture<DOMRpcResult> invokeRpc(@Nonnull final SchemaPath schemaPath, @Nullable final NormalizedNode<?, ?> normalizedNode) { DataObject input = null; if (normalizedNode != null) { final SchemaPath nodePatch = schemaPath.createChild(normalizedNode.getNodeType()); input = serializer.fromNormalizedNodeRpcData(nodePatch, (ContainerNode) normalizedNode); } final CompletableFuture<DataObject> result = rpcRegistry.invoke(schemaPath, input).toCompletableFuture(); final ListenableFuture<DOMRpcResult> output = getDOMRpcResult(toListenableFuture(result)); return FluentFuture.from(output); } HoneycombDOMRpcService(@Nonnull final BindingNormalizedNodeSerializer serializer, @Nonnull final RpcRegistry rpcRegistry); @Nonnull @Override FluentFuture<DOMRpcResult> invokeRpc(@Nonnull final SchemaPath schemaPath, @Nullable final NormalizedNode<?, ?> normalizedNode); @Nonnull @Override ListenerRegistration<T> registerRpcListener(@Nonnull final T t); }
@Test public void testInvokeRpc() throws Exception { final ContainerNode outputBi = mock(ContainerNode.class); final DataObject outputBa = mock(DataObject.class); when(serializer.toNormalizedNodeRpcData(ArgumentMatchers.any())).thenReturn(outputBi); when(registry.invoke(path, input)).thenReturn(CompletableFuture.completedFuture(outputBa)); assertEquals(outputBi, service.invokeRpc(path, node).get().getResult()); } @Test public void testInvokeRpcNoResult() throws Exception { final DataObject outputBa = null; final ContainerNode outputBi = null; when(registry.invoke(path, input)).thenReturn(CompletableFuture.completedFuture(outputBa)); assertEquals(outputBi, service.invokeRpc(path, node).get().getResult()); } @Test(expected = ExecutionException.class) public void testInvokeRpcFailed() throws Exception { final CompletableFuture future = new CompletableFuture(); future.completeExceptionally(new RuntimeException()); when(registry.invoke(path, input)).thenReturn(future); service.invokeRpc(path, node).get(); }
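A sketch of how a caller might drive invokeRpc asynchronously instead of blocking on get() as the tests do. It is assumed to sit in the same package as HoneycombDOMRpcService (so no Honeycomb-specific import is needed), and the helper name is hypothetical.

import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.MoreExecutors;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;
import org.opendaylight.yangtools.yang.model.api.SchemaPath;

final class RpcInvocationSketch {
    // Invokes the RPC and reacts to the DOMRpcResult asynchronously.
    static void invokeAndLog(final HoneycombDOMRpcService rpcService,
                             final SchemaPath rpcPath,
                             final NormalizedNode<?, ?> rpcInput) {
        final FluentFuture<?> result = rpcService.invokeRpc(rpcPath, rpcInput);
        result.addCallback(new FutureCallback<Object>() {
            @Override
            public void onSuccess(final Object domRpcResult) {
                System.out.println("RPC succeeded: " + domRpcResult);
            }

            @Override
            public void onFailure(final Throwable cause) {
                System.err.println("RPC failed: " + cause);
            }
        }, MoreExecutors.directExecutor());
    }
}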
HoneycombNotificationCollector implements NotificationCollector, AutoCloseable { @Override @Nonnull public Collection<Class<? extends Notification>> getNotificationTypes() { return notificationProducerRegistry.getNotificationTypes(); } HoneycombNotificationCollector( @Nonnull final NotificationPublishService bindingDOMNotificationPublishServiceAdapter, @Nonnull final NotificationProducerRegistry notificationProducerRegistry); @Override void close(); @Override void onNotification(@Nonnull final Notification notification); @Override @Nonnull Collection<Class<? extends Notification>> getNotificationTypes(); }
@Test public void testNotificationTypes() throws Exception { final HoneycombNotificationCollector honeycombNotificationCollector = new HoneycombNotificationCollector(notificationService, notificationRegistry); honeycombNotificationCollector.getNotificationTypes(); verify(producer, atLeast(1)).getNotificationTypes(); }
HoneycombNotificationCollector implements NotificationCollector, AutoCloseable { @Override public void onNotification(@Nonnull final Notification notification) { LOG.debug("Notification: {} pushed into collector", notification.getClass().getSimpleName()); LOG.trace("Notification: {} pushed into collector", notification); try { bindingDOMNotificationPublishServiceAdapter.putNotification(notification); } catch (InterruptedException e) { LOG.warn("Interrupted", e); Thread.currentThread().interrupt(); } } HoneycombNotificationCollector( @Nonnull final NotificationPublishService bindingDOMNotificationPublishServiceAdapter, @Nonnull final NotificationProducerRegistry notificationProducerRegistry); @Override void close(); @Override void onNotification(@Nonnull final Notification notification); @Override @Nonnull Collection<Class<? extends Notification>> getNotificationTypes(); }
@Test public void testCollect() throws Exception { final HoneycombNotificationCollector honeycombNotificationCollector = new HoneycombNotificationCollector(notificationService, notificationRegistry); final NetconfSessionStart notif = new NetconfSessionStartBuilder().build(); honeycombNotificationCollector.onNotification(notif); verify(notificationService).putNotification(notif); }
NotificationProducerRegistry { Set<Class<? extends Notification>> getNotificationTypes() { return notificationTypes; } NotificationProducerRegistry(final List<ManagedNotificationProducer> notificationProducersDependency); static QName getQName(final Class<? extends Notification> aClass); }
@Test public void testNotificationTypes() throws Exception { final NotificationProducerRegistry notificationRegistry = new NotificationProducerRegistry(Lists.newArrayList(producer, producer2)); final Set<Class<? extends Notification>> notificationTypes = notificationRegistry.getNotificationTypes(); Assert.assertThat(notificationTypes, hasItem(NetconfSessionEnd.class)); Assert.assertThat(notificationTypes, hasItem(NetconfSessionStart.class)); Assert.assertThat(notificationTypes, hasItem(NetconfCapabilityChange.class)); }
PersistingDataTreeAdapter implements DataTree { @Override public DataTreeSnapshot takeSnapshot() { return delegateDependency.takeSnapshot(); } PersistingDataTreeAdapter(@Nonnull final DataTree delegate, @Nonnull final DOMSchemaService schemaService, @Nonnull final Path persistPath); PersistingDataTreeAdapter(final DataTree delegate, final JsonPersister persister); @Override DataTreeSnapshot takeSnapshot(); @Override void setSchemaContext(final SchemaContext schemaContext); @Override void commit(final DataTreeCandidate dataTreeCandidate); @Override YangInstanceIdentifier getRootPath(); @Override void validate(final DataTreeModification dataTreeModification); @Override DataTreeCandidateTip prepare( final DataTreeModification dataTreeModification); }
@Test public void testTakeSnapshot() throws Exception { persistingDataTreeAdapter.takeSnapshot(); verify(delegatingDataTree).takeSnapshot(); }
PersistingDataTreeAdapter implements DataTree { @Override public void setSchemaContext(final SchemaContext schemaContext) { delegateDependency.setSchemaContext(schemaContext); } PersistingDataTreeAdapter(@Nonnull final DataTree delegate, @Nonnull final DOMSchemaService schemaService, @Nonnull final Path persistPath); PersistingDataTreeAdapter(final DataTree delegate, final JsonPersister persister); @Override DataTreeSnapshot takeSnapshot(); @Override void setSchemaContext(final SchemaContext schemaContext); @Override void commit(final DataTreeCandidate dataTreeCandidate); @Override YangInstanceIdentifier getRootPath(); @Override void validate(final DataTreeModification dataTreeModification); @Override DataTreeCandidateTip prepare( final DataTreeModification dataTreeModification); }
@Test public void testSetSchema() throws Exception { persistingDataTreeAdapter.setSchemaContext(null); verify(delegatingDataTree).setSchemaContext(null); }
PersistingDataTreeAdapter implements DataTree { @Override public void validate(final DataTreeModification dataTreeModification) throws DataValidationFailedException { delegateDependency.validate(dataTreeModification); } PersistingDataTreeAdapter(@Nonnull final DataTree delegate, @Nonnull final DOMSchemaService schemaService, @Nonnull final Path persistPath); PersistingDataTreeAdapter(final DataTree delegate, final JsonPersister persister); @Override DataTreeSnapshot takeSnapshot(); @Override void setSchemaContext(final SchemaContext schemaContext); @Override void commit(final DataTreeCandidate dataTreeCandidate); @Override YangInstanceIdentifier getRootPath(); @Override void validate(final DataTreeModification dataTreeModification); @Override DataTreeCandidateTip prepare( final DataTreeModification dataTreeModification); }
@Test public void testValidate() throws Exception { persistingDataTreeAdapter.validate(null); verify(delegatingDataTree).validate(null); }
PersistingDataTreeAdapter implements DataTree { @Override public DataTreeCandidateTip prepare( final DataTreeModification dataTreeModification) { return delegateDependency.prepare(dataTreeModification); } PersistingDataTreeAdapter(@Nonnull final DataTree delegate, @Nonnull final DOMSchemaService schemaService, @Nonnull final Path persistPath); PersistingDataTreeAdapter(final DataTree delegate, final JsonPersister persister); @Override DataTreeSnapshot takeSnapshot(); @Override void setSchemaContext(final SchemaContext schemaContext); @Override void commit(final DataTreeCandidate dataTreeCandidate); @Override YangInstanceIdentifier getRootPath(); @Override void validate(final DataTreeModification dataTreeModification); @Override DataTreeCandidateTip prepare( final DataTreeModification dataTreeModification); }
@Test public void testPrepare() throws Exception { persistingDataTreeAdapter.prepare(null); verify(delegatingDataTree).prepare(null); }
PersistingDataTreeAdapter implements DataTree { @Override public YangInstanceIdentifier getRootPath() { return delegateDependency.getRootPath(); } PersistingDataTreeAdapter(@Nonnull final DataTree delegate, @Nonnull final DOMSchemaService schemaService, @Nonnull final Path persistPath); PersistingDataTreeAdapter(final DataTree delegate, final JsonPersister persister); @Override DataTreeSnapshot takeSnapshot(); @Override void setSchemaContext(final SchemaContext schemaContext); @Override void commit(final DataTreeCandidate dataTreeCandidate); @Override YangInstanceIdentifier getRootPath(); @Override void validate(final DataTreeModification dataTreeModification); @Override DataTreeCandidateTip prepare( final DataTreeModification dataTreeModification); }
@Test public void testGetRootPath() throws Exception { persistingDataTreeAdapter.getRootPath(); verify(delegatingDataTree).getRootPath(); }
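A short wiring sketch for the adapter above, assuming the sketch lives in the same package as PersistingDataTreeAdapter (so no Honeycomb import is needed); the persist path is a placeholder.

import java.nio.file.Path;
import java.nio.file.Paths;
import org.opendaylight.mdsal.dom.api.DOMSchemaService;
import org.opendaylight.yangtools.yang.data.api.schema.tree.DataTree;

final class PersistenceWiringSketch {
    // Decorates an existing DataTree so that committed data is also persisted as JSON.
    static DataTree withPersistence(final DataTree delegate, final DOMSchemaService schemaService) {
        final Path persistPath = Paths.get("/var/lib/honeycomb/persist/config/data.json"); // placeholder path
        return new PersistingDataTreeAdapter(delegate, schemaService, persistPath);
    }
}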
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public boolean cancel() { delegateReadTx.close(); return delegateWriteTx.cancel(); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testCancel() { readWriteTx.cancel(); verify(writeTx).cancel(); }
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data) { delegateWriteTx.put(store, path, data); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testPut() { readWriteTx.put(store, path, data); verify(writeTx).put(store, path, data); }
RestoringInitializer implements DataTreeInitializer { @Override public void initialize() throws InitializeException { LOG.debug("Starting restoration of {} from {} using {}", dataTree, path, restorationType); if (!Files.exists(path)) { LOG.debug("Persist file {} does not exist. Skipping restoration", path); return; } try { final ContainerNode containerNode = jsonReader.readData(schemaService.getGlobalContext(), path); final DOMDataTreeWriteTransaction domDataWriteTransaction = dataTree.newWriteOnlyTransaction(); for (DataContainerChild<? extends YangInstanceIdentifier.PathArgument, ?> dataContainerChild : containerNode .getValue()) { final YangInstanceIdentifier iid = YangInstanceIdentifier.create(dataContainerChild.getIdentifier()); LOG.trace("Restoring {} from {}", iid, path); switch (restorationType) { case Merge: domDataWriteTransaction.merge(datastoreType, iid, dataContainerChild); break; case Put: domDataWriteTransaction.put(datastoreType, iid, dataContainerChild); break; default: throw new InitializeException( "Unable to initialize data using " + restorationType + " restoration strategy. Unsupported"); } } domDataWriteTransaction.commit().get(); LOG.debug("Data from {} restored successfully", path); } catch (IOException | InterruptedException | ExecutionException ex) { throw new InitializeException("Unable to restore data from " + path, ex); } } RestoringInitializer(@Nonnull final DOMSchemaService schemaService, @Nonnull final Path path, @Nonnull final DOMDataBroker dataTree, @Nonnull final RestorationType restorationType, @Nonnull final LogicalDatastoreType datastoreType, @Nonnull final JsonReader jsonReader); RestoringInitializer(@Nonnull final DOMSchemaService schemaService, @Nonnull final Path path, @Nonnull final DOMDataBroker dataTree, @Nonnull final RestorationType restorationType, @Nonnull final LogicalDatastoreType datastoreType); @Override void initialize(); }
@Test public void testMergeConfig() throws Exception { final RestoringInitializer init = new RestoringInitializer(schemaService, path, dataTree, RestoringInitializer.RestorationType.Merge, LogicalDatastoreType.CONFIGURATION, jsonReader); init.initialize(); verify(writeTx).merge(LogicalDatastoreType.CONFIGURATION, YangInstanceIdentifier.create(nodeId), data); } @Test public void testNoRestore() throws Exception { Files.delete(path); final RestoringInitializer init = new RestoringInitializer(schemaService, path, dataTree, RestoringInitializer.RestorationType.Merge, LogicalDatastoreType.CONFIGURATION, jsonReader); init.initialize(); verifyZeroInteractions(writeTx); } @Test(expected = DataTreeInitializer.InitializeException.class) public void testFail() throws Exception { when(jsonReader.readData(schemaContext, path)).thenThrow(new IOException("t")); final RestoringInitializer init = new RestoringInitializer(schemaService, path, dataTree, RestoringInitializer.RestorationType.Merge, LogicalDatastoreType.CONFIGURATION, jsonReader); init.initialize(); } @Test public void testPutOper() throws Exception { final RestoringInitializer init = new RestoringInitializer(schemaService, path, dataTree, RestoringInitializer.RestorationType.Put, LogicalDatastoreType.OPERATIONAL, jsonReader); init.initialize(); verify(schemaService).getGlobalContext(); verify(jsonReader).readData(schemaContext, path); verify(dataTree).newWriteOnlyTransaction(); verify(writeTx).put(LogicalDatastoreType.OPERATIONAL, YangInstanceIdentifier.create(nodeId), data); verify(writeTx).commit(); }
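A sketch of restoring persisted configuration at startup, again assuming same-package visibility of RestoringInitializer; the persist path is a placeholder and the schema service and data broker are expected to be provided by the hosting framework.

import java.nio.file.Path;
import java.nio.file.Paths;
import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
import org.opendaylight.mdsal.dom.api.DOMDataBroker;
import org.opendaylight.mdsal.dom.api.DOMSchemaService;

final class RestorationSketch {
    // Merges previously persisted JSON data back into the CONFIGURATION datastore.
    static void restoreConfig(final DOMSchemaService schemaService,
                              final DOMDataBroker dataBroker) throws Exception {
        final Path persistPath = Paths.get("/var/lib/honeycomb/persist/config/data.json"); // placeholder path
        final RestoringInitializer initializer = new RestoringInitializer(schemaService, persistPath, dataBroker,
                RestoringInitializer.RestorationType.Merge, LogicalDatastoreType.CONFIGURATION);
        // Skips silently if the persist file does not exist; throws InitializeException on read/commit failures.
        initializer.initialize();
    }
}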
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data) { delegateWriteTx.merge(store, path, data); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testMerge() { readWriteTx.merge(store, path, data); verify(writeTx).merge(store, path, data); }
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path) { delegateWriteTx.delete(store, path); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testDelete() { readWriteTx.delete(store, path); verify(writeTx).delete(store, path); }
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public FluentFuture<? extends CommitInfo> commit() { return delegateWriteTx.commit(); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testCommit() { readWriteTx.commit(); verify(writeTx).commit(); }
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path) { return delegateReadTx.read(store, path); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testRead() { readWriteTx.read(store, path); verify(readTx).read(store, path); }
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path) { return delegateReadTx.exists(store, path); } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testExists() { readWriteTx.exists(store, path); verify(readTx).exists(store, path); }
ReadWriteTransaction implements DOMDataTreeReadWriteTransaction, ValidableTransaction { @Override public Object getIdentifier() { return this; } ReadWriteTransaction(@Nonnull final DOMDataTreeReadTransaction delegateReadTx, @Nonnull final ValidableTransaction delegateWriteTx); @Override boolean cancel(); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void delete(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<? extends CommitInfo> commit(); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testGetIdentifier() { assertNotNull(readWriteTx.getIdentifier()); }
ReadOnlyTransaction implements DOMDataTreeReadTransaction { @Override public FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path) { LOG.debug("ReadOnlyTransaction.exists() store={}, path={}", store, path); ListenableFuture<Boolean> listenableFuture = Futures.transform(read(store, path), IS_NODE_PRESENT, MoreExecutors.directExecutor()); return FluentFuture.from(listenableFuture); } private ReadOnlyTransaction(@Nullable final DataModification configData, @Nullable final ReadableDataManager operationalData); @Override synchronized void close(); @Override synchronized FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Nonnull @Override Object getIdentifier(); }
@Test public void testExists() { final YangInstanceIdentifier path = mock(YangInstanceIdentifier.class); final FluentFuture<Optional<NormalizedNode<?, ?>>> future = mock(FluentFuture.class); when(operationalData.read(path)).thenReturn(future); readOnlyTx.exists(LogicalDatastoreType.OPERATIONAL, path); verify(operationalData).read(path); }
ReadOnlyTransaction implements DOMDataTreeReadTransaction { @Nonnull @Override public Object getIdentifier() { return this; } private ReadOnlyTransaction(@Nullable final DataModification configData, @Nullable final ReadableDataManager operationalData); @Override synchronized void close(); @Override synchronized FluentFuture<Optional<NormalizedNode<?, ?>>> read(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Override FluentFuture<Boolean> exists(final LogicalDatastoreType store, final YangInstanceIdentifier path); @Nonnull @Override Object getIdentifier(); }
@Test public void testGetIdentifier() { assertNotNull(readOnlyTx.getIdentifier()); }
ModifiableDataTreeDelegator extends ModifiableDataTreeManager { @Override public DataModification newModification() { return new DelegatingConfigSnapshot(super.newModification()); } ModifiableDataTreeDelegator(@Nonnull final BindingNormalizedNodeSerializer serializer, @Nonnull final DataTree dataTree, @Nonnull final SchemaContext schema, @Nonnull final WriterRegistry writerRegistry, @Nonnull final DataBroker contextBroker); @Override DataModification newModification(); }
@Test public void testRead() throws Exception { final ContainerNode topContainer = getTopContainer("topContainer"); addNodeToTree(dataTree, topContainer, TOP_CONTAINER_ID); final FluentFuture<Optional<NormalizedNode<?, ?>>> read = configDataTree.read(TOP_CONTAINER_ID); final FluentFuture<Optional<NormalizedNode<?, ?>>> read2 = configDataTree.newModification().read(TOP_CONTAINER_ID); final Optional<NormalizedNode<?, ?>> normalizedNodeOptional = read.get(); final Optional<NormalizedNode<?, ?>> normalizedNodeOptional2 = read2.get(); assertEquals(normalizedNodeOptional, normalizedNodeOptional2); assertTrue(normalizedNodeOptional.isPresent()); assertEquals(topContainer, normalizedNodeOptional.get()); assertEquals(dataTree.takeSnapshot().readNode(TOP_CONTAINER_ID), normalizedNodeOptional); } @Test public void testValidateTwice() throws Exception { final MapNode nestedList = getNestedList("listEntry", "listValue"); final DataModification dataModification = configDataTree.newModification(); dataModification.write(NESTED_LIST_ID, nestedList); dataModification.validate(); dataModification.validate(); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> map = HashMultimap.create(); map.put(DEFAULT_ID, DataObjectUpdate.create(DEFAULT_ID, null, DEFAULT_DATA_OBJECT)); final WriterRegistry.DataObjectUpdates updates = new WriterRegistry.DataObjectUpdates(map, ImmutableMultimap.of()); verify(writer, times(2)).validateModifications(eq(updates), any(WriteContext.class)); } @Test public void testCommitSuccessful() throws Exception { final MapNode nestedList = getNestedList("listEntry", "listValue"); final DataModification dataModification = configDataTree.newModification(); dataModification.write(NESTED_LIST_ID, nestedList); dataModification.validate(); dataModification.commit(); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> map = HashMultimap.create(); map.put(DEFAULT_ID, DataObjectUpdate.create(DEFAULT_ID, null, DEFAULT_DATA_OBJECT)); verify(writer).processModifications(eq(new WriterRegistry.DataObjectUpdates(map, ImmutableMultimap.of())), any(WriteContext.class)); assertEquals(nestedList, dataTree.takeSnapshot().readNode(NESTED_LIST_ID).get()); }
ModifiableDataTreeDelegator extends ModifiableDataTreeManager { @VisibleForTesting static WriterRegistry.DataObjectUpdates toBindingAware( final WriterRegistry registry, final Map<YangInstanceIdentifier, NormalizedNodeUpdate> biNodes, final BindingNormalizedNodeSerializer serializer) { final Multimap<InstanceIdentifier<?>, DataObjectUpdate> dataObjectUpdates = HashMultimap.create(); final Multimap<InstanceIdentifier<?>, DataObjectUpdate.DataObjectDelete> dataObjectDeletes = HashMultimap.create(); for (Map.Entry<YangInstanceIdentifier, NormalizedNodeUpdate> biEntry : biNodes.entrySet()) { final InstanceIdentifier<?> keyedId = serializer.fromYangInstanceIdentifier(biEntry.getKey()); final InstanceIdentifier<?> unkeyedIid = RWUtils.makeIidWildcarded(keyedId); NormalizedNodeUpdate normalizedNodeUpdate = biEntry.getValue(); final DataObjectUpdate dataObjectUpdate = toDataObjectUpdate(normalizedNodeUpdate, serializer); if (dataObjectUpdate != null) { if (dataObjectUpdate instanceof DataObjectUpdate.DataObjectDelete) { dataObjectDeletes.put(unkeyedIid, (DataObjectUpdate.DataObjectDelete) dataObjectUpdate); } else if (dataObjectUpdate.getDataBefore() != null && !registry.writerSupportsUpdate(unkeyedIid)) { dataObjectDeletes.put(unkeyedIid, (DataObjectUpdate.DataObjectDelete) DataObjectUpdate.DataObjectDelete .create(keyedId, dataObjectUpdate.getDataBefore(), null)); dataObjectUpdates .put(unkeyedIid, DataObjectUpdate.create(keyedId, null, dataObjectUpdate.getDataAfter())); } else { dataObjectUpdates.put(unkeyedIid, dataObjectUpdate); } } } return new WriterRegistry.DataObjectUpdates(dataObjectUpdates, dataObjectDeletes); } ModifiableDataTreeDelegator(@Nonnull final BindingNormalizedNodeSerializer serializer, @Nonnull final DataTree dataTree, @Nonnull final SchemaContext schema, @Nonnull final WriterRegistry writerRegistry, @Nonnull final DataBroker contextBroker); @Override DataModification newModification(); }
@Test public void testToBindingAware() throws Exception { when(serializer.fromNormalizedNode(any(YangInstanceIdentifier.class), eq(null))).thenReturn(null); when(writer.writerSupportsUpdate(any())).thenReturn(true); final Map<YangInstanceIdentifier, NormalizedNodeUpdate> biNodes = new HashMap<>(); final QName nn1 = QName.create("namespace", "nn1"); final YangInstanceIdentifier yid1 = mockYid(nn1); final InstanceIdentifier iid1 = mockIid(yid1, DataObject1.class); final NormalizedNode nn1B = mockNormalizedNode(nn1); final DataObject1 do1B = mockDataObject(yid1, iid1, nn1B, DataObject1.class); biNodes.put(yid1, NormalizedNodeUpdate.create(yid1, nn1B, null)); final QName nn2 = QName.create("namespace", "nn1"); final YangInstanceIdentifier yid2 = mockYid(nn2); final InstanceIdentifier iid2 = mockIid(yid2, DataObject2.class); final NormalizedNode nn2A = mockNormalizedNode(nn2); final DataObject2 do2A = mockDataObject(yid2, iid2, nn2A, DataObject2.class); biNodes.put(yid2, NormalizedNodeUpdate.create(yid2, null, nn2A)); final QName nn3 = QName.create("namespace", "nn1"); final YangInstanceIdentifier yid3 = mockYid(nn3); final InstanceIdentifier iid3 = mockIid(yid3, DataObject3.class); final NormalizedNode nn3B = mockNormalizedNode(nn3); final DataObject3 do3B = mockDataObject(yid3, iid3, nn3B, DataObject3.class); final NormalizedNode nn3A = mockNormalizedNode(nn3); final DataObject3 do3A = mockDataObject(yid3, iid3, nn3A, DataObject3.class); biNodes.put(yid3, NormalizedNodeUpdate.create(yid3, nn3B, nn3A)); final WriterRegistry.DataObjectUpdates dataObjectUpdates = ModifiableDataTreeDelegator.toBindingAware(writer, biNodes, serializer); assertThat(dataObjectUpdates.getDeletes().size(), is(1)); assertThat(dataObjectUpdates.getDeletes().keySet(), hasItem(((InstanceIdentifier<?>) iid1))); assertThat(dataObjectUpdates.getDeletes().values(), hasItem( ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid1, do1B, null)))); assertThat(dataObjectUpdates.getUpdates().size(), is(2)); assertThat(dataObjectUpdates.getUpdates().keySet(), hasItems((InstanceIdentifier<?>) iid2, (InstanceIdentifier<?>) iid3)); assertThat(dataObjectUpdates.getUpdates().values(), hasItems( DataObjectUpdate.create(iid2, null, do2A), DataObjectUpdate.create(iid3, do3B, do3A))); assertThat(dataObjectUpdates.getTypeIntersection().size(), is(3)); } @Test public void testToBindingAwareUpdateNotSupported() throws Exception { when(serializer.fromNormalizedNode(any(YangInstanceIdentifier.class), eq(null))).thenReturn(null); when(writer.writerSupportsUpdate(any())).thenReturn(false); final Map<YangInstanceIdentifier, NormalizedNodeUpdate> biNodes = new HashMap<>(); final QName nn1 = QName.create("namespace", "nn1"); final YangInstanceIdentifier yid1 = mockYid(nn1); final InstanceIdentifier iid1 = mockIid(yid1, DataObject1.class); final NormalizedNode nn1B = mockNormalizedNode(nn1); final DataObject1 do1B = mockDataObject(yid1, iid1, nn1B, DataObject1.class); biNodes.put(yid1, NormalizedNodeUpdate.create(yid1, nn1B, null)); final QName nn2 = QName.create("namespace", "nn1"); final YangInstanceIdentifier yid2 = mockYid(nn2); final InstanceIdentifier iid2 = mockIid(yid2, DataObject2.class); final NormalizedNode nn2A = mockNormalizedNode(nn2); final DataObject2 do2A = mockDataObject(yid2, iid2, nn2A, DataObject2.class); biNodes.put(yid2, NormalizedNodeUpdate.create(yid2, null, nn2A)); final QName nn3 = QName.create("namespace", "nn1"); final YangInstanceIdentifier yid3 = mockYid(nn3); final InstanceIdentifier iid3 = mockIid(yid3, 
DataObject3.class); final NormalizedNode nn3B = mockNormalizedNode(nn3); final DataObject3 do3B = mockDataObject(yid3, iid3, nn3B, DataObject3.class); final NormalizedNode nn3A = mockNormalizedNode(nn3); final DataObject3 do3A = mockDataObject(yid3, iid3, nn3A, DataObject3.class); biNodes.put(yid3, NormalizedNodeUpdate.create(yid3, nn3B, nn3A)); final WriterRegistry.DataObjectUpdates dataObjectUpdates = ModifiableDataTreeDelegator.toBindingAware(writer, biNodes, serializer); assertThat(dataObjectUpdates.getDeletes().size(), is(2)); assertThat(dataObjectUpdates.getDeletes().keySet(), hasItems(((InstanceIdentifier<?>) iid1), (InstanceIdentifier<?>) iid3)); assertThat(dataObjectUpdates.getDeletes().values(), hasItems( ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid1, do1B, null)), ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid3, do3B, null)))); assertThat(dataObjectUpdates.getUpdates().size(), is(2)); assertThat(dataObjectUpdates.getUpdates().keySet(), hasItems((InstanceIdentifier<?>) iid2, (InstanceIdentifier<?>) iid3)); assertThat(dataObjectUpdates.getUpdates().values(), hasItems( DataObjectUpdate.create(iid2, null, do2A), DataObjectUpdate.create(iid3, null, do3A))); assertThat(dataObjectUpdates.getTypeIntersection().size(), is(3)); }
WriteTransaction implements ValidableTransaction { @Override public void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data) { LOG.debug("WriteTransaction.put() store={}, path={}, data={}", store, path, data); checkIsNew(); handleOperation(store, (modification) -> modification.write(path, data)); } private WriteTransaction(@Nullable final DataModification configModification, @Nullable final DataModification operationalModification); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override boolean cancel(); @Override void delete(LogicalDatastoreType store, final YangInstanceIdentifier path); @Override @NonNull FluentFuture<? extends CommitInfo> commit(); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testPut() { writeTx.put(LogicalDatastoreType.CONFIGURATION, path, data); verify(configSnapshot).write(path, data); } @Test(expected = IllegalArgumentException.class) public void testPutOperational() { writeTx.put(LogicalDatastoreType.OPERATIONAL, path, data); verify(configSnapshot).write(path, data); }
WriteTransaction implements ValidableTransaction { @Override public void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data) { LOG.debug("WriteTransaction.merge() store={}, path={}, data={}", store, path, data); checkIsNew(); handleOperation(store, (modification) -> modification.merge(path, data)); } private WriteTransaction(@Nullable final DataModification configModification, @Nullable final DataModification operationalModification); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override boolean cancel(); @Override void delete(LogicalDatastoreType store, final YangInstanceIdentifier path); @Override @NonNull FluentFuture<? extends CommitInfo> commit(); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testMerge() { writeTx.merge(LogicalDatastoreType.CONFIGURATION, path, data); verify(configSnapshot).merge(path, data); }
WriteTransaction implements ValidableTransaction { @Override public boolean cancel() { if (status != TransactionStatus.NEW) { return false; } else { if (configModification != null) { configModification.close(); } status = TransactionStatus.CANCELED; return true; } } private WriteTransaction(@Nullable final DataModification configModification, @Nullable final DataModification operationalModification); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override boolean cancel(); @Override void delete(LogicalDatastoreType store, final YangInstanceIdentifier path); @Override @NonNull FluentFuture<? extends CommitInfo> commit(); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testCancel() { assertTrue(writeTx.cancel()); }
WriteTransaction implements ValidableTransaction { @Override public void delete(LogicalDatastoreType store, final YangInstanceIdentifier path) { LOG.debug("WriteTransaction.delete() store={}, path={}", store, path); checkIsNew(); handleOperation(store, (modification) -> modification.delete(path)); } private WriteTransaction(@Nullable final DataModification configModification, @Nullable final DataModification operationalModification); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override boolean cancel(); @Override void delete(LogicalDatastoreType store, final YangInstanceIdentifier path); @Override @NonNull FluentFuture<? extends CommitInfo> commit(); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testDelete() { writeTx.delete(LogicalDatastoreType.CONFIGURATION, path); verify(configSnapshot).delete(path); }
WriteTransaction implements ValidableTransaction { @Override public @NonNull FluentFuture<? extends CommitInfo> commit() { LOG.trace("WriteTransaction.commit()"); checkIsNew(); try { doCommit(); } catch (Exception e) { status = TransactionStatus.FAILED; LOG.error("Submit failed", e); return FluentFuture .from(Futures.immediateFailedFuture( new TransactionCommitFailedException("Failed to validate DataTreeModification", e))); } return FluentFuture.from(Futures.immediateFuture(null)); } private WriteTransaction(@Nullable final DataModification configModification, @Nullable final DataModification operationalModification); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override boolean cancel(); @Override void delete(LogicalDatastoreType store, final YangInstanceIdentifier path); @Override @NonNull FluentFuture<? extends CommitInfo> commit(); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testSubmit() throws Exception { writeTx.commit(); verify(configSnapshot).commit(); } @Test public void testSubmitFailed() throws Exception { doThrow(mock(ValidationFailedException.class)).when(configSnapshot).commit(); final FluentFuture<? extends CommitInfo> future = writeTx.commit(); try { future.get(); } catch (Exception e) { assertTrue(e.getCause() instanceof TransactionCommitFailedException); return; } fail("Expected exception to be thrown"); } @Test public void testCommit() throws TranslationException { writeTx.commit(); verify(configSnapshot).commit(); }
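A sketch of consuming the FluentFuture returned by commit() asynchronously rather than blocking, written against the mdsal DOMDataTreeWriteTransaction contract whose commit() has the same FluentFuture<? extends CommitInfo> shape; the helper and its names are placeholders.

import com.google.common.util.concurrent.FluentFuture;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.MoreExecutors;
import org.opendaylight.mdsal.common.api.CommitInfo;
import org.opendaylight.mdsal.dom.api.DOMDataTreeWriteTransaction;

final class CommitHandlingSketch {
    // Commits a write transaction and reports the outcome without blocking the caller.
    static void commitAndLog(final DOMDataTreeWriteTransaction writeTx) {
        final FluentFuture<? extends CommitInfo> commitFuture = writeTx.commit();
        commitFuture.addCallback(new FutureCallback<CommitInfo>() {
            @Override
            public void onSuccess(final CommitInfo info) {
                System.out.println("Transaction committed");
            }

            @Override
            public void onFailure(final Throwable cause) {
                // WriteTransaction surfaces validation problems as TransactionCommitFailedException here.
                System.err.println("Commit failed: " + cause);
            }
        }, MoreExecutors.directExecutor());
    }
}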
WriteTransaction implements ValidableTransaction { @Override public Object getIdentifier() { return this; } private WriteTransaction(@Nullable final DataModification configModification, @Nullable final DataModification operationalModification); @Override void put(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override void merge(final LogicalDatastoreType store, final YangInstanceIdentifier path, final NormalizedNode<?, ?> data); @Override boolean cancel(); @Override void delete(LogicalDatastoreType store, final YangInstanceIdentifier path); @Override @NonNull FluentFuture<? extends CommitInfo> commit(); @Override Object getIdentifier(); @Override FluentFuture<Void> validate(); }
@Test public void testGetIdentifier() { assertNotNull(writeTx.getIdentifier()); }
DataBroker implements DOMDataBroker, Closeable { @Override public DOMDataTreeReadWriteTransaction newReadWriteTransaction() { LOG.trace("DataBroker({}).newReadWriteTransaction()", this); return transactionFactory.newReadWriteTransaction(); } DataBroker(final TransactionFactory transactionFactory); @Override DOMDataTreeReadTransaction newReadOnlyTransaction(); @Override DOMDataTreeReadWriteTransaction newReadWriteTransaction(); @Override DOMDataTreeWriteTransaction newWriteOnlyTransaction(); @Override DOMTransactionChain createTransactionChain(final DOMTransactionChainListener listener); @Override @NonNull ClassToInstanceMap<DOMDataBrokerExtension> getExtensions(); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager configDataTree, @Nonnull final ReadableDataManager operationalDataTree); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager operationalDataTree); @Override void close(); }
@Test public void testNewReadWriteTransaction() { final DOMDataTreeReadWriteTransaction readWriteTx = broker.newReadWriteTransaction(); final YangInstanceIdentifier path = mock(YangInstanceIdentifier.class); readWriteTx.read(LogicalDatastoreType.CONFIGURATION, path); verify(configSnapshot).read(path); verify(confiDataTree).newModification(); }
DataBroker implements DOMDataBroker, Closeable { @Override public DOMDataTreeWriteTransaction newWriteOnlyTransaction() { LOG.trace("DataBroker({}).newWriteTransaction()", this); return transactionFactory.newWriteTransaction(); } DataBroker(final TransactionFactory transactionFactory); @Override DOMDataTreeReadTransaction newReadOnlyTransaction(); @Override DOMDataTreeReadWriteTransaction newReadWriteTransaction(); @Override DOMDataTreeWriteTransaction newWriteOnlyTransaction(); @Override DOMTransactionChain createTransactionChain(final DOMTransactionChainListener listener); @Override @NonNull ClassToInstanceMap<DOMDataBrokerExtension> getExtensions(); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager configDataTree, @Nonnull final ReadableDataManager operationalDataTree); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager operationalDataTree); @Override void close(); }
@Test public void testNewWriteOnlyTransaction() { broker.newWriteOnlyTransaction(); verify(confiDataTree).newModification(); }
DataBroker implements DOMDataBroker, Closeable { @Override public DOMDataTreeReadTransaction newReadOnlyTransaction() { LOG.trace("DataBroker({}).newReadTransaction()", this); return transactionFactory.newReadTransaction(); } DataBroker(final TransactionFactory transactionFactory); @Override DOMDataTreeReadTransaction newReadOnlyTransaction(); @Override DOMDataTreeReadWriteTransaction newReadWriteTransaction(); @Override DOMDataTreeWriteTransaction newWriteOnlyTransaction(); @Override DOMTransactionChain createTransactionChain(final DOMTransactionChainListener listener); @Override @NonNull ClassToInstanceMap<DOMDataBrokerExtension> getExtensions(); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager configDataTree, @Nonnull final ReadableDataManager operationalDataTree); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager operationalDataTree); @Override void close(); }
@Test public void testNewReadOnlyTransaction() { final DOMDataTreeReadTransaction readTx = broker.newReadOnlyTransaction(); final YangInstanceIdentifier path = mock(YangInstanceIdentifier.class); readTx.read(LogicalDatastoreType.CONFIGURATION, path); verify(configSnapshot).read(path); }
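A sketch that assembles a broker from the two data managers and reads the configuration root, assuming same-package visibility for the Honeycomb types DataBroker, ModifiableDataManager and ReadableDataManager.

import com.google.common.util.concurrent.FluentFuture;
import java.util.Optional;
import org.opendaylight.mdsal.common.api.LogicalDatastoreType;
import org.opendaylight.mdsal.dom.api.DOMDataTreeReadTransaction;
import org.opendaylight.yangtools.yang.data.api.YangInstanceIdentifier;
import org.opendaylight.yangtools.yang.data.api.schema.NormalizedNode;

final class BrokerReadSketch {
    // Assumption: shares a package with DataBroker, ModifiableDataManager and ReadableDataManager.
    // Creates the broker and reads the whole CONFIGURATION subtree through a read-only transaction.
    static Optional<NormalizedNode<?, ?>> readConfigRoot(final ModifiableDataManager configDataTree,
                                                         final ReadableDataManager operationalDataTree)
            throws Exception {
        final DataBroker broker = DataBroker.create(configDataTree, operationalDataTree);
        final DOMDataTreeReadTransaction readTx = broker.newReadOnlyTransaction();
        try {
            final FluentFuture<Optional<NormalizedNode<?, ?>>> read =
                    readTx.read(LogicalDatastoreType.CONFIGURATION, YangInstanceIdentifier.EMPTY);
            return read.get();
        } finally {
            readTx.close();
        }
    }
}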
DataBroker implements DOMDataBroker, Closeable { @Override public DOMTransactionChain createTransactionChain(final DOMTransactionChainListener listener) { throw new UnsupportedOperationException("Not supported"); } DataBroker(final TransactionFactory transactionFactory); @Override DOMDataTreeReadTransaction newReadOnlyTransaction(); @Override DOMDataTreeReadWriteTransaction newReadWriteTransaction(); @Override DOMDataTreeWriteTransaction newWriteOnlyTransaction(); @Override DOMTransactionChain createTransactionChain(final DOMTransactionChainListener listener); @Override @NonNull ClassToInstanceMap<DOMDataBrokerExtension> getExtensions(); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager configDataTree, @Nonnull final ReadableDataManager operationalDataTree); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager operationalDataTree); @Override void close(); }
@Test(expected = UnsupportedOperationException.class) public void testCreateTransactionChain() { final DOMTransactionChainListener listener = mock(DOMTransactionChainListener.class); broker.createTransactionChain(listener); }
DataBroker implements DOMDataBroker, Closeable { @Override public @NonNull ClassToInstanceMap<DOMDataBrokerExtension> getExtensions() { return ImmutableClassToInstanceMap.of(DOMDataTransactionValidator.class, tx -> ((ValidableTransaction) tx).validate()); } DataBroker(final TransactionFactory transactionFactory); @Override DOMDataTreeReadTransaction newReadOnlyTransaction(); @Override DOMDataTreeReadWriteTransaction newReadWriteTransaction(); @Override DOMDataTreeWriteTransaction newWriteOnlyTransaction(); @Override DOMTransactionChain createTransactionChain(final DOMTransactionChainListener listener); @Override @NonNull ClassToInstanceMap<DOMDataBrokerExtension> getExtensions(); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager configDataTree, @Nonnull final ReadableDataManager operationalDataTree); @Nonnull static DataBroker create(@Nonnull final ModifiableDataManager operationalDataTree); @Override void close(); }
@Test public void testGetSupportedExtensions() { final @NonNull ClassToInstanceMap<DOMDataBrokerExtension> supportedExtensions = broker.getExtensions(); assertEquals(1, supportedExtensions.size()); assertNotNull(supportedExtensions.get(DOMDataTransactionValidator.class)); }
Reverter { void revert(@Nonnull final WriteContext writeContext) throws RevertFailedException { checkNotNull(writeContext, "Cannot revert changes for null context"); final WriterRegistry.DataObjectUpdates revertedAndMapped = revertAndMapProcessed(revertOrder(toBeReverted)); LOG.info("Attempting revert for changes: {}", revertedAndMapped); try { writerRegistry.processModifications(revertedAndMapped, writeContext); LOG.info("Revert successful"); } catch (UpdateFailedException e) { LOG.error("Revert failed", e); final Set<DataObjectUpdate> nonReverted = revertedAndMapped.getAllModifications(); nonReverted.removeAll(e.getProcessed()); throw new RevertFailedException(e.getFailed(), nonReverted, e); } catch (Exception e) { LOG.error("Revert failed with unexpected error"); throw new RevertFailedException(e); } } Reverter(final List<DataObjectUpdate> toBeReverted, final WriterRegistry writerRegistry); }
@Test public void revertSingle() throws Exception { final DataObjectUpdate create = DataObjectUpdate.create(IID_0, null, mock(DataObject.class)); new Reverter(ImmutableList.of(create), registry).revert(writeContext); assertSingleRevert(create); } @Test public void revertSingleFailed() throws TranslationException { final DataObjectUpdate create = DataObjectUpdate.create(IID_0, null, mock(DataObject.class)); final UpdateFailedException ex = new UpdateFailedException(new IllegalStateException(), Collections.emptyList(), create); doThrow(ex).when(registry) .processModifications(any(WriterRegistry.DataObjectUpdates.class), any(WriteContext.class)); try { new Reverter(ImmutableList.of(create), registry).revert(writeContext); } catch (Reverter.RevertFailedException e) { assertEquals(ex, e.getCause()); assertSingleRevert(create); return; } fail("Reverter.RevertFailedException was expected"); } @Test public void revertSingleFailedWithUnexpectedEx() throws TranslationException { final DataObjectUpdate create = DataObjectUpdate.create(IID_0, null, mock(DataObject.class)); final IllegalStateException ex = new IllegalStateException(); doThrow(ex).when(registry) .processModifications(any(WriterRegistry.DataObjectUpdates.class), any(WriteContext.class)); try { new Reverter(ImmutableList.of(create), registry).revert(writeContext); } catch (Reverter.RevertFailedException e) { assertEquals(ex, e.getCause()); assertSingleRevert(create); return; } fail("IllegalStateException was expected"); } @Test public void revertMultiple() throws Exception { final DataObjectUpdate create = DataObjectUpdate.create(IID_0, null, mock(DataObject.class)); final DataObjectUpdate update = DataObjectUpdate.create(IID_1, mock(DataObject1.class), mock(DataObject1.class)); final DataObjectUpdate delete = DataObjectUpdate.create(IID_2, mock(DataObject2.class), null); new Reverter(ImmutableList.of(create, update, delete), registry).revert(writeContext); assertMultiRevert(create, update, delete); } @Test public void revertMultipleFailed() throws Exception { final DataObjectUpdate create = DataObjectUpdate.create(IID_0, null, mock(DataObject.class)); final DataObjectUpdate update = DataObjectUpdate.create(IID_1, mock(DataObject1.class), mock(DataObject1.class)); final DataObjectUpdate delete = DataObjectUpdate.create(IID_2, mock(DataObject2.class), null); final UpdateFailedException ex = new UpdateFailedException(new IllegalStateException(), ImmutableList.of(create, update), create); doThrow(ex).when(registry) .processModifications(any(WriterRegistry.DataObjectUpdates.class), any(WriteContext.class)); try { new Reverter(ImmutableList.of(create, update, delete), registry).revert(writeContext); } catch (Reverter.RevertFailedException e) { assertEquals(ex, e.getCause()); assertMultiRevert(create, update, delete); return; } fail("Reverter.RevertFailedException was expected"); } @Test public void revertMultipleFailedWithUnnexpectedException() throws Exception { final DataObjectUpdate create = DataObjectUpdate.create(IID_0, null, mock(DataObject.class)); final DataObjectUpdate update = DataObjectUpdate.create(IID_1, mock(DataObject1.class), mock(DataObject1.class)); final DataObjectUpdate delete = DataObjectUpdate.create(IID_2, mock(DataObject2.class), null); final IllegalStateException ex = new IllegalStateException(); doThrow(ex).when(registry) .processModifications(any(WriterRegistry.DataObjectUpdates.class), any(WriteContext.class)); try { new Reverter(ImmutableList.of(create, update, delete), registry).revert(writeContext); } catch 
(Reverter.RevertFailedException e) { assertEquals(ex, e.getCause()); assertMultiRevert(create, update, delete); return; } fail("Reverter.RevertFailedException was expected"); }
FlatWriterRegistryBuilder extends AbstractSubtreeManagerRegistryBuilderBuilder<Writer<? extends DataObject>, WriterRegistry> implements ModifiableWriterRegistryBuilder, WriterRegistryBuilder { @VisibleForTesting @Override protected ImmutableMap<InstanceIdentifier<?>, Writer<? extends DataObject>> getMappedHandlers() { return super.getMappedHandlers(); } FlatWriterRegistryBuilder(@Nonnull final YangDAG yangDAG); @Override WriterRegistry build(); }
@Test public void testRelationsBefore() throws Exception { final FlatWriterRegistryBuilder flatWriterRegistryBuilder = new FlatWriterRegistryBuilder(new YangDAG()); flatWriterRegistryBuilder.add(mockWriter(DataObjects.DataObject3.class)); flatWriterRegistryBuilder.add(mockWriter(DataObjects.DataObject4.class)); flatWriterRegistryBuilder.addBefore(mockWriter(DataObjects.DataObject2.class), Lists.newArrayList(DataObjects.DataObject3.IID, DataObjects.DataObject4.IID)); flatWriterRegistryBuilder.addBefore(mockWriter(DataObjects.DataObject1.class), DataObjects.DataObject2.IID); final ImmutableMap<InstanceIdentifier<?>, Writer<?>> mappedWriters = flatWriterRegistryBuilder.getMappedHandlers(); final ArrayList<InstanceIdentifier<?>> typesInList = Lists.newArrayList(mappedWriters.keySet()); assertEquals(DataObjects.DataObject1.IID, typesInList.get(0)); assertEquals(DataObjects.DataObject2.IID, typesInList.get(1)); assertThat(typesInList.get(2), anyOf(equalTo(DataObjects.DataObject3.IID), equalTo(DataObjects.DataObject4.IID))); assertThat(typesInList.get(3), anyOf(equalTo(DataObjects.DataObject3.IID), equalTo(DataObjects.DataObject4.IID))); } @Test public void testRelationsAfter() throws Exception { final FlatWriterRegistryBuilder flatWriterRegistryBuilder = new FlatWriterRegistryBuilder(new YangDAG()); flatWriterRegistryBuilder.add(mockWriter(DataObjects.DataObject1.class)); flatWriterRegistryBuilder.addAfter(mockWriter(DataObjects.DataObject2.class), DataObjects.DataObject1.IID); flatWriterRegistryBuilder.addAfter(mockWriter(DataObjects.DataObject3.class), DataObjects.DataObject2.IID); flatWriterRegistryBuilder.addAfter(mockWriter(DataObjects.DataObject4.class), Lists.newArrayList(DataObjects.DataObject2.IID, DataObjects.DataObject3.IID)); final ImmutableMap<InstanceIdentifier<?>, Writer<?>> mappedWriters = flatWriterRegistryBuilder.getMappedHandlers(); final List<InstanceIdentifier<?>> typesInList = Lists.newArrayList(mappedWriters.keySet()); assertEquals(DataObjects.DataObject1.IID, typesInList.get(0)); assertEquals(DataObjects.DataObject2.IID, typesInList.get(1)); assertThat(typesInList.get(2), anyOf(equalTo(DataObjects.DataObject3.IID), equalTo(DataObjects.DataObject4.IID))); assertThat(typesInList.get(3), anyOf(equalTo(DataObjects.DataObject3.IID), equalTo(DataObjects.DataObject4.IID))); } @Test public void testAddSubtreeWriter() throws Exception { final FlatWriterRegistryBuilder flatWriterRegistryBuilder = new FlatWriterRegistryBuilder(new YangDAG()); flatWriterRegistryBuilder.subtreeAdd( Sets.newHashSet(DataObjects.DataObject4.DataObject41.IID, DataObjects.DataObject4.DataObject41.IID), mockWriter(DataObjects.DataObject4.class)); final ImmutableMap<InstanceIdentifier<?>, Writer<?>> mappedWriters = flatWriterRegistryBuilder.getMappedHandlers(); final ArrayList<InstanceIdentifier<?>> typesInList = Lists.newArrayList(mappedWriters.keySet()); assertEquals(DataObjects.DataObject4.IID, typesInList.get(0)); assertEquals(1, typesInList.size()); }
ReadableDataTreeDelegator implements ReadableDataManager { private Optional<NormalizedNode<?, ?>> readNode(final YangInstanceIdentifier yangInstanceIdentifier, final ReadContext ctx) throws ReadFailedException { LOG.debug("OperationalDataTree.readNode(), yangInstanceIdentifier={}", yangInstanceIdentifier); final InstanceIdentifier<?> path = serializer.fromYangInstanceIdentifier(yangInstanceIdentifier); checkNotNull(path, "Invalid instance identifier %s. Cannot create BA equivalent.", yangInstanceIdentifier); LOG.debug("OperationalDataTree.readNode(), path={}", path); final Optional<? extends DataObject> dataObject = readerRegistry.read(path, ctx); ctx.getModificationCache().close(); if (dataObject.isPresent()) { final NormalizedNode<?, ?> value = toNormalizedNodeFunction(path).apply(dataObject.get()); return Optional.ofNullable(value); } else { return Optional.empty(); } } ReadableDataTreeDelegator(@Nonnull BindingNormalizedNodeSerializer serializer, @Nonnull final SchemaContext globalContext, @Nonnull final ReaderRegistry readerRegistry, @Nonnull final org.opendaylight.mdsal.binding.api.DataBroker contextBroker); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read( @Nonnull final YangInstanceIdentifier yangInstanceIdentifier); }
@Test public void testReadNode() throws Exception { final YangInstanceIdentifier yangId = mock(YangInstanceIdentifier.class); final YangInstanceIdentifier.PathArgument pArg = mock(YangInstanceIdentifier.PathArgument.class); doReturn(pArg).when(yangId).getLastPathArgument(); doReturn(Collections.singletonList(pArg)).when(yangId).getPathArguments(); doReturn(QName.create("namespace", "2012-12-12", "local")).when(pArg).getNodeType(); doReturn(id).when(serializer).fromYangInstanceIdentifier(yangId); final DataObject dataObject = mock(DataObject.class); doReturn(Optional.of(dataObject)).when(reader).read(same(id), any(ReadContext.class)); when(serializer.toNormalizedNode(id, dataObject)).thenReturn(entry); final DataContainerChild<?, ?> expectedValue = mock(DataContainerChild.class); doReturn(expectedValue).when(entry).getValue(); final FluentFuture<Optional<NormalizedNode<?, ?>>> future = operationalData.read(yangId); verify(serializer).fromYangInstanceIdentifier(yangId); verify(reader).read(same(id), any(ReadContext.class)); final Optional<NormalizedNode<?, ?>> result = future.get(); assertTrue(result.isPresent()); assertEquals(expectedValue, result.get()); }
ReadableDataTreeDelegator implements ReadableDataManager { @Override public FluentFuture<Optional<NormalizedNode<?, ?>>> read( @Nonnull final YangInstanceIdentifier yangInstanceIdentifier) { try (TransactionMappingContext mappingContext = new TransactionMappingContext( contextBroker.newReadWriteTransaction()); ReadContext ctx = new ReadContextImpl(mappingContext)) { final Optional<NormalizedNode<?, ?>> value; if (checkNotNull(yangInstanceIdentifier).equals(YangInstanceIdentifier.EMPTY)) { value = readRoot(ctx); } else { value = readNode(yangInstanceIdentifier, ctx); } final FluentFuture<? extends CommitInfo> contextUpdateResult = ((TransactionMappingContext) ctx.getMappingContext()).commit(); contextUpdateResult.get(); return FluentFutures.immediateFluentFuture(value); } catch (InterruptedException | ExecutionException | ReadFailedException ex) { return FluentFutures.immediateFailedFluentFuture( new org.opendaylight.controller.md.sal.common.api.data.ReadFailedException("Failed to read data", ex)); } } ReadableDataTreeDelegator(@Nonnull BindingNormalizedNodeSerializer serializer, @Nonnull final SchemaContext globalContext, @Nonnull final ReaderRegistry readerRegistry, @Nonnull final org.opendaylight.mdsal.binding.api.DataBroker contextBroker); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read( @Nonnull final YangInstanceIdentifier yangInstanceIdentifier); }
@Test public void testReadNonExistingNode() throws Exception { final YangInstanceIdentifier yangId = mock(YangInstanceIdentifier.class); doReturn(id).when(serializer).fromYangInstanceIdentifier(yangId); doReturn(Optional.empty()).when(reader).read(same(id), any(ReadContext.class)); final FluentFuture<Optional<NormalizedNode<?, ?>>> future = operationalData.read(yangId); verify(serializer).fromYangInstanceIdentifier(yangId); verify(reader).read(same(id), any(ReadContext.class)); final Optional<NormalizedNode<?, ?>> result = future.get(); assertFalse(result.isPresent()); } @Test public void testReadFailed() throws Exception { doThrow(io.fd.honeycomb.translate.read.ReadFailedException.class).when(reader).readAll(any(ReadContext.class)); final FluentFuture<Optional<NormalizedNode<?, ?>>> future = operationalData.read(YangInstanceIdentifier.EMPTY); try { future.get(); } catch (ExecutionException e) { assertTrue(e.getCause() instanceof ReadFailedException); return; } fail("ReadFailedException was expected"); } @Test public void testReadRootWithOneNonListElement() throws Exception { final InstanceIdentifier<DataObject> vppStateII = InstanceIdentifier.create(DataObject.class); final DataObject vppState = mock(DataObject.class); Multimap<InstanceIdentifier<?>, DataObject> dataObjects = LinkedListMultimap.create(); dataObjects.put(vppStateII, vppState); doReturn(dataObjects).when(reader).readAll(any(ReadContext.class)); final YangInstanceIdentifier vppYangId = YangInstanceIdentifier.builder().node(QName.create("n", "d")).build(); when(serializer.toYangInstanceIdentifier(vppStateII)).thenReturn(vppYangId); when(serializer.toNormalizedNode(vppStateII, vppState)).thenReturn(entry); final DataContainerChild<?, ?> vppStateContainer = mock(DataContainerChild.class); doReturn(vppStateContainer).when(entry).getValue(); doReturn(vppYangId.getLastPathArgument()).when(vppStateContainer).getIdentifier(); final FluentFuture<Optional<NormalizedNode<?, ?>>> future = operationalData.read(YangInstanceIdentifier.EMPTY); verify(reader).readAll(any(ReadContext.class)); verify(serializer).toYangInstanceIdentifier(vppStateII); verify(serializer).toNormalizedNode(vppStateII, vppState); final Optional<NormalizedNode<?, ?>> result = future.get(); assertTrue(result.isPresent()); final ContainerNode rootNode = (ContainerNode) result.get(); assertEquals(SchemaContext.NAME, rootNode.getIdentifier().getNodeType()); assertEquals(vppStateContainer, Iterables.getOnlyElement(rootNode.getValue())); }
ReadableDataTreeDelegator implements ReadableDataManager { @VisibleForTesting static DataContainerChild<?, ?> wrapListIntoMixinNode( final Collection<NormalizedNode<?, ?>> normalizedRootElements, final ListSchemaNode listSchema) { if (listSchema.getKeyDefinition().isEmpty()) { final CollectionNodeBuilder<UnkeyedListEntryNode, UnkeyedListNode> listBuilder = Builders.unkeyedListBuilder(); listBuilder.withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(listSchema.getQName())); for (NormalizedNode<?, ?> normalizedRootElement : normalizedRootElements) { listBuilder.withChild((UnkeyedListEntryNode) normalizedRootElement); } return listBuilder.build(); } else { final CollectionNodeBuilder<MapEntryNode, ? extends MapNode> listBuilder = listSchema.isUserOrdered() ? Builders.orderedMapBuilder() : Builders.mapBuilder(); listBuilder.withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(listSchema.getQName())); for (NormalizedNode<?, ?> normalizedRootElement : normalizedRootElements) { listBuilder.withChild((MapEntryNode) normalizedRootElement); } return listBuilder.build(); } } ReadableDataTreeDelegator(@Nonnull BindingNormalizedNodeSerializer serializer, @Nonnull final SchemaContext globalContext, @Nonnull final ReaderRegistry readerRegistry, @Nonnull final org.opendaylight.mdsal.binding.api.DataBroker contextBroker); @Override FluentFuture<Optional<NormalizedNode<?, ?>>> read( @Nonnull final YangInstanceIdentifier yangInstanceIdentifier); }
@Test public void testWrapMixin() throws Exception { final QName nodeQName = QName.create("namespace", "node"); final QName keyQName = QName.create("namespace", "key"); final List<NormalizedNode<?, ?>> mapNodes = Lists.newArrayList("one", "two", "three").stream() .map(value -> ImmutableNodes.mapEntry(nodeQName, keyQName, value)) .collect(Collectors.toList()); final ListSchemaNode listSchema = mock(ListSchemaNode.class); doReturn(Collections.singletonList(keyQName)).when(listSchema).getKeyDefinition(); doReturn(true).when(listSchema).isUserOrdered(); doReturn(nodeQName).when(listSchema).getQName(); final DataContainerChild<?, ?> dataContainerChild = ReadableDataTreeDelegator.wrapListIntoMixinNode(mapNodes, listSchema); assertArrayEquals(mapNodes.toArray(), ((UnmodifiableCollection) dataContainerChild.getValue()).toArray()); }
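The test above exercises only the keyed branch of wrapListIntoMixinNode. A minimal sketch of the unkeyed branch follows (hypothetical, not part of the original suite; it reuses nodeQName and keyQName from the test above and assumes Builders.unkeyedListEntryBuilder from the same yangtools Builders class used elsewhere in this document):

// Hypothetical sketch: an empty key definition routes wrapListIntoMixinNode to the unkeyed branch,
// producing an UnkeyedListNode instead of a MapNode.
final ListSchemaNode unkeyedSchema = mock(ListSchemaNode.class);
doReturn(Collections.emptyList()).when(unkeyedSchema).getKeyDefinition(); // no keys -> unkeyed list
doReturn(nodeQName).when(unkeyedSchema).getQName();
final NormalizedNode<?, ?> unkeyedEntry = Builders.unkeyedListEntryBuilder()
        .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(nodeQName))
        .withChild(ImmutableNodes.leafNode(keyQName, "one"))
        .build();
final DataContainerChild<?, ?> unkeyedList = ReadableDataTreeDelegator
        .wrapListIntoMixinNode(Collections.singletonList(unkeyedEntry), unkeyedSchema);
assertTrue(unkeyedList instanceof UnkeyedListNode);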
ModificationDiff { Map<YangInstanceIdentifier, NormalizedNodeUpdate> getUpdates() { return updates; } private ModificationDiff(@Nonnull Map<YangInstanceIdentifier, NormalizedNodeUpdate> updates); @Override String toString(); }
@Test public void testInitialWrite() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final NormalizedNode<?, ?> topContainer = getTopContainer("string1"); final YangInstanceIdentifier TOP_CONTAINER_ID = YangInstanceIdentifier.of(TOP_CONTAINER_QNAME); dataTreeModification.write(TOP_CONTAINER_ID, topContainer); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final ModificationDiff modificationDiff = getModificationDiff(prepare); assertThat(modificationDiff.getUpdates().size(), is(1)); assertThat(modificationDiff.getUpdates().values().size(), is(1)); assertUpdate(modificationDiff.getUpdates().values().iterator().next(), TOP_CONTAINER_ID, null, topContainer); } @Test public void testLeafList() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final ContainerNode topContainer = getTopContainerWithLeafList("string1", "string2"); final YangInstanceIdentifier TOP_CONTAINER_ID = YangInstanceIdentifier.of(TOP_CONTAINER_QNAME); dataTreeModification.write(TOP_CONTAINER_ID, topContainer); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final ModificationDiff modificationDiff = getModificationDiff(prepare); assertThat(modificationDiff.getUpdates().size(), is(1)); assertThat(modificationDiff.getUpdates().values().size(), is(1)); assertUpdate(modificationDiff.getUpdates().values().iterator().next(), TOP_CONTAINER_ID.node(FOR_LEAF_LIST_QNAME), null, topContainer.getChild(new YangInstanceIdentifier.NodeIdentifier(FOR_LEAF_LIST_QNAME)).get()); } @Test public void testWritePresenceEmptyContainer() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final NormalizedNode<?, ?> presenceContainer = Builders.containerBuilder() .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(PRESENCE_CONTAINER_QNAME)) .build(); final YangInstanceIdentifier PRESENCE_CONTAINER_ID = YangInstanceIdentifier.of(PRESENCE_CONTAINER_QNAME); dataTreeModification.write(PRESENCE_CONTAINER_ID, presenceContainer); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final ModificationDiff modificationDiff = getModificationDiff(prepare); dataTree.commit(prepare); final Optional<NormalizedNode<?, ?>> presenceAfter = getModification(dataTree).readNode(PRESENCE_CONTAINER_ID); assertTrue(presenceAfter.isPresent()); assertThat(presenceAfter.get(), equalTo(presenceContainer)); assertThat(modificationDiff.getUpdates().size(), is(1)); assertThat(modificationDiff.getUpdates().values().size(), is(1)); assertUpdate(modificationDiff.getUpdates().values().iterator().next(), PRESENCE_CONTAINER_ID, null, presenceContainer); } @Test public void testInitialWriteForContainerWithChoice() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final ContainerNode containerWithChoice = Builders.containerBuilder() .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(WITH_CHOICE_CONTAINER_QNAME)) .withChild(Builders.choiceBuilder() .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(CHOICE_QNAME)) .withChild(ImmutableNodes.leafNode(IN_CASE1_LEAF_QNAME, "withinCase1")) .build()) .build(); final YangInstanceIdentifier WITH_CHOICE_CONTAINER_ID = 
YangInstanceIdentifier.of(WITH_CHOICE_CONTAINER_QNAME); dataTreeModification.write(WITH_CHOICE_CONTAINER_ID, containerWithChoice); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final Map<YangInstanceIdentifier, NormalizedNodeUpdate> updates = getModificationDiff(prepare).getUpdates(); assertThat(updates.size(), is(1)); assertUpdate(getNormalizedNodeUpdateForAfterType(updates, ContainerNode.class), WITH_CHOICE_CONTAINER_ID, null, containerWithChoice); } @Test public void testWriteNonPresenceEmptyContainer() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final NormalizedNode<?, ?> topContainer = ImmutableNodes.containerNode(TOP_CONTAINER_QNAME); dataTreeModification.write(TOP_CONTAINER_ID, topContainer); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final ModificationDiff modificationDiff = getModificationDiff(prepare); assertThat(modificationDiff.getUpdates().size(), is(0)); } @Test public void testWriteNonPresenceNonEmptyContainer() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final NormalizedNode<?, ?> topContainer = Builders.containerBuilder() .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(TOP_CONTAINER_QNAME)) .withChild(getNestedList("value","txt")) .build(); dataTreeModification.write(TOP_CONTAINER_ID, topContainer); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final ModificationDiff modificationDiff = getModificationDiff(prepare); assertThat(modificationDiff.getUpdates().size(), is(1)); assertNodeModificationPresent(modificationDiff, ImmutableSet.of(NESTED_LIST_QNAME)); } @Test public void testWriteNonPresenceEmptyNestedContainer() throws Exception { final DataTree dataTree = getDataTree(); final DataTreeModification dataTreeModification = getModification(dataTree); final NormalizedNode<?, ?> topContainer = Builders.containerBuilder() .withNodeIdentifier(new YangInstanceIdentifier.NodeIdentifier(TOP_CONTAINER_QNAME)) .withChild(ImmutableNodes.containerNode(EMPTY_QNAME)) .withChild(ImmutableNodes.leafNode(STRING_LEAF_QNAME, "1")) .build(); dataTreeModification.write(TOP_CONTAINER_ID, topContainer); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final ModificationDiff modificationDiff = getModificationDiff(prepare); dataTree.commit(prepare); assertThat(modificationDiff.getUpdates().size(), is(1)); } @Test public void testUpdateWrite() throws Exception { final DataTree dataTree = getDataTree(); final ContainerNode topContainer = getTopContainer("string1"); addNodeToTree(dataTree, topContainer, TOP_CONTAINER_ID); final DataTreeModification dataTreeModification = getModification(dataTree); final NormalizedNode<?, ?> topContainerAfter = getTopContainer("string2"); dataTreeModification.write(TOP_CONTAINER_ID, topContainerAfter); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final Map<YangInstanceIdentifier, NormalizedNodeUpdate> updates = getModificationDiff(prepare).getUpdates(); assertThat(updates.size(), is(1)); assertThat(updates.values().size(), is(1)); assertUpdate(updates.values().iterator().next(), TOP_CONTAINER_ID, topContainer, topContainerAfter); } @Test public void testUpdateDelete() throws Exception { final DataTree dataTree = getDataTree(); final ContainerNode 
topContainer = getTopContainer("string1"); addNodeToTree(dataTree, topContainer, TOP_CONTAINER_ID); final DataTreeModification dataTreeModification = getModification(dataTree); dataTreeModification.delete(TOP_CONTAINER_ID); final DataTreeCandidateTip prepare = prepareModification(dataTree, dataTreeModification); final Map<YangInstanceIdentifier, NormalizedNodeUpdate> updates = getModificationDiff(prepare).getUpdates(); assertThat(updates.size(), is(1)); assertThat(updates.values().size(), is(1)); assertUpdate(updates.values().iterator().next(), TOP_CONTAINER_ID, topContainer, null); }
FlatWriterRegistryBuilder extends AbstractSubtreeManagerRegistryBuilderBuilder<Writer<? extends DataObject>, WriterRegistry> implements ModifiableWriterRegistryBuilder, WriterRegistryBuilder { @Override public WriterRegistry build() { final ImmutableMap<InstanceIdentifier<?>, Writer<?>> mappedWriters = getMappedHandlers(); LOG.debug("Building writer registry with writers: {}", mappedWriters.keySet().stream() .map(InstanceIdentifier::getTargetType) .map(Class::getSimpleName) .collect(Collectors.joining(", "))); LOG.trace("Building writer registry with writers: {}", mappedWriters); return new FlatWriterRegistry(mappedWriters); } FlatWriterRegistryBuilder(@Nonnull final YangDAG yangDAG); @Override WriterRegistry build(); }
@Test public void testBuild() throws Exception { final FlatWriterRegistryBuilder flatWriterRegistryBuilder = new FlatWriterRegistryBuilder(new YangDAG()); final Writer<? extends DataObject> writer = mockWriter(DataObjects.DataObject3.class); flatWriterRegistryBuilder.add(writer); final WriterRegistry build = flatWriterRegistryBuilder.build(); final InstanceIdentifier<DataObjects.DataObject3> id = InstanceIdentifier.create(DataObjects.DataObject3.class); final DataObjectUpdate update = mock(DataObjectUpdate.class); doReturn(id).when(update).getId(); final DataObjects.DataObject3 before = mock(DataObjects.DataObject3.class); final DataObjects.DataObject3 after = mock(DataObjects.DataObject3.class); when(update.getDataBefore()).thenReturn(before); when(update.getDataAfter()).thenReturn(after); WriterRegistry.DataObjectUpdates updates = new WriterRegistry.DataObjectUpdates( Multimaps.forMap(Collections.singletonMap(id, update)), Multimaps.forMap(Collections.emptyMap())); final WriteContext ctx = mock(WriteContext.class); build.processModifications(updates, ctx); verify(writer).processModification(id, before, after, ctx); } @Test(expected = IllegalArgumentException.class) public void testBuildUnknownWriter() throws Exception { final FlatWriterRegistryBuilder flatWriterRegistryBuilder = new FlatWriterRegistryBuilder(new YangDAG()); final Writer<? extends DataObject> writer = mockWriter(DataObjects.DataObject3.class); flatWriterRegistryBuilder.add(writer); final WriterRegistry build = flatWriterRegistryBuilder.build(); final InstanceIdentifier<DataObjects.DataObject1> id2 = InstanceIdentifier.create(DataObjects.DataObject1.class); final DataObjectUpdate update2 = mock(DataObjectUpdate.class); final WriterRegistry.DataObjectUpdates updates = new WriterRegistry.DataObjectUpdates( Multimaps.forMap(Collections.singletonMap(id2, update2)), Multimaps.forMap(Collections.emptyMap())); build.processModifications(updates, mock(WriteContext.class)); }
YangModuleWhitelistReader extends JAXBContextHolder { @Nonnull public YangModuleWhitelist read(@Nonnull final Path path) { final Unmarshaller unmarshaller = createUnmarshaller(); return YangModuleWhitelist.class.cast(readWhitelist(path, unmarshaller)); } YangModuleWhitelistReader(); @Nonnull YangModuleWhitelist read(@Nonnull final Path path); }
@Test public void read() throws Exception { final YangModuleWhitelist whitelist = reader.read( Paths.get(this.getClass().getClassLoader().getResource("expected-whitelist.xml").getPath())); assertNotNull(whitelist); final List<Module> modules = whitelist.getModules(); assertEquals(2, modules.size()); final Module moduleA = new Module(); final Module moduleB = new Module(); moduleA.setPckg("module.a.package"); moduleA.setDescription("desc"); moduleB.setPckg("module.b.package"); assertTrue(modules.containsAll(ImmutableSet.of(moduleA, moduleB))); }
YangModuleWhitelistWriter extends JAXBContextHolder { public void write(@Nonnull final YangModuleWhitelist whitelist, @Nonnull final Path outPath, final boolean formatOutput) { Objects.requireNonNull(whitelist, "Cannot write null whitelist"); final Marshaller marshaller = createMarshaller(getCtx()); setupPrettyPrint(marshaller, formatOutput); whiteWhitelist(whitelist, outPath, marshaller); } YangModuleWhitelistWriter(); void write(@Nonnull final YangModuleWhitelist whitelist, @Nonnull final Path outPath, final boolean formatOutput); }
@Test public void write() throws Exception { final Module moduleA = new Module(); final Module moduleB = new Module(); moduleA.setPckg("module.a.package"); moduleA.setDescription("desc"); moduleB.setPckg("module.b.package"); final YangModuleWhitelist whitelist = new YangModuleWhitelist(); whitelist.setModules(ImmutableList.of(moduleA, moduleB)); writer.write(whitelist, path, false); final String output = Files.readAllLines(path).stream().collect(Collectors.joining()); final String expectedOutput = Resources .toString(this.getClass().getClassLoader().getResource("expected-whitelist.xml"), StandardCharsets.UTF_8); assertEquals(expectedOutput, output); }
SubtreeWriter implements Writer<D> { static Writer<?> createForWriter(@Nonnull final Set<InstanceIdentifier<?>> handledChildren, @Nonnull final Writer<? extends DataObject> writer) { return new SubtreeWriter<>(writer, handledChildren); } private SubtreeWriter(final Writer<D> delegate, final Set<InstanceIdentifier<?>> handledTypes); private SubtreeWriter(final Writer<D> delegate); @Override void validate(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nullable final DataObject dataBefore, @Nullable final DataObject dataAfter, @Nonnull final WriteContext ctx); @Override void processModification( @Nonnull final InstanceIdentifier<? extends DataObject> id, @Nullable final DataObject dataBefore, @Nullable final DataObject dataAfter, @Nonnull final WriteContext ctx); @Override boolean supportsDirectUpdate(); @Override boolean canProcess(@Nonnull final InstanceIdentifier<? extends DataObject> instanceIdentifier); @Override @Nonnull InstanceIdentifier<D> getManagedDataObjectType(); }
@Test(expected = IllegalArgumentException.class) public void testSubtreeWriterCreationFail() throws Exception { SubtreeWriter.createForWriter(Collections.singleton(InstanceIdentifier.create(DataObject.class)), writer); } @Test(expected = IllegalArgumentException.class) public void testSubtreeWriterCreationFailInvalidIid() throws Exception { SubtreeWriter.createForWriter(Collections.singleton(DataObjects.DataObject4.IID), writer); }
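For contrast, a hypothetical positive case under the same fixture (assuming, as the second failing test suggests, that the mocked writer manages DataObjects.DataObject4): the handled identifiers must point strictly below the writer's managed type.

// Hypothetical sketch: a child identifier nested under the writer's managed type is accepted,
// and the resulting subtree writer reports it as processable.
final Writer<?> subtreeWriter = SubtreeWriter.createForWriter(
        Collections.singleton(DataObjects.DataObject4.DataObject41.IID), writer);
assertTrue(subtreeWriter.canProcess(DataObjects.DataObject4.DataObject41.IID));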
SubtreeWriter implements Writer<D> { @Override public boolean canProcess(@Nonnull final InstanceIdentifier<? extends DataObject> instanceIdentifier) { if (isWildcarded) { final Class<D> parent = delegate.getManagedDataObjectType().getTargetType(); for (InstanceIdentifier.PathArgument pathArgument : instanceIdentifier.getPathArguments()) { if (pathArgument.getType().equals(parent)) { return true; } } return false; } return handledChildTypes.parallelStream() .filter(childIiD -> instanceIdentifier.getTargetType().equals(childIiD.getTargetType())) .anyMatch(instanceIdentifier1 -> isPathEqual(instanceIdentifier, instanceIdentifier1)); } private SubtreeWriter(final Writer<D> delegate, final Set<InstanceIdentifier<?>> handledTypes); private SubtreeWriter(final Writer<D> delegate); @Override void validate(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nullable final DataObject dataBefore, @Nullable final DataObject dataAfter, @Nonnull final WriteContext ctx); @Override void processModification( @Nonnull final InstanceIdentifier<? extends DataObject> id, @Nullable final DataObject dataBefore, @Nullable final DataObject dataAfter, @Nonnull final WriteContext ctx); @Override boolean supportsDirectUpdate(); @Override boolean canProcess(@Nonnull final InstanceIdentifier<? extends DataObject> instanceIdentifier); @Override @Nonnull InstanceIdentifier<D> getManagedDataObjectType(); }
@Test public void testCanHandleChild() throws Exception { final SubtreeWriter<?> forWriter = createSubtreeWriter(); InstanceIdentifier<DataObjects.DataObject4.DataObject41.DataObject411> testIid = InstanceIdentifier.create( DataObjects.DataObject4.class).child(DataObjects.DataObject4.DataObject41.class).child( DataObjects.DataObject4.DataObject41.DataObject411.class); assertTrue(forWriter.canProcess(testIid)); }
FlatWriterRegistry implements WriterRegistry { static Collection<DataObjectUpdate> getParentDataObjectUpdate(final WriteContext ctx, final Multimap<InstanceIdentifier<?>, ? extends DataObjectUpdate> updates, final Writer<?> writer) { return ((SubtreeWriter<?>) writer).getHandledChildTypes().stream() .filter(updates::containsKey) .map(unkeyedId -> updates.get(unkeyedId)) .flatMap(doUpdates -> doUpdates.stream()) .map(DataObjectUpdate::getId) .map(id -> getSingleParentDataObjectUpdate(ctx, (Multimap<InstanceIdentifier<?>, DataObjectUpdate>) updates, writer, id)) .collect(toMap(update -> RWUtils.cutId(update.getId(), writer.getManagedDataObjectType()), Function.identity(), (u1, u2) -> u1)) .values(); } FlatWriterRegistry(@Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writersById); @Override void validateModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx); @Override void processModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx); @Override boolean writerSupportsUpdate(@Nonnull final InstanceIdentifier<?> type); }
@Test public void testSubtreeWriterUpdateAggregation() throws Exception { Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); when(ctx.readAfter(DataObject1.IID)).thenReturn(Optional.of(mock(DataObject1.class))); when(ctx.readBefore(DataObject1.IID)).thenReturn(Optional.of(mock(DataObject1.class))); Writer<?> writer = SubtreeWriter.createForWriter(Collections.singleton(DataObjects.DataObject1ChildK.IID), writer1); InstanceIdentifier<DataObjects.DataObject1ChildK> update1Id = DataObject1.IID.child(DataObjects.DataObject1ChildK.class, new DataObjects.DataObject1ChildKey()); InstanceIdentifier<DataObjects.DataObject1ChildK> update2Id = DataObject1.IID.child(DataObjects.DataObject1ChildK.class, new DataObjects.DataObject1ChildKey()); updates.putAll(DataObjects.DataObject1ChildK.IID, Lists.newArrayList( DataObjectUpdate.create(update1Id, mock(DataObjects.DataObject1ChildK.class), mock(DataObjects.DataObject1ChildK.class)), DataObjectUpdate.create(update2Id, mock(DataObjects.DataObject1ChildK.class), mock(DataObjects.DataObject1ChildK.class)))); Collection<DataObjectUpdate> parentDataObjectUpdate = FlatWriterRegistry.getParentDataObjectUpdate(ctx, updates, writer); assertEquals(1, parentDataObjectUpdate.size()); } @Test public void testSubtreeWriterUpdateAggregationForList() throws Exception { Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); KeyedInstanceIdentifier<DataObjects.DataObject1ChildK, DataObjects.DataObject1ChildKey> parentKeyedId1 = DataObject1.IID.child(DataObjects.DataObject1ChildK.class, new DataObjects.DataObject1ChildKey()); KeyedInstanceIdentifier<DataObjects.DataObject1ChildK, DataObjects.DataObject1ChildKey> parentKeyedId2 = DataObject1.IID.child(DataObjects.DataObject1ChildK.class, new DataObjects.DataObject1ChildKey()); when(ctx.readBefore(parentKeyedId1)).thenReturn(Optional.of(mock(DataObjects.DataObject1ChildK.class))); when(ctx.readAfter(parentKeyedId1)).thenReturn(Optional.of(mock(DataObjects.DataObject1ChildK.class))); when(ctx.readBefore(parentKeyedId2)).thenReturn(Optional.of(mock(DataObjects.DataObject1ChildK.class))); when(ctx.readAfter(parentKeyedId2)).thenReturn(Optional.of(mock(DataObjects.DataObject1ChildK.class))); Writer<?> writer = SubtreeWriter.createForWriter(Sets.newHashSet( InstanceIdentifier.create(DataObjects.DataObject1ChildK.class).child(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class), InstanceIdentifier.create(DataObjects.DataObject1ChildK.class).child(DataObjects.DataObject1ChildK.DataObject1ChildKNested2.class)), writer4); InstanceIdentifier<DataObjects.DataObject1ChildK.DataObject1ChildKNested> updateList1Id = parentKeyedId1.child(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class); InstanceIdentifier<DataObjects.DataObject1ChildK.DataObject1ChildKNested> updateList2Id = parentKeyedId2.child(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class); updates.putAll(DataObjects.DataObject1ChildK.DataObject1ChildKNested.IID, Lists.newArrayList( DataObjectUpdate.create(updateList1Id, mock(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class), mock(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class)), DataObjectUpdate.create(updateList2Id, mock(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class), mock(DataObjects.DataObject1ChildK.DataObject1ChildKNested.class)))); Collection<DataObjectUpdate> parentDataObjectUpdate = FlatWriterRegistry.getParentDataObjectUpdate(ctx, updates, writer); assertEquals(2, parentDataObjectUpdate.size()); }
FlatWriterRegistry implements WriterRegistry { @Override public void processModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx) throws TranslationException { if (updates.isEmpty()) { return; } final List<DataObjectUpdate> alreadyProcessed = new LinkedList<>(); if (updates.containsOnlySingleType()) { singleUpdate(updates.getDeletes(), alreadyProcessed, ctx); singleUpdate(updates.getUpdates(), alreadyProcessed, ctx); } else { bulkUpdate(updates.getDeletes(), alreadyProcessed, ctx, writersOrderReversed); bulkUpdate(updates.getUpdates(), alreadyProcessed, ctx, writersOrder); } LOG.debug("Update successful for types: {}", updates.getTypeIntersection()); LOG.trace("Update successful for: {}", updates); } FlatWriterRegistry(@Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writersById); @Override void validateModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx); @Override void processModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx); @Override boolean writerSupportsUpdate(@Nonnull final InstanceIdentifier<?> type); }
@Test public void testMultipleUpdatesForSingleWriter() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); final InstanceIdentifier<DataObject1> iid = InstanceIdentifier.create(DataObject1.class); final InstanceIdentifier<DataObject1> iid2 = InstanceIdentifier.create(DataObject1.class); final DataObject1 dataObject = mock(DataObject1.class); updates.put(DataObject1.IID, DataObjectUpdate.create(iid, dataObject, dataObject)); updates.put(DataObject1.IID, DataObjectUpdate.create(iid2, dataObject, dataObject)); flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); verify(writer1).processModification(iid, dataObject, dataObject, ctx); verify(writer1).processModification(iid2, dataObject, dataObject, ctx); verifyNoMoreInteractions(writer1); verifyZeroInteractions(writer2); } @Test public void testMultipleUpdatesForMultipleWriters() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); final InstanceIdentifier<DataObject1> iid = InstanceIdentifier.create(DataObject1.class); final DataObject1 dataObject = mock(DataObject1.class); updates.put(DataObject1.IID, DataObjectUpdate.create(iid, dataObject, dataObject)); final InstanceIdentifier<DataObject2> iid2 = InstanceIdentifier.create(DataObject2.class); final DataObject2 dataObject2 = mock(DataObject2.class); updates.put(DataObject2.IID, DataObjectUpdate.create(iid2, dataObject2, dataObject2)); flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); final InOrder inOrder = inOrder(writer1, writer2); inOrder.verify(writer1).processModification(iid, dataObject, dataObject, ctx); inOrder.verify(writer2).processModification(iid2, dataObject2, dataObject2, ctx); verify(writer1,times(1)).processModification(any(),any(),any(),any()); verify(writer2,times(1)).processModification(any(),any(),any(),any()); } @Test public void testMultipleDeletesForMultipleWriters() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate.DataObjectDelete> deletes = HashMultimap.create(); final InstanceIdentifier<DataObject1> iid = InstanceIdentifier.create(DataObject1.class); final DataObject1 dataObject = mock(DataObject1.class); deletes.put(DataObject1.IID, ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid, dataObject, null))); final InstanceIdentifier<DataObject2> iid2 = InstanceIdentifier.create(DataObject2.class); final DataObject2 dataObject2 = mock(DataObject2.class); deletes.put( DataObject2.IID, ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid2, dataObject2, null))); flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(ImmutableMultimap.of(), deletes), ctx); final InOrder inOrder = inOrder(writer1, writer2); inOrder.verify(writer2).processModification(iid2, dataObject2, null, ctx); inOrder.verify(writer1).processModification(iid, dataObject, null, ctx); verify(writer1,times(1)).processModification(any(),any(),any(),any()); 
verify(writer2,times(1)).processModification(any(),any(),any(),any()); } @Test public void testMultipleUpdatesAndDeletesForMultipleWriters() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate.DataObjectDelete> deletes = HashMultimap.create(); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); final InstanceIdentifier<DataObject1> iid = InstanceIdentifier.create(DataObject1.class); final DataObject1 dataObject = mock(DataObject1.class); deletes.put(DataObject1.IID, ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid, dataObject, null))); updates.put(DataObject1.IID, DataObjectUpdate.create(iid, dataObject, dataObject)); final InstanceIdentifier<DataObject2> iid2 = InstanceIdentifier.create(DataObject2.class); final DataObject2 dataObject2 = mock(DataObject2.class); deletes.put( DataObject2.IID, ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid2, dataObject2, null))); updates.put(DataObject2.IID, DataObjectUpdate.create(iid2, dataObject2, dataObject2)); flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, deletes), ctx); final InOrder inOrder = inOrder(writer1, writer2); inOrder.verify(writer2).processModification(iid2, dataObject2, null, ctx); inOrder.verify(writer1).processModification(iid, dataObject, null, ctx); inOrder.verify(writer1).processModification(iid, dataObject, dataObject, ctx); inOrder.verify(writer2).processModification(iid2, dataObject2, dataObject2, ctx); verify(writer1,times(2)).processModification(any(),any(),any(),any()); verify(writer2,times(2)).processModification(any(),any(),any(),any()); } @Test(expected = IllegalArgumentException.class) public void testMultipleUpdatesOneMissing() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); addUpdate(updates, DataObject1.class); addUpdate(updates, DataObject2.class); flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); } @Test public void testMultipleUpdatesFirstFailing() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); doThrow(new RuntimeException()).when(writer1) .processModification(any(InstanceIdentifier.class), any(DataObject.class), any(DataObject.class), any(WriteContext.class)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); addUpdate(updates, DataObject1.class); addUpdate(updates, DataObject2.class); try { flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); fail("Bulk update should have failed on writer1 with UpdateFailedException"); } catch (UpdateFailedException e) { assertThat(e.getProcessed(), hasSize(0)); } } @Test public void testMultipleUpdatesSecondFailing() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); doThrow(new RuntimeException()).when(writer2) .processModification(any(InstanceIdentifier.class), any(DataObject.class), any(DataObject.class), any(WriteContext.class)); final 
Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); addUpdate(updates, DataObject1.class); addUpdate(updates, DataObject2.class); try { flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); fail("Bulk update should have failed on writer2 with UpdateFailedException"); } catch (UpdateFailedException e) { final List<DataObjectUpdate> alreadyProcessed = e.getProcessed(); assertThat(alreadyProcessed, hasSize(1)); assertEquals(updateData(DataObject1.class, DataObject1.IID), e.getProcessed().iterator().next()); } } @Test public void testMultipleUpdatesLastFailing() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry( ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2, DataObjects.DataObject3.IID, writer3)); doThrow(new RuntimeException()).when(writer3) .processModification(any(InstanceIdentifier.class), any(DataObject.class), any(DataObject.class), any(WriteContext.class)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); addUpdate(updates, DataObject1.class); addUpdate(updates, DataObject2.class); addUpdate(updates, DataObjects.DataObject3.class); try { flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); fail("Bulk update should have failed on writer3 with UpdateFailedException"); } catch (UpdateFailedException e) { final List<DataObjectUpdate> alreadyProcessed = e.getProcessed(); assertEquals(2, alreadyProcessed.size()); assertTrue(alreadyProcessed.contains(updateData(DataObject1.class, DataObject1.IID))); assertTrue(alreadyProcessed.contains(updateData(DataObject2.class, DataObject2.IID))); } } @Test public void testMultipleUpdatesWithOneKeyedContainer() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry( ImmutableMap.of(DataObject1.IID, writer1, DataObjects.DataObject1ChildK.IID, writer4)); doThrow(new RuntimeException()).when(writer1) .processModification(any(InstanceIdentifier.class), any(DataObject.class), any(DataObject.class), any(WriteContext.class)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); addKeyedUpdate(updates, DataObjects.DataObject1ChildK.class); addUpdate(updates, DataObject1.class); try { flatWriterRegistry.processModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); fail("Bulk update should have failed on writer1 with UpdateFailedException"); } catch (UpdateFailedException e) { assertTrue(e.getProcessed().isEmpty()); } }
FlatWriterRegistry implements WriterRegistry { @Override public void validateModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx) throws DataValidationFailedException { validateModifications(updates.getDeletes(), ctx); validateModifications(updates.getUpdates(), ctx); } FlatWriterRegistry(@Nonnull final ImmutableMap<InstanceIdentifier<?>, Writer<?>> writersById); @Override void validateModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx); @Override void processModifications(@Nonnull final DataObjectUpdates updates, @Nonnull final WriteContext ctx); @Override boolean writerSupportsUpdate(@Nonnull final InstanceIdentifier<?> type); }
@Test(expected = IllegalArgumentException.class) public void testValidateMissingWriter() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); addUpdate(updates, DataObject1.class); addUpdate(updates, DataObject2.class); flatWriterRegistry.validateModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); } @Test public void testValidateSingleWriter() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); final InstanceIdentifier<DataObject1> iid = InstanceIdentifier.create(DataObject1.class); final InstanceIdentifier<DataObject1> iid2 = InstanceIdentifier.create(DataObject1.class); final DataObject1 dataObject = mock(DataObject1.class); updates.put(DataObject1.IID, DataObjectUpdate.create(iid, dataObject, dataObject)); updates.put(DataObject1.IID, DataObjectUpdate.create(iid2, dataObject, dataObject)); flatWriterRegistry .validateModifications(new WriterRegistry.DataObjectUpdates(updates, ImmutableMultimap.of()), ctx); verify(writer1).validate(iid, dataObject, dataObject, ctx); verify(writer1).validate(iid2, dataObject, dataObject, ctx); verifyNoMoreInteractions(writer1); verifyZeroInteractions(writer2); } @Test public void testValidateMultipleWriters() throws Exception { final FlatWriterRegistry flatWriterRegistry = new FlatWriterRegistry(ImmutableMap.of(DataObject1.IID, writer1, DataObject2.IID, writer2)); final Multimap<InstanceIdentifier<?>, DataObjectUpdate.DataObjectDelete> deletes = HashMultimap.create(); final Multimap<InstanceIdentifier<?>, DataObjectUpdate> updates = HashMultimap.create(); final InstanceIdentifier<DataObject1> iid = InstanceIdentifier.create(DataObject1.class); final DataObject1 dataObject = mock(DataObject1.class); deletes.put(DataObject1.IID, ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid, dataObject, null))); updates.put(DataObject1.IID, DataObjectUpdate.create(iid, null, dataObject)); final InstanceIdentifier<DataObject2> iid2 = InstanceIdentifier.create(DataObject2.class); final DataObject2 dataObject2 = mock(DataObject2.class); deletes.put(DataObject2.IID, ((DataObjectUpdate.DataObjectDelete) DataObjectUpdate.create(iid2, dataObject2, null))); updates.put(DataObject2.IID, DataObjectUpdate.create(iid2, dataObject2, dataObject2)); flatWriterRegistry.validateModifications(new WriterRegistry.DataObjectUpdates(updates, deletes), ctx); verify(writer1).validate(iid, dataObject, null, ctx); verify(writer1).validate(iid, null, dataObject, ctx); verify(writer2).validate(iid2, dataObject2, null, ctx); verify(writer2).validate(iid2, dataObject2, dataObject2, ctx); verifyNoMoreInteractions(writer1); verifyNoMoreInteractions(writer2); }
GenericListWriter extends AbstractGenericWriter<D> implements ListWriter<D, K> { @Override protected void writeCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final D data, @Nonnull final WriteContext ctx) throws WriteFailedException { try { customizer.writeCurrentAttributes(id, data, ctx); } catch (RuntimeException e) { throw new WriteFailedException.CreateFailedException(id, data, e); } } GenericListWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final ListWriterCustomizer<D, K> customizer); GenericListWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final ListWriterCustomizer<D, K> customizer, @Nonnull final Validator<D> validator); }
@Test(expected = WriteFailedException.CreateFailedException.class) public void testWriteFail() throws Exception { doThrow(new IllegalStateException("test")).when(customizer).writeCurrentAttributes(DATA_OBJECT_ID, after, ctx); writer = new GenericListWriter<>(DATA_OBJECT_ID, customizer); writer.writeCurrentAttributes(DATA_OBJECT_ID, after, ctx); }
GenericListWriter extends AbstractGenericWriter<D> implements ListWriter<D, K> { @Override protected void updateCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final D dataBefore, @Nonnull final D dataAfter, @Nonnull final WriteContext ctx) throws WriteFailedException { try { customizer.updateCurrentAttributes(id, dataBefore, dataAfter, ctx); } catch (RuntimeException e) { throw new WriteFailedException.UpdateFailedException(id, dataBefore, dataAfter, e); } } GenericListWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final ListWriterCustomizer<D, K> customizer); GenericListWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final ListWriterCustomizer<D, K> customizer, @Nonnull final Validator<D> validator); }
@Test(expected = WriteFailedException.UpdateFailedException.class) public void testUpdateFail() throws Exception { doThrow(new IllegalStateException("test")).when(customizer) .updateCurrentAttributes(DATA_OBJECT_ID, before, after, ctx); writer = new GenericListWriter<>(DATA_OBJECT_ID, customizer); writer.updateCurrentAttributes(DATA_OBJECT_ID, before, after, ctx); }
GenericListWriter extends AbstractGenericWriter<D> implements ListWriter<D, K> { @Override protected void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final D dataBefore, @Nonnull final WriteContext ctx) throws WriteFailedException { try { customizer.deleteCurrentAttributes(id, dataBefore, ctx); } catch (RuntimeException e) { throw new WriteFailedException.DeleteFailedException(id, e); } } GenericListWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final ListWriterCustomizer<D, K> customizer); GenericListWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final ListWriterCustomizer<D, K> customizer, @Nonnull final Validator<D> validator); }
@Test(expected = WriteFailedException.DeleteFailedException.class) public void testDeleteFail() throws Exception { doThrow(new IllegalStateException("test")).when(customizer) .deleteCurrentAttributes(DATA_OBJECT_ID, before, ctx); writer = new GenericListWriter<>(DATA_OBJECT_ID, customizer); writer.deleteCurrentAttributes(DATA_OBJECT_ID, before, ctx); }
GenericWriter extends AbstractGenericWriter<D> { @Override protected void writeCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final D data, @Nonnull final WriteContext ctx) throws WriteFailedException { try { customizer.writeCurrentAttributes(id, data, ctx); } catch (RuntimeException e) { throw new WriteFailedException.CreateFailedException(id, data, e); } } GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer); GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer, @Nonnull final Validator<D> validator); }
@Test(expected = WriteFailedException.CreateFailedException.class) public void testWriteFail() throws Exception { doThrow(new IllegalStateException("test")).when(customizer).writeCurrentAttributes(DATA_OBJECT_ID, after, ctx); writer = new GenericWriter<>(DATA_OBJECT_ID, customizer); writer.writeCurrentAttributes(DATA_OBJECT_ID, after, ctx); }
GenericWriter extends AbstractGenericWriter<D> { @Override protected void updateCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final D dataBefore, @Nonnull final D dataAfter, @Nonnull final WriteContext ctx) throws WriteFailedException { try { customizer.updateCurrentAttributes(id, dataBefore, dataAfter, ctx); } catch (RuntimeException e) { throw new WriteFailedException.UpdateFailedException(id, dataBefore, dataAfter, e); } } GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer); GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer, @Nonnull final Validator<D> validator); }
@Test(expected = WriteFailedException.UpdateFailedException.class) public void testUpdateFail() throws Exception { doThrow(new IllegalStateException("test")).when(customizer) .updateCurrentAttributes(DATA_OBJECT_ID, before, after, ctx); writer = new GenericWriter<>(DATA_OBJECT_ID, customizer); writer.updateCurrentAttributes(DATA_OBJECT_ID, before, after, ctx); }
WriteFailedException extends TranslationException { @Nonnull public InstanceIdentifier<?> getFailedId() { return failedId; } WriteFailedException(@Nonnull final InstanceIdentifier<?> failedId, @Nonnull final String message, @Nonnull final Throwable cause); WriteFailedException(@Nonnull final InstanceIdentifier<?> failedId, @Nonnull final String message); WriteFailedException(@Nonnull final InstanceIdentifier<?> failedId, @Nonnull final Throwable cause); @Nonnull InstanceIdentifier<?> getFailedId(); }
@Test public void testCreateFailed() throws Exception { final WriteFailedException.CreateFailedException cause = new WriteFailedException.CreateFailedException(id, dataAfter); final WriteFailedException.CreateFailedException createFailedException = new WriteFailedException.CreateFailedException(id, dataAfter, cause); assertEquals(createFailedException.getFailedId(), id); assertEquals(createFailedException.getData(), dataAfter); assertEquals(createFailedException.getCause(), cause); assertThat(createFailedException.getMessage(), CoreMatchers.containsString("Failed to create")); } @Test public void testUpdateFailed() throws Exception { final WriteFailedException.UpdateFailedException cause = new WriteFailedException.UpdateFailedException(id, dataBefore, dataAfter); final WriteFailedException.UpdateFailedException createFailedException = new WriteFailedException.UpdateFailedException(id, dataBefore, dataAfter, cause); assertEquals(createFailedException.getFailedId(), id); assertEquals(createFailedException.getDataBefore(), dataBefore); assertEquals(createFailedException.getDataAfter(), dataAfter); assertEquals(createFailedException.getCause(), cause); assertThat(createFailedException.getMessage(), CoreMatchers.containsString("Failed to update")); } @Test public void testDeleteFailed() throws Exception { final WriteFailedException.DeleteFailedException cause = new WriteFailedException.DeleteFailedException(id); final WriteFailedException.DeleteFailedException createFailedException = new WriteFailedException.DeleteFailedException(id, cause); assertEquals(createFailedException.getFailedId(), id); assertEquals(createFailedException.getCause(), cause); assertThat(createFailedException.getMessage(), CoreMatchers.containsString("Failed to delete")); }
GenericWriter extends AbstractGenericWriter<D> { @Override protected void deleteCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final D dataBefore, @Nonnull final WriteContext ctx) throws WriteFailedException { try { customizer.deleteCurrentAttributes(id, dataBefore, ctx); } catch (RuntimeException e) { throw new WriteFailedException.DeleteFailedException(id, e); } } GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer); GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer, @Nonnull final Validator<D> validator); }
@Test(expected = WriteFailedException.DeleteFailedException.class) public void testDeleteFail() throws Exception { doThrow(new IllegalStateException("test")).when(customizer) .deleteCurrentAttributes(DATA_OBJECT_ID, before, ctx); writer = new GenericWriter<>(DATA_OBJECT_ID, customizer); writer.deleteCurrentAttributes(DATA_OBJECT_ID, before, ctx); }
GenericWriter extends AbstractGenericWriter<D> { static boolean isUpdateSupported(final @Nonnull WriterCustomizer<?> customizer) { try { final Class<? extends WriterCustomizer> customizerClass = customizer.getClass(); final Class<?> updateDeclaringClass = customizerClass .getMethod(UPDATE_M, InstanceIdentifier.class, DataObject.class, DataObject.class, WriteContext.class) .getDeclaringClass(); final boolean supportsUpdate = !WriterCustomizer.class.equals(updateDeclaringClass); LOG.debug("Customizer {} update support : {}|Update declaring class {}", customizerClass, supportsUpdate, updateDeclaringClass); return supportsUpdate; } catch (NoSuchMethodException e) { throw new IllegalStateException("Unable to detect if customizer supports update", e); } } GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer); GenericWriter(@Nonnull final InstanceIdentifier<D> type, @Nonnull final WriterCustomizer<D> customizer, @Nonnull final Validator<D> validator); }
@Test public void testUpdateSupported() { assertFalse(GenericWriter.isUpdateSupported(new NoopWriters.NonDirectUpdateWriterCustomizer())); assertTrue(GenericWriter.isUpdateSupported(new NoopWriters.DirectUpdateWriterCustomizer())); assertTrue(GenericWriter.isUpdateSupported(new NoopWriters.ParentImplDirectUpdateWriterCustomizer())); }
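A hypothetical customizer sketch illustrating what the reflection check reacts to (method names and parameter order are taken from the GenericWriter calls shown above; the exact WriterCustomizer signatures are assumed): overriding updateCurrentAttributes, rather than inheriting the interface declaration, is what makes isUpdateSupported return true.

// Hypothetical customizer: overriding updateCurrentAttributes flips isUpdateSupported to true.
class UpdatingCustomizer implements WriterCustomizer<DataObjects.DataObject1> {
    @Override
    public void writeCurrentAttributes(InstanceIdentifier<DataObjects.DataObject1> id,
                                        DataObjects.DataObject1 dataAfter, WriteContext ctx) {
        // no-op
    }
    @Override
    public void updateCurrentAttributes(InstanceIdentifier<DataObjects.DataObject1> id,
                                         DataObjects.DataObject1 dataBefore, DataObjects.DataObject1 dataAfter,
                                         WriteContext ctx) {
        // overriding this method is detected via getMethod(UPDATE_M, ...).getDeclaringClass()
    }
    @Override
    public void deleteCurrentAttributes(InstanceIdentifier<DataObjects.DataObject1> id,
                                         DataObjects.DataObject1 dataBefore, WriteContext ctx) {
        // no-op
    }
}
assertTrue(GenericWriter.isUpdateSupported(new UpdatingCustomizer()));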
GenericInitListReader extends GenericListReader<O, K, B> implements InitListReader<O, K, B> { @Override public void init(final DataBroker broker, final InstanceIdentifier<O> id, final ReadContext ctx) throws InitFailedException { LOG.debug("{}: Initializing current: {}", this, id); try { for (K k : getAllIds(id, ctx)) { initSingle(broker, RWUtils.replaceLastInId(id, RWUtils.getCurrentIdItem(id, k)), ctx); } } catch (ReadFailedException e) { LOG.warn("{}: Failed to initialize current, unable to read: {}", this, id, e); throw new InitFailedException(e.getFailedId(), e); } } GenericInitListReader(@Nonnull final InstanceIdentifier<O> id, @Nonnull final InitializingListReaderCustomizer<O, K, B> customizer); @Override void init(final DataBroker broker, final InstanceIdentifier<O> id, final ReadContext ctx); }
@SuppressWarnings("unchecked") @Test public void testInit() throws Exception { final Initialized<TestingData> initialized = Initialized.create(DATA_OBJECT_ID, data); when(getCustomizer().isPresent(any(), any(), any())).thenReturn(true); doReturn(initialized).when(getCustomizer()).init(any(), any(), any()); when(writeTx.commit()).thenReturn(FluentFuture.from(Futures.immediateFuture(null))); when(broker.newWriteOnlyTransaction()).thenReturn(writeTx); getReader().init(broker, DATA_OBJECT_ID, ctx); verify(writeTx, times(2)).merge(CONFIGURATION, DATA_OBJECT_ID, data); verify(writeTx, times(2)).commit(); } @Test(expected = InitFailedException.class) public void testInitFailed() throws Exception { doThrow(new ReadFailedException(DATA_OBJECT_ID)).when(getCustomizer()) .readCurrentAttributes(any(), any(), any()); getReader().init(broker, DATA_OBJECT_ID, ctx); verifyZeroInteractions(writeTx); }
CompositeReader extends AbstractGenericReader<D, B> implements InitReader<D, B>, DelegatingReader<D, B> { @Nonnull @Override public Optional<? extends DataObject> read(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nonnull final ReadContext ctx) throws ReadFailedException { if (shouldReadCurrent(id)) { LOG.trace("{}: Reading current: {}", this, id); return readCurrent((InstanceIdentifier<D>) id, ctx); } else if (shouldDelegateToChild(id)) { LOG.trace("{}: Reading child: {}", this, id); return readSubtree(id, ctx); } else { LOG.trace("{}: Delegating read: {}", this, id); return delegate.read(id, ctx); } } private CompositeReader(final Reader<D, B> reader, final ImmutableMap<Class<?>, Reader<? extends DataObject, ? extends Builder<?>>> childReaders); @SuppressWarnings("unchecked") static InstanceIdentifier<D> appendTypeToId( final InstanceIdentifier<? extends DataObject> parentId, final InstanceIdentifier<D> type); @Nonnull @Override Optional<? extends DataObject> read(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nonnull final ReadContext ctx); @Override Reader<D, B> getDelegate(); @Override void readCurrentAttributes(@Nonnull final InstanceIdentifier<D> id, @Nonnull final B builder, @Nonnull final ReadContext ctx); @SuppressWarnings("unchecked") @Override void init(final DataBroker broker, final InstanceIdentifier<D> id, final ReadContext ctx); }
@Test public void testReadFallback() throws Exception { compositeReader.read(DataObjects.DataObject4.DataObject42.IID, ctx); verify(reader4).read(DataObjects.DataObject4.DataObject42.IID, ctx); verify(reader41, times(0)).read(any(InstanceIdentifier.class), any(ReadContext.class)); }
CompositeReaderRegistry implements ReaderRegistry { @Override @Nonnull public Multimap<InstanceIdentifier<? extends DataObject>, ? extends DataObject> readAll( @Nonnull final ReadContext ctx) throws ReadFailedException { LOG.debug("Reading from all delegates: {}", this); LOG.trace("Reading from all delegates: {}", rootReaders.values()); final Multimap<InstanceIdentifier<? extends DataObject>, DataObject> objects = LinkedListMultimap.create(); for (Reader<? extends DataObject, ? extends Builder<?>> rootReader : rootReaders.values()) { LOG.debug("Reading from delegate: {}", rootReader); if (rootReader instanceof ListReader) { final List<? extends DataObject> listEntries = ((ListReader) rootReader).readList(rootReader.getManagedDataObjectType(), ctx); if (!listEntries.isEmpty()) { objects.putAll(rootReader.getManagedDataObjectType(), listEntries); } } else { final Optional<? extends DataObject> read = rootReader.read(rootReader.getManagedDataObjectType(), ctx); if (read.isPresent()) { objects.putAll(rootReader.getManagedDataObjectType(), Collections.singletonList(read.get())); } } } return objects; } CompositeReaderRegistry(@Nonnull final List<Reader<? extends DataObject, ? extends Builder<?>>> rootReaders); @Override @Nonnull Multimap<InstanceIdentifier<? extends DataObject>, ? extends DataObject> readAll( @Nonnull final ReadContext ctx); @Override void initAll(@Nonnull final DataBroker broker, @Nonnull final ReadContext ctx); @Nonnull @Override Optional<? extends DataObject> read(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nonnull final ReadContext ctx); @Override String toString(); }
@Test public void testReadAll() throws Exception { reg.readAll(ctx); final InOrder inOrder = inOrder(rootReader3, rootReader4, reader31, reader41); inOrder.verify(rootReader3).read(any(InstanceIdentifier.class), any(ReadContext.class)); inOrder.verify(reader31).read(any(InstanceIdentifier.class), any(ReadContext.class)); inOrder.verify(rootReader4).read(any(InstanceIdentifier.class), any(ReadContext.class)); inOrder.verify(reader41).read(any(InstanceIdentifier.class), any(ReadContext.class)); }
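The ordering verified above follows from the list handed to the constructor shown in the class snippet; a minimal construction sketch (hypothetical, reusing the mocked rootReader3/rootReader4/ctx fixtures of this test class):

// Hypothetical sketch: root readers are consulted in registration order, which is why
// readAll verifies rootReader3 (and its child reader) before rootReader4.
final CompositeReaderRegistry registry = new CompositeReaderRegistry(
        Lists.<Reader<? extends DataObject, ? extends Builder<?>>>newArrayList(rootReader3, rootReader4));
final Multimap<InstanceIdentifier<? extends DataObject>, ? extends DataObject> all = registry.readAll(ctx);
// the contents of 'all' depend entirely on what the mocked readers return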
CompositeReaderRegistry implements ReaderRegistry { @Nonnull @Override public Optional<? extends DataObject> read(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nonnull final ReadContext ctx) throws ReadFailedException { final InstanceIdentifier.PathArgument first = checkNotNull( Iterables.getFirst(id.getPathArguments(), null), "Empty id"); final Reader<? extends DataObject, ? extends Builder<?>> reader = rootReaders.get(first.getType()); checkNotNull(reader, "Read failed. Missing reader for %s. Current readers for: %s", id, rootReaders.keySet()); LOG.debug("Reading from delegate: {}", reader); return reader.read(id, ctx); } CompositeReaderRegistry(@Nonnull final List<Reader<? extends DataObject, ? extends Builder<?>>> rootReaders); @Override @Nonnull Multimap<InstanceIdentifier<? extends DataObject>, ? extends DataObject> readAll( @Nonnull final ReadContext ctx); @Override void initAll(@Nonnull final DataBroker broker, @Nonnull final ReadContext ctx); @Nonnull @Override Optional<? extends DataObject> read(@Nonnull final InstanceIdentifier<? extends DataObject> id, @Nonnull final ReadContext ctx); @Override String toString(); }
@Test public void testReadSingleRoot() throws Exception { reg.read(DataObject3.IID, ctx); final InOrder inOrder = inOrder(rootReader3, rootReader4, reader31, reader41); inOrder.verify(rootReader3).read(any(InstanceIdentifier.class), any(ReadContext.class)); inOrder.verify(reader31).read(any(InstanceIdentifier.class), any(ReadContext.class)); verify(rootReader4, times(0)).read(any(InstanceIdentifier.class), any(ReadContext.class)); verify(reader41, times(0)).read(any(InstanceIdentifier.class), any(ReadContext.class)); }
SubtreeReader implements DelegatingReader<D, B> { static <D extends DataObject, B extends Builder<D>> Reader<D, B> createForReader(@Nonnull final Set<InstanceIdentifier<?>> handledChildren, @Nonnull final Reader<D, B> reader) { return (reader instanceof ListReader) ? new SubtreeListReader<>((ListReader) reader, handledChildren) : new SubtreeReader<>(reader, handledChildren); } SubtreeReader(final Reader<D, B> delegate, Set<InstanceIdentifier<?>> handledTypes); @Override @Nonnull Optional<? extends DataObject> read( @Nonnull final InstanceIdentifier<? extends DataObject> id, @Nonnull final ReadContext ctx); @Override Reader<D, B> getDelegate(); @Override @Nonnull InstanceIdentifier<D> getManagedDataObjectType(); }
@Test(expected = IllegalArgumentException.class) public void testCreateInvalid() throws Exception { SubtreeReader.createForReader(Sets.newHashSet(DataObjects.DataObject1.IID), delegate); }