src_fm_fc_ms_ff: string (lengths 43 to 86.8k)
target: string (lengths 20 to 276k)
Content { public final double getY() { return 0; } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testGetY() throws Exception { Content content = new TestContent(); assertEquals(0, content.getY(), 0.01); }
Content { public final double getWidth() { return Math.max(width, minWidth); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testGetWidth() throws Exception { Content content = new TestContent(); assertEquals(0, content.getWidth(), 0.01); content.setMinWidth(50); assertEquals(50, content.getWidth(), 0.01); }
Content { public final double getHeight() { return Math.max(height, minHeight); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testGetHeight() throws Exception { Content content = new TestContent(); assertEquals(0, content.getHeight(), 0.01); content.setMinHeight(20); assertEquals(20, content.getHeight(), 0.01); }
Content { public void setMinWidth(double minWidth) { if(0 > minWidth) { throw new IllegalArgumentException("min width can only be a positive number"); } this.minWidth = minWidth; refreshUp(); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testSetMinWidth() throws Exception { Content content = new TestContent(); content.setMinWidth(500); assertEquals(500, content.getWidth(), 0.01); }
Content { public void setMinHeight(double minHeight){ if(0 > minHeight) { throw new IllegalArgumentException("min height can only be a positive number"); } this.minHeight = minHeight; refreshUp(); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testSetMinHeight() throws Exception { Content content = new TestContent(); content.setMinHeight(200); assertEquals(200, content.getHeight(), 0.01); }
Content { public final void refresh() { refreshUp(); refreshDown(); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testRefreshParent() throws Exception { TestingRefreshContent testingRefreshContent = new TestingRefreshContent(); assertEquals(0, testingRefreshContent.refreshUpCount); assertEquals(0, testingRefreshContent.refreshDownCount); Content content = new TestContent(); testingRefreshContent.setAsParent(content); content.refresh(); assertEquals(1, testingRefreshContent.refreshUpCount); assertEquals(0, testingRefreshContent.refreshDownCount); } @Test public void testRefreshChildren() throws Exception { TestingRefreshContent testingRefreshContent = new TestingRefreshContent(); assertEquals(0, testingRefreshContent.refreshUpCount); assertEquals(0, testingRefreshContent.refreshDownCount); Content content = new TestContent(); testingRefreshContent.setAsChildren(content); content.refresh(); assertEquals(0, testingRefreshContent.refreshUpCount); assertEquals(0, testingRefreshContent.refreshDownCount); }
Layout extends Content { public void add(Content content) { if(null == content) { throw new NullPointerException("Content can't be null"); } content.addParent(this); contents.add(content); refresh(); } void add(Content content); void remove(Content content); final Separator getSeparator(); final void setSeparator(Separator separator); boolean isEmpty(); void clear(); @Override final void draw(Graphics2D graphics); Point2D getLocation(Content content); }
@Test public void testAdd() throws Exception { }
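The target test here is empty; a minimal sketch of what it could assert, assuming a hypothetical concrete TestLayout test double (analogous to the TestContent used above) and that isEmpty() reflects the contents list.
@Test public void testAdd() throws Exception {
    Layout layout = new TestLayout();
    assertTrue(layout.isEmpty());
    layout.add(new TestContent());
    assertFalse(layout.isEmpty());
    try {
        layout.add(null);
        fail("add(null) should throw a NullPointerException");
    } catch (NullPointerException expected) {
        // documented behavior: add rejects null content
    }
}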
Layout extends Content { public void remove(Content content) { if(null == content) { throw new NullPointerException("Content can't be null"); } content.removeParent(this); contents.remove(content); refresh(); } void add(Content content); void remove(Content content); final Separator getSeparator(); final void setSeparator(Separator separator); boolean isEmpty(); void clear(); @Override final void draw(Graphics2D graphics); Point2D getLocation(Content content); }
@Test public void testRemove() throws Exception { }
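Likewise, a hedged sketch for remove under the same TestLayout assumption.
@Test public void testRemove() throws Exception {
    Layout layout = new TestLayout();
    Content content = new TestContent();
    layout.add(content);
    assertFalse(layout.isEmpty());
    layout.remove(content);
    assertTrue(layout.isEmpty());
}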
Layout extends Content { public final Separator getSeparator() { return separator; } void add(Content content); void remove(Content content); final Separator getSeparator(); final void setSeparator(Separator separator); boolean isEmpty(); void clear(); @Override final void draw(Graphics2D graphics); Point2D getLocation(Content content); }
@Test public void testGetSeparator() throws Exception { }
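The target is empty here as well; a small set-then-get sketch (TestLayout remains an assumed test double, and Separator.EMPTY is taken from the setter shown below).
@Test public void testGetSeparator() throws Exception {
    Layout layout = new TestLayout();
    layout.setSeparator(Separator.EMPTY);
    assertEquals(Separator.EMPTY, layout.getSeparator());
}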
Layout extends Content { public final void setSeparator(Separator separator) { if(null==separator) { separator = Separator.EMPTY; } this.separator = separator; } void add(Content content); void remove(Content content); final Separator getSeparator(); final void setSeparator(Separator separator); boolean isEmpty(); void clear(); @Override final void draw(Graphics2D graphics); Point2D getLocation(Content content); }
@Test public void testSetSeparator() throws Exception { }
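A sketch for the setter, exercising the null-to-Separator.EMPTY fallback visible in the implementation above (same TestLayout assumption).
@Test public void testSetSeparator() throws Exception {
    Layout layout = new TestLayout();
    layout.setSeparator(Separator.EMPTY);
    assertEquals(Separator.EMPTY, layout.getSeparator());
    layout.setSeparator(null); // null is coerced to Separator.EMPTY
    assertEquals(Separator.EMPTY, layout.getSeparator());
}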
Layout extends Content { public Point2D getLocation(Content content) { int index = contents.indexOf(content); Point2D offset = new Point2D.Double(0,0); for(int i = 0; i < index; ++i) { offset = getNextOffset(offset, contents.get(i)); } return offset; } void add(Content content); void remove(Content content); final Separator getSeparator(); final void setSeparator(Separator separator); boolean isEmpty(); void clear(); @Override final void draw(Graphics2D graphics); Point2D getLocation(Content content); }
@Test public void testGetLocation() throws Exception { }
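A sketch for getLocation: per the implementation above, the first added content accumulates no offsets, so its location is the origin (TestLayout is still an assumed test double).
@Test public void testGetLocation() throws Exception {
    Layout layout = new TestLayout();
    Content first = new TestContent();
    layout.add(first);
    Point2D location = layout.getLocation(first);
    assertEquals(0, location.getX(), 0.01); // no preceding contents, so no offset
    assertEquals(0, location.getY(), 0.01);
}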
TextContent extends Content implements LineText.ChangeListener { @Override public void onChange() { refresh(); } TextContent(LineText text); @Override void onChange(); @Override void draw(Graphics2D graphics); Rectangle2D getMinimalBounds(); }
@Test public void testOnChange() throws Exception { SingleLineText singleLineText = new SingleLineText(); TextContent textContent = new TextContent(singleLineText); TestingRefreshContent testingRefreshContent = new TestingRefreshContent(); testingRefreshContent.setAsParent(textContent); assertEquals(0, testingRefreshContent.refreshUpCount); assertEquals(0, testingRefreshContent.refreshDownCount); singleLineText.setText("test"); assertEquals(1, testingRefreshContent.refreshUpCount); assertEquals(0, testingRefreshContent.refreshDownCount); }
Content { public boolean contains(Point2D point) { return getBounds().contains(point); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testContains() throws Exception { Content content = new TestContent(); content.setMinWidth(50); content.setMinHeight(20); assertTrue(content.contains(new Point(0,0))); assertTrue(content.contains(new Point(49,19))); assertTrue(content.contains(new Point(30,10))); assertTrue(content.contains(new Point(1,1))); assertTrue(content.contains(new Point(49,0))); assertFalse(content.contains(new Point(50,20))); assertFalse(content.contains(new Point(-1,0))); assertFalse(content.contains(new Point(100,100))); }
Content { public final Rectangle2D getBounds() { return new Rectangle2D.Double(getX(),getY(),getWidth(),getHeight()); } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testGetBounds() throws Exception { Content content = new TestContent(); content.setMinWidth(50); content.setMinHeight(20); assertEquals(0, content.getBounds().getX(), 0.01); assertEquals(0, content.getBounds().getY(), 0.01); assertEquals(50, content.getBounds().getWidth(), 0.01); assertEquals(20, content.getBounds().getHeight(), 0.01); }
Content { public final double getX() { return 0; } boolean contains(Point2D point); abstract void draw(Graphics2D graphics); final void draw(Graphics2D graphics, Point2D offset); final Rectangle2D getBounds(); Rectangle2D getMinimalBounds(); final double getX(); final double getY(); final double getWidth(); final double getHeight(); void setMinWidth(double minWidth); void setMinHeight(double minHeight); final void refresh(); }
@Test public void testGetX() throws Exception { Content content = new TestContent(); assertEquals(0, content.getX(), 0.01); }
ChainrFactory { public static Chainr fromClassPath( String chainrSpecClassPath ) { return fromClassPath( chainrSpecClassPath, null ); } static Chainr fromClassPath( String chainrSpecClassPath ); static Chainr fromClassPath( String chainrSpecClassPath, ChainrInstantiator chainrInstantiator ); static Chainr fromFileSystem( String chainrSpecFilePath ); static Chainr fromFileSystem( String chainrSpecFilePath, ChainrInstantiator chainrInstantiator ); static Chainr fromFile( File chainrSpecFile ); static Chainr fromFile( File chainrSpecFile, ChainrInstantiator chainrInstantiator ); }
@Test public void testGetChainrInstanceFromClassPath_success() throws Exception { Chainr result = ChainrFactory.fromClassPath( CLASSPATH + WELLFORMED_INPUT_FILENAME ); Assert.assertNotNull( result, "ChainrFactory did not return an instance of Chainr." ); } @Test( expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Unable to load JSON.*" ) public void testGetChainrInstanceFromClassPath_error() throws Exception { ChainrFactory.fromClassPath( CLASSPATH + MALFORMED_INPUT_FILENAME ); } @Test public void testGetChainrInstanceFromClassPathWithInstantiator_success() throws Exception { Chainr result = ChainrFactory.fromClassPath( CLASSPATH + WELLFORMED_INPUT_FILENAME, new DefaultChainrInstantiator() ); Assert.assertNotNull( result, "ChainrFactory did not return an instance of Chainr." ); } @Test( expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Unable to load JSON.*" ) public void testGetChainrInstanceFromClassPathWithInstantiator_error() throws Exception { ChainrFactory.fromClassPath( CLASSPATH + MALFORMED_INPUT_FILENAME, new DefaultChainrInstantiator() ); }
ChainrFactory { public static Chainr fromFile( File chainrSpecFile ) { return fromFile( chainrSpecFile, null ); } static Chainr fromClassPath( String chainrSpecClassPath ); static Chainr fromClassPath( String chainrSpecClassPath, ChainrInstantiator chainrInstantiator ); static Chainr fromFileSystem( String chainrSpecFilePath ); static Chainr fromFileSystem( String chainrSpecFilePath, ChainrInstantiator chainrInstantiator ); static Chainr fromFile( File chainrSpecFile ); static Chainr fromFile( File chainrSpecFile, ChainrInstantiator chainrInstantiator ); }
@Test public void testGetChainrInstanceFromFileWithInstantiator_success() throws Exception { Chainr result = ChainrFactory.fromFile( wellformedFile, new DefaultChainrInstantiator() ); Assert.assertNotNull( result, "ChainrFactory did not return an instance of Chainr." ); } @Test(expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Unable to load chainr spec file.*") public void testGetChainrInstanceFromFileWithInstantiator_error() throws Exception { ChainrFactory.fromFile( malformedFile, new DefaultChainrInstantiator() ); } @Test public void testGetChainrInstanceFromFile_success() throws Exception { Chainr result = ChainrFactory.fromFile( wellformedFile ); Assert.assertNotNull( result, "ChainrFactory did not return an instance of Chainr." ); } @Test( expectedExceptions = RuntimeException.class, expectedExceptionsMessageRegExp = "Unable to load chainr spec file.*" ) public void testGetChainrInstanceFromFile_error() throws Exception { ChainrFactory.fromFile( malformedFile ); }
Removr implements SpecDriven, Transform { @Override public Object transform( Object input ) { Map<String,Object> wrappedMap = new HashMap<>(); wrappedMap.put(ROOT_KEY, input); rootSpec.applyToMap( wrappedMap ); return input; } @Inject Removr( Object spec ); @Override Object transform( Object input ); }
@Test(dataProvider = "getTestCaseNames") public void runTestCases(String testCaseName) throws IOException { String testPath = "/json/removr/" + testCaseName; Map<String, Object> testUnit = JsonUtils.classpathToMap( testPath + ".json" ); Object input = testUnit.get( "input" ); Object spec = testUnit.get( "spec" ); Object expected = testUnit.get( "expected" ); Removr removr = new Removr( spec ); Object actual = removr.transform( input ); JoltTestUtil.runDiffy( "failed case " + testPath, expected, actual ); }
Chainr implements Transform, ContextualTransform { public static Chainr fromSpec( Object input ) { return new ChainrBuilder( input ).build(); } Chainr( List<JoltTransform> joltTransforms ); static Chainr fromSpec( Object input ); static Chainr fromSpec( Object input, ChainrInstantiator instantiator ); @Override Object transform( Object input, Map<String, Object> context ); @Override Object transform( Object input ); Object transform( int to, Object input ); Object transform( int to, Object input, Map<String, Object> context ); Object transform( int from, int to, Object input ); Object transform( int from, int to, Object input, Map<String, Object> context ); boolean hasContextualTransforms(); List<ContextualTransform> getContextualTransforms(); }
@Test(dataProvider = "failureSpecCases", expectedExceptions = SpecException.class) public void process_itBlowsUp_fromSpec(Object spec) { Chainr.fromSpec( spec ); Assert.fail("Should have failed during spec initialization."); }
CardinalityTransform implements SpecDriven, Transform { @Override public Object transform( Object input ) { rootSpec.apply( ROOT_KEY, Optional.of( input ), new WalkedPath(), null, null ); return input; } @Inject CardinalityTransform( Object spec ); @Override Object transform( Object input ); }
@Test (dataProvider = "getTestCaseUnits") public void runTestUnits(String testCaseName) throws IOException { String testPath = "/json/cardinality/" + testCaseName; Map<String, Object> testUnit = JsonUtils.classpathToMap( testPath + ".json" ); Object input = testUnit.get( "input" ); Object spec = testUnit.get( "spec" ); Object expected = testUnit.get( "expected" ); CardinalityTransform cardinalityTransform = new CardinalityTransform( spec ); Object actual = cardinalityTransform.transform( input ); JoltTestUtil.runDiffy( "failed case " + testPath, expected, actual ); } @Test public void testArrayCardinalityOne() throws IOException { Map<String, Object> input = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); }}; Map<String, Object> spec = new HashMap<String, Object>() {{ put("input", "ONE"); }}; Map<String, Object> expected = new HashMap<String, Object>() {{ put("input", 5); }}; CardinalityTransform cardinalityTransform = new CardinalityTransform(spec); Object actual = cardinalityTransform.transform(input); JoltTestUtil.runDiffy("failed array test", expected, actual); } @Test public void testArrayCardinalityMany() throws IOException { Map<String, Object> input = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); }}; Map<String, Object> spec = new HashMap<String, Object>() {{ put("input", "MANY"); }}; Map<String, Object> expected = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); }}; CardinalityTransform cardinalityTransform = new CardinalityTransform(spec); Object actual = cardinalityTransform.transform(input); JoltTestUtil.runDiffy("failed array test", expected, actual); }
StarRegexPathElement extends BasePathElement implements StarPathElement { @Override public MatchedElement match( String dataKey, WalkedPath walkedPath ) { Matcher matcher = pattern.matcher( dataKey ); if ( ! matcher.find() ) { return null; } int groupCount = matcher.groupCount(); List<String> subKeys = new ArrayList<>(groupCount); for ( int index = 1; index <= groupCount; index++) { subKeys.add( matcher.group( index ) ); } return new MatchedElement(dataKey, subKeys); } StarRegexPathElement( String key ); @Override boolean stringMatch( String literal ); @Override MatchedElement match( String dataKey, WalkedPath walkedPath ); @Override String getCanonicalForm(); }
@Test( dataProvider = "getStarPatternTests") public void starPatternTest( String testName, String spec, String dataKey, String expected1, String expected2 ) { StarPathElement star = new StarRegexPathElement( spec ); MatchedElement lpe = star.match( dataKey, null ); Assert.assertEquals( 3, lpe.getSubKeyCount() ); Assert.assertEquals( dataKey, lpe.getSubKeyRef( 0 ) ); Assert.assertEquals( expected1, lpe.getSubKeyRef( 1 ) ); Assert.assertEquals( expected2, lpe.getSubKeyRef( 2 ) ); } @Test public void mustMatchSomethingTest() { StarPathElement star = new StarRegexPathElement( "tuna-*-*"); Assert.assertNull( star.match( "tuna--", null ) ); Assert.assertNull( star.match( "tuna-bob-", null ) ); Assert.assertNull( star.match( "tuna--bob", null ) ); StarPathElement multiMetacharStarpathelement = new StarRegexPathElement( "rating-$-*-*"); Assert.assertNull( multiMetacharStarpathelement.match( "rating-capGrp1-capGrp2", null ) ); Assert.assertNull( multiMetacharStarpathelement.match( "rating-$capGrp1-capGrp2", null ) ); Assert.assertNotNull(multiMetacharStarpathelement.match( "rating-$-capGrp1-capGrp2",null) ); }
StarDoublePathElement extends BasePathElement implements StarPathElement { @Override public MatchedElement match(String dataKey, WalkedPath walkedPath) { if ( stringMatch( dataKey ) ) { List<String> subKeys = new ArrayList<>(2); int midStart = finMidIndex(dataKey); int midEnd = midStart + mid.length(); String firstStarPart = dataKey.substring( prefix.length(), midStart); subKeys.add( firstStarPart ); String secondStarPart = dataKey.substring( midEnd, dataKey.length() - suffix.length() ); subKeys.add( secondStarPart ); return new MatchedElement(dataKey, subKeys); } return null; } StarDoublePathElement(String key); @Override boolean stringMatch(String literal); @Override MatchedElement match(String dataKey, WalkedPath walkedPath); @Override String getCanonicalForm(); }
@Test public void testStarsInMiddleNonGreedy() { StarPathElement star = new StarDoublePathElement( "a*b*c" ); MatchedElement lpe = star.match( "abbccbccc", null ); Assert.assertEquals( "abbccbccc", lpe.getSubKeyRef( 0 ) ); Assert.assertEquals( "b", lpe.getSubKeyRef( 1 ) ); Assert.assertEquals( "ccbcc", lpe.getSubKeyRef( 2 ) ); Assert.assertEquals( 3, lpe.getSubKeyCount() ); }
DeepCopy { public static Object simpleDeepCopy( Object object ) { try { ByteArrayOutputStream bos = new ByteArrayOutputStream(); ObjectOutputStream oos = new ObjectOutputStream(bos); oos.writeObject(object); oos.flush(); oos.close(); bos.close(); byte [] byteData = bos.toByteArray(); ByteArrayInputStream bais = new ByteArrayInputStream(byteData); return new ObjectInputStream(bais).readObject(); } catch ( IOException ioe ) { throw new RuntimeException( "DeepCopy IOException", ioe ); } catch ( ClassNotFoundException cnf ) { throw new RuntimeException( "DeepCopy ClassNotFoundException", cnf ); } } static Object simpleDeepCopy( Object object ); }
@Test public void deepCopyTest() throws Exception { Object input = JsonUtils.classpathToObject( "/json/deepcopy/original.json" ); Map<String, Object> fiddle = (Map<String, Object>) DeepCopy.simpleDeepCopy( input ); JoltTestUtil.runDiffy( "Verify that the DeepCopy did in fact make a copy.", input, fiddle ); List array = (List) fiddle.get( "array" ); array.add( "c" ); array.set( 1, 3 ); Map<String,Object> subMap = (Map<String,Object>) fiddle.get( "map" ); subMap.put("c", "c"); subMap.put("b", 3 ); Object unmodified = JsonUtils.classpathToObject( "/json/deepcopy/original.json" ); JoltTestUtil.runDiffy( "Verify that the deepcopy was actually deep / input is unmodified", unmodified, input ); Object expectedModified = JsonUtils.classpathToObject( "/json/deepcopy/modifed.json" ); JoltTestUtil.runDiffy( "Verify fiddled post deepcopy object looks correct / was modifed.", expectedModified, fiddle ); }
JoltUtils { public static List<Object[]> listKeyChains(final Object source) { List<Object[]> keyChainList = new LinkedList<>(); if(source instanceof Map) { Map sourceMap = (Map) source; for (Object key: sourceMap.keySet()) { keyChainList.addAll(listKeyChains(key, sourceMap.get(key))); } } else if(source instanceof List) { List sourceList = (List) source; for(int i=0; i<sourceList.size(); i++) { keyChainList.addAll(listKeyChains(i, sourceList.get(i))); } } else { return Collections.emptyList(); } return keyChainList; } static void removeRecursive( Object json, String keyToRemove ); static T navigate( final Object source, final Object... paths ); static T navigateStrict( final Object source, final Object... paths ); static T navigateOrDefault( final T defaultValue, final Object source, final Object... paths ); @Deprecated static T navigateSafe(final T defaultValue, final Object source, final Object... paths); static boolean isVacantJson(final Object obj); static boolean isBlankJson(final Object obj); static List<Object[]> listKeyChains(final Object source); static List<Object[]> listKeyChains(final Object key, final Object value); static String toSimpleTraversrPath(Object[] paths); @SuppressWarnings("unchecked") static T cast(Object object); @SuppressWarnings("unchecked") static E[] cast(Object[] object); @SuppressWarnings("unchecked") static Object compactJson(Object source); @SuppressWarnings( "unchecked" ) static T store( Object source, T value, Object... paths ); @SuppressWarnings( "unchecked" ) static T remove( Object source, Object... paths ); }
@Test public void testListKeyChains() { List<Object[]> keyChains = JoltUtils.listKeyChains(jsonSource); for(int i=0; i<keyChains.size(); i++) { Object[] keyChain = keyChains.get(i); Object expected = flattenedValues[i]; Object actual = navigate(jsonSource, keyChain); Assert.assertEquals(actual, expected); } keyChains = JoltUtils.listKeyChains(jsonSource_empty); for(Object[] keyChain: keyChains) { Assert.assertTrue(isVacantJson(navigate(jsonSource_empty, keyChain))); } }
JoltUtils { public static String toSimpleTraversrPath(Object[] paths) { StringBuilder pathBuilder = new StringBuilder(); for(int i=0; i<paths.length; i++) { Object path = paths[i]; if(path instanceof Integer) { pathBuilder.append("[").append(((Integer) path).intValue()).append("]"); } else if(path instanceof String) { pathBuilder.append(path.toString()); } else{ throw new UnsupportedOperationException("Only Strings and Integers are supported as path element"); } if(!(i+1 == paths.length)) { pathBuilder.append("."); } } return pathBuilder.toString(); } static void removeRecursive( Object json, String keyToRemove ); static T navigate( final Object source, final Object... paths ); static T navigateStrict( final Object source, final Object... paths ); static T navigateOrDefault( final T defaultValue, final Object source, final Object... paths ); @Deprecated static T navigateSafe(final T defaultValue, final Object source, final Object... paths); static boolean isVacantJson(final Object obj); static boolean isBlankJson(final Object obj); static List<Object[]> listKeyChains(final Object source); static List<Object[]> listKeyChains(final Object key, final Object value); static String toSimpleTraversrPath(Object[] paths); @SuppressWarnings("unchecked") static T cast(Object object); @SuppressWarnings("unchecked") static E[] cast(Object[] object); @SuppressWarnings("unchecked") static Object compactJson(Object source); @SuppressWarnings( "unchecked" ) static T store( Object source, T value, Object... paths ); @SuppressWarnings( "unchecked" ) static T remove( Object source, Object... paths ); }
@Test(dataProvider = "pathProvider") public void testToSimpleTraversrPath(Object... paths) { String humanReadablePath = JoltUtils.toSimpleTraversrPath(paths); Assert.assertEquals(new SimpleTraversal<>(humanReadablePath).get(jsonSource).get(), navigate(jsonSource, paths)); }
StringTools { public static int countMatches(CharSequence sourceSequence, CharSequence subSequence) { if (isEmpty(sourceSequence) || isEmpty(subSequence) || sourceSequence.length() < subSequence.length()) { return 0; } int count = 0; int sourceSequenceIndex = 0; int subSequenceIndex = 0; while(sourceSequenceIndex < sourceSequence.length()) { if(sourceSequence.charAt(sourceSequenceIndex) == subSequence.charAt(subSequenceIndex)) { sourceSequenceIndex++; subSequenceIndex++; while(sourceSequenceIndex < sourceSequence.length() && subSequenceIndex < subSequence.length()) { if(sourceSequence.charAt(sourceSequenceIndex) != subSequence.charAt(subSequenceIndex)) { break; } sourceSequenceIndex++; subSequenceIndex++; } if(subSequenceIndex == subSequence.length()) { count++; } subSequenceIndex = 0; continue; } sourceSequenceIndex++; } return count; } static int countMatches(CharSequence sourceSequence, CharSequence subSequence); static boolean isNotBlank(CharSequence sourceSequence); static boolean isBlank(CharSequence sourceSequence); static boolean isEmpty(CharSequence sourceSequence); }
@Test(dataProvider = "testCaseGenerator") public void testCountMatches(String str, String subStr) throws Exception { Assert.assertEquals( StringTools.countMatches(str, subStr), StringTools.countMatches(str, subStr), "test failed: \nstr=\"" + str + "\"\nsubStr=\"" + subStr + "\"" ); }
StringTools { public static boolean isNotBlank(CharSequence sourceSequence) { return !isBlank(sourceSequence); } static int countMatches(CharSequence sourceSequence, CharSequence subSequence); static boolean isNotBlank(CharSequence sourceSequence); static boolean isBlank(CharSequence sourceSequence); static boolean isEmpty(CharSequence sourceSequence); }
@Test public void testIsNotBlank() throws Exception { Assert.assertTrue(StringTools.isNotBlank(" a a ")); Assert.assertTrue(StringTools.isNotBlank("a a")); Assert.assertTrue(StringTools.isNotBlank(" a ")); Assert.assertTrue(StringTools.isNotBlank("a")); Assert.assertFalse(StringTools.isNotBlank(" ")); Assert.assertFalse(StringTools.isNotBlank(" ")); Assert.assertFalse(StringTools.isNotBlank("")); Assert.assertFalse(StringTools.isNotBlank(null)); }
StringTools { public static boolean isBlank(CharSequence sourceSequence) { int sequenceLength; if (sourceSequence == null || (sequenceLength = sourceSequence.length()) == 0) { return true; } for (int i = 0; i < sequenceLength; i++) { if ((Character.isWhitespace(sourceSequence.charAt(i)) == false)) { return false; } } return true; } static int countMatches(CharSequence sourceSequence, CharSequence subSequence); static boolean isNotBlank(CharSequence sourceSequence); static boolean isBlank(CharSequence sourceSequence); static boolean isEmpty(CharSequence sourceSequence); }
@Test public void testIsBlank() throws Exception { Assert.assertFalse(StringTools.isBlank(" a a ")); Assert.assertFalse(StringTools.isBlank("a a")); Assert.assertFalse(StringTools.isBlank(" a ")); Assert.assertFalse(StringTools.isBlank("a")); Assert.assertTrue(StringTools.isBlank(" ")); Assert.assertTrue(StringTools.isBlank(" ")); Assert.assertTrue(StringTools.isBlank("")); Assert.assertTrue(StringTools.isBlank(null)); }
StringTools { public static boolean isEmpty(CharSequence sourceSequence) { return sourceSequence == null || sourceSequence.length() == 0; } static int countMatches(CharSequence sourceSequence, CharSequence subSequence); static boolean isNotBlank(CharSequence sourceSequence); static boolean isBlank(CharSequence sourceSequence); static boolean isEmpty(CharSequence sourceSequence); }
@Test public void testIsEmpty() throws Exception { Assert.assertTrue(StringTools.isEmpty("")); Assert.assertTrue(StringTools.isEmpty(null)); Assert.assertFalse(StringTools.isEmpty(" ")); }
Shiftr implements SpecDriven, Transform { @Override public Object transform( Object input ) { Map<String,Object> output = new HashMap<>(); MatchedElement rootLpe = new MatchedElement( ROOT_KEY ); WalkedPath walkedPath = new WalkedPath(); walkedPath.add( input, rootLpe ); rootSpec.apply( ROOT_KEY, Optional.of( input ), walkedPath, output, null ); return output.get( ROOT_KEY ); } @Inject Shiftr( Object spec ); @Override Object transform( Object input ); }
@Test(dataProvider = "getTestCaseUnits") public void runTestUnits(String testCaseName) throws IOException { String testPath = "/json/shiftr/" + testCaseName; Map<String, Object> testUnit = JsonUtils.classpathToMap( testPath + ".json" ); Object input = testUnit.get( "input" ); Object spec = testUnit.get( "spec" ); Object expected = testUnit.get( "expected" ); Shiftr shiftr = new Shiftr( spec ); Object actual = shiftr.transform( input ); JoltTestUtil.runDiffy( "failed case " + testPath, expected, actual ); }
SimpleTraversal { public Optional<DataType> get( Object tree ) { return (Optional<DataType>) traversr.get( tree, keys ); } SimpleTraversal( String humanReadablePath ); static SimpleTraversal<T> newTraversal(String humanReadablePath); Optional<DataType> get( Object tree ); Optional<DataType> set( Object tree, DataType data ); Optional<DataType> remove( Object tree ); }
@Test( dataProvider = "inAndOutTestCases") public void getTests( String testDescription, SimpleTraversal simpleTraversal, Object ignoredForTest, Object input, String expected ) throws IOException { Object original = JsonUtils.cloneJson( input ); Object tree = JsonUtils.cloneJson( input ); Optional actual = simpleTraversal.get( tree ); Assert.assertEquals( expected, actual.get() ); JoltTestUtil.runDiffy( "Get should not have modified the input", original, tree ); }
Defaultr implements SpecDriven, Transform { @Override public Object transform( Object input ) { if ( input == null ) { input = new HashMap(); } if ( input instanceof List ) { if ( arrayRoot == null ) { throw new TransformException( "The Spec provided can not handle input that is a top level Json Array." ); } arrayRoot.applyChildren( input ); } else { mapRoot.applyChildren( input ); } return input; } @Inject Defaultr( Object spec ); @Override Object transform( Object input ); }
@Test(dataProvider = "getDiffyTestCases" ) public void runDiffyTests( String testCaseName ) throws IOException { String testPath = "/json/defaultr/" + testCaseName; Map<String, Object> testUnit = JsonUtils.classpathToMap( testPath + ".json" ); Object input = testUnit.get( "input" ); Object spec = testUnit.get( "spec" ); Object expected = testUnit.get( "expected" ); Defaultr defaultr = new Defaultr(spec); Object actual = defaultr.transform( input ); JoltTestUtil.runDiffy( "failed case " + testPath, expected, actual ); } @Test public void deepCopyTest() throws IOException { Map<String, Object> testUnit = JsonUtils.classpathToMap( "/json/defaultr/__deepCopyTest.json" ); Object spec = testUnit.get( "spec" ); Defaultr defaultr = new Defaultr(spec); { Object input = testUnit.get( "input" ); Map<String, Object> fiddle = (Map<String, Object>) defaultr.transform( input ); List array = (List) fiddle.get( "array" ); array.add("a"); Map<String,Object> subMap = (Map<String,Object>) fiddle.get( "map" ); subMap.put("c", "c"); } { Map<String, Object> testUnit2 = JsonUtils.classpathToMap( "/json/defaultr/__deepCopyTest.json" ); Object input = testUnit2.get( "input" ); Object expected = testUnit2.get( "expected" ); Object actual = defaultr.transform( input ); JoltTestUtil.runDiffy( "Same spec deepcopy fail.", expected, actual ); } }
Sortr implements Transform { @Override public Object transform( Object input ) { return sortJson( input ); } @Override Object transform( Object input ); @SuppressWarnings( "unchecked" ) static Object sortJson( Object obj ); }
@Test(dataProvider = "getTestCaseNames") public void runTestCases(String testCaseName) throws IOException { if ("".equals( testCaseName )) { return; } String testPath = "/json/sortr/"+testCaseName; Map<String, Object> input = JsonUtils.classpathToMap(testPath + "/input.json"); Map<String, Object> expected = JsonUtils.classpathToMap( testPath + "/output.json" ); Sortr sortr = new Sortr(); Map<String, Object> actual = (Map<String, Object>) sortr.transform( input ); JoltTestUtil.runDiffy( "Make sure it is still the same object : " + testPath, expected, actual ); String orderErrorMessage = verifyOrder( actual, expected ); Assert.assertNull( orderErrorMessage, orderErrorMessage ); }
Sortr implements Transform { @SuppressWarnings( "unchecked" ) public static Object sortJson( Object obj ) { if ( obj instanceof Map ) { return sortMap( (Map<String, Object>) obj ); } else if ( obj instanceof List ) { return ordered( (List<Object>) obj ); } else { return obj; } } @Override Object transform( Object input ); @SuppressWarnings( "unchecked" ) static Object sortJson( Object obj ); }
@Test public void testDoesNotBlowUpOnUnmodifiableArray() { List<Object> hasNan = new ArrayList<>(); hasNan.add( 1 ); hasNan.add( Double.NaN ); hasNan.add( 2 ); Map<String,Object> map = new HashMap<>(); map.put("a", "shouldBeFirst"); map.put("hasNan", Collections.unmodifiableList( hasNan ) ); try { Sortr.sortJson( map ); } catch( UnsupportedOperationException uoe ) { Assert.fail( "Sort threw a UnsupportedOperationException" ); } }
Modifier implements SpecDriven, ContextualTransform { @Override public Object transform( final Object input, final Map<String, Object> context ) { Map<String, Object> contextWrapper = new HashMap<>( ); contextWrapper.put( ROOT_KEY, context ); MatchedElement rootLpe = new MatchedElement( ROOT_KEY ); WalkedPath walkedPath = new WalkedPath(); walkedPath.add( input, rootLpe ); rootSpec.apply( ROOT_KEY, Optional.of( input), walkedPath, null, contextWrapper ); return input; } @SuppressWarnings( "unchecked" ) private Modifier( Object spec, OpMode opMode, Map<String, Function> functionsMap ); @Override Object transform( final Object input, final Map<String, Object> context ); }
@Test public void testModifierFirstElementArray() throws IOException { Map<String, Object> input = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); }}; Map<String, Object> spec = new HashMap<String, Object>() {{ put("first", "=firstElement(@(1,input))"); }}; Map<String, Object> expected = new HashMap<String, Object>() {{ put("input", new Integer[]{5, 4}); put("first", 5); }}; Modifier modifier = new Modifier.Overwritr( spec ); Object actual = modifier.transform( input, null ); JoltTestUtil.runArrayOrderObliviousDiffy( "failed modifierFirstElementArray", expected, actual ); }
Math { public static Optional<Number> abs( Object arg ) { if(arg instanceof Integer) { return Optional.<Number>of( java.lang.Math.abs( (Integer) arg )); } else if(arg instanceof Double) { return Optional.<Number>of( java.lang.Math.abs( (Double) arg )); } else if(arg instanceof Long) { return Optional.<Number>of( java.lang.Math.abs( (Long) arg )); } else if(arg instanceof String) { return abs( Objects.toNumber( arg ).get() ); } return Optional.empty(); } static Optional<Number> max( List<Object> args ); static Optional<Number> min( List<Object> args ); static Optional<Number> abs( Object arg ); static Optional<Double> avg(List<Object> args); static Optional<Integer> intSum(List<Object> args); static Optional<Double> doubleSum(List<Object> args); static Optional<Long> longSum(List<Object> args); static Optional<Integer> intSubtract(List<Object> argList); static Optional<Double> doubleSubtract(List<Object> argList); static Optional<Long> longSubtract(List<Object> argList); static Optional<Double> divide(List<Object> argList); static Optional<Double> divideAndRound(List<Object> argList, int digitsAfterDecimalPoint ); }
@Test @SuppressWarnings( "all" ) public void testNitPicks() { Object c = (1.0 > 2 ? 1.0 : 2); assert c.getClass() == Double.class && c.equals( 2.0 ); assert toNumber("123").equals( Optional.of( 123 ) ); assert toNumber("123123123123123123").equals( Optional.of( 123123123123123123l ) ); assert toNumber("123123123123123123123123123123123123").equals( Optional.of( 123123123123123123123123123123123123d ) ); assert abs( "-123" ).equals( Optional.of( 123 )); assert abs("-123123123123123123").equals( Optional.of( 123123123123123123l ) ); assert abs("-123123123123123123123123123123123123").equals( Optional.of( 123123123123123123123123123123123123d ) ); }
JsonUtils { @Deprecated public static void removeRecursive( Object json, String keyToRemove ) { if ( ( json == null ) || ( keyToRemove == null ) ) { return; } if ( json instanceof Map ) { @SuppressWarnings("unchecked") Map<String, Object> jsonMap = (Map<String, Object>) json; if ( jsonMap.containsKey( keyToRemove ) ) { jsonMap.remove( keyToRemove ); } for ( Object value : jsonMap.values() ) { removeRecursive( value, keyToRemove ); } } if ( json instanceof List ) { for ( Object value : (List) json ) { removeRecursive( value, keyToRemove ); } } } static JsonUtil customJsonUtil( ObjectMapper mapper ); @Deprecated static void removeRecursive( Object json, String keyToRemove ); static Map<String, Object> javason( String javason ); static JsonUtil getDefaultJsonUtil(); static Object jsonToObject( String json ); static Object jsonToObject( String json, String charset ); static Object jsonToObject( InputStream in ); static Map<String, Object> jsonToMap( String json ); static Map<String, Object> jsonToMap( String json, String charset ); static Map<String, Object> jsonToMap( InputStream in ); static List<Object> jsonToList( String json ); static List<Object> jsonToList( String json, String charset ); static List<Object> jsonToList( InputStream in ); static Object filepathToObject( String filePath ); static Map<String, Object> filepathToMap( String filePath ); static List<Object> filepathToList( String filePath ); static Object classpathToObject( String classPath ); static Map<String, Object> classpathToMap( String classPath ); static List<Object> classpathToList( String classPath ); static T classpathToType( String classPath, TypeReference<T> typeRef ); static T classpathToType( String classPath, Class<T> aClass ); static T stringToType( String json, TypeReference<T> typeRef ); static T stringToType( String json, Class<T> aClass ); static T fileToType( String filePath, TypeReference<T> typeRef ); static T fileToType( String filePath, Class<T> aClass ); static T streamToType( InputStream in, TypeReference<T> typeRef ); static T streamToType( InputStream in, Class<T> aClass ); @Deprecated static T jsonTo( String json, TypeReference<T> typeRef ); @Deprecated static T jsonTo( InputStream in, TypeReference<T> typeRef ); static String toJsonString( Object obj ); static String toPrettyJsonString( Object obj ); static Object cloneJson( Object obj ); @SuppressWarnings("unchecked") @Deprecated static T navigate(Object source, Object... paths); }
@Test(dataProvider = "removeRecursiveCases") @SuppressWarnings("deprecation") public void testRemoveRecursive(Object json, String key, Object expected) throws IOException { JsonUtils.removeRecursive( json, key ); Diffy.Result result = diffy.diff( expected, json ); if (!result.isEmpty()) { Assert.fail( "Failed.\nhere is a diff:\nexpected: " + JsonUtils.toJsonString( result.expected ) + "\n actual: " + JsonUtils.toJsonString( result.actual ) ); } }
JsonUtils { public static Map<String, Object> jsonToMap( String json ) { return util.jsonToMap( json ); } static JsonUtil customJsonUtil( ObjectMapper mapper ); @Deprecated static void removeRecursive( Object json, String keyToRemove ); static Map<String, Object> javason( String javason ); static JsonUtil getDefaultJsonUtil(); static Object jsonToObject( String json ); static Object jsonToObject( String json, String charset ); static Object jsonToObject( InputStream in ); static Map<String, Object> jsonToMap( String json ); static Map<String, Object> jsonToMap( String json, String charset ); static Map<String, Object> jsonToMap( InputStream in ); static List<Object> jsonToList( String json ); static List<Object> jsonToList( String json, String charset ); static List<Object> jsonToList( InputStream in ); static Object filepathToObject( String filePath ); static Map<String, Object> filepathToMap( String filePath ); static List<Object> filepathToList( String filePath ); static Object classpathToObject( String classPath ); static Map<String, Object> classpathToMap( String classPath ); static List<Object> classpathToList( String classPath ); static T classpathToType( String classPath, TypeReference<T> typeRef ); static T classpathToType( String classPath, Class<T> aClass ); static T stringToType( String json, TypeReference<T> typeRef ); static T stringToType( String json, Class<T> aClass ); static T fileToType( String filePath, TypeReference<T> typeRef ); static T fileToType( String filePath, Class<T> aClass ); static T streamToType( InputStream in, TypeReference<T> typeRef ); static T streamToType( InputStream in, Class<T> aClass ); @Deprecated static T jsonTo( String json, TypeReference<T> typeRef ); @Deprecated static T jsonTo( InputStream in, TypeReference<T> typeRef ); static String toJsonString( Object obj ); static String toPrettyJsonString( Object obj ); static Object cloneJson( Object obj ); @SuppressWarnings("unchecked") @Deprecated static T navigate(Object source, Object... paths); }
@Test public void validateJacksonClosesInputStreams() { final Set<String> closedSet = new HashSet<>(); ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream( "{ \"a\" : \"b\" }".getBytes() ) { @Override public void close() throws IOException { closedSet.add("closed"); super.close(); } }; Map<String,Object> map = JsonUtils.jsonToMap( byteArrayInputStream ); Assert.assertNotNull( map ); Assert.assertEquals( 1, map.size() ); Assert.assertEquals( 1, closedSet.size() ); }
JsonUtils { @SuppressWarnings("unchecked") @Deprecated public static <T> T navigate(Object source, Object... paths) throws NullPointerException, UnsupportedOperationException { Object destination = source; for (Object path : paths) { if(destination == null) throw new NullPointerException("Navigation not possible on null object"); if(destination instanceof Map) destination = ((Map) destination).get(path); else if(path instanceof Integer && destination instanceof List) destination = ((List) destination).get((Integer)path); else throw new UnsupportedOperationException("Navigation supports only Map and List source types and non-null String and Integer path types"); } return (T) destination; } static JsonUtil customJsonUtil( ObjectMapper mapper ); @Deprecated static void removeRecursive( Object json, String keyToRemove ); static Map<String, Object> javason( String javason ); static JsonUtil getDefaultJsonUtil(); static Object jsonToObject( String json ); static Object jsonToObject( String json, String charset ); static Object jsonToObject( InputStream in ); static Map<String, Object> jsonToMap( String json ); static Map<String, Object> jsonToMap( String json, String charset ); static Map<String, Object> jsonToMap( InputStream in ); static List<Object> jsonToList( String json ); static List<Object> jsonToList( String json, String charset ); static List<Object> jsonToList( InputStream in ); static Object filepathToObject( String filePath ); static Map<String, Object> filepathToMap( String filePath ); static List<Object> filepathToList( String filePath ); static Object classpathToObject( String classPath ); static Map<String, Object> classpathToMap( String classPath ); static List<Object> classpathToList( String classPath ); static T classpathToType( String classPath, TypeReference<T> typeRef ); static T classpathToType( String classPath, Class<T> aClass ); static T stringToType( String json, TypeReference<T> typeRef ); static T stringToType( String json, Class<T> aClass ); static T fileToType( String filePath, TypeReference<T> typeRef ); static T fileToType( String filePath, Class<T> aClass ); static T streamToType( InputStream in, TypeReference<T> typeRef ); static T streamToType( InputStream in, Class<T> aClass ); @Deprecated static T jsonTo( String json, TypeReference<T> typeRef ); @Deprecated static T jsonTo( InputStream in, TypeReference<T> typeRef ); static String toJsonString( Object obj ); static String toPrettyJsonString( Object obj ); static Object cloneJson( Object obj ); @SuppressWarnings("unchecked") @Deprecated static T navigate(Object source, Object... paths); }
@Test (dataProvider = "coordinates") @SuppressWarnings("deprecation") public void navigator(Object expected, Object[] path) throws Exception { Object actual = JsonUtils.navigate(jsonSource, path); Assert.assertEquals(actual, expected); }
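A self-contained illustration of the deprecated navigate helper shown above, using an inline source instead of the shared jsonSource fixture.
@Test @SuppressWarnings("deprecation") public void navigateInlineExample() {
    Map<String, Object> source = new HashMap<>();
    source.put("colors", Arrays.asList("red", "green"));
    String color = JsonUtils.navigate(source, "colors", 1); // Map lookup by key, then List lookup by index
    Assert.assertEquals(color, "green");
}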
ArrayOrderObliviousDiffy extends Diffy { public ArrayOrderObliviousDiffy(JsonUtil jsonUtil) { super(jsonUtil); } ArrayOrderObliviousDiffy(JsonUtil jsonUtil); ArrayOrderObliviousDiffy(); }
@Test(dataProvider = "testCases") public void ArrayOrderObliviousDiffy(String testCase) throws Exception { Object expected = JsonUtils.classpathToObject("/jsonUtils/" + testCase + "/expected.json"); Object actual = JsonUtils.classpathToObject("/jsonUtils/" + testCase + "/actual.json"); Diffy.Result result = unit.diff(expected, actual); Assert.assertTrue(result.isEmpty(), result.toString()); }
JoltCli { protected static boolean runJolt( String[] args ) { ArgumentParser parser = ArgumentParsers.newArgumentParser( "jolt" ); Subparsers subparsers = parser.addSubparsers().help( "transform: given a Jolt transform spec, runs the specified transforms on the input data.\n" + "diffy: diff two JSON documents.\n" + "sort: sort a JSON document alphabetically for human readability." ); for ( Map.Entry<String, JoltCliProcessor> entry : JOLT_CLI_PROCESSOR_MAP.entrySet() ) { entry.getValue().intializeSubCommand( subparsers ); } Namespace ns; try { ns = parser.parseArgs( args ); } catch ( ArgumentParserException e ) { parser.handleError( e ); return false; } JoltCliProcessor joltToolProcessor = JOLT_CLI_PROCESSOR_MAP.get( args[0] ); if ( joltToolProcessor != null ) { return joltToolProcessor.process( ns ); } else { return false; } } static void main( String[] args ); }
@Test public void testRunJolt() throws IOException { String path = System.getProperty( "user.dir" ); if ( path.endsWith( "cli" ) ) { path += /* test-resource directory suffix missing from this excerpt */ ""; } else { path += /* test-resource directory suffix missing from this excerpt */ ""; } Assert.assertTrue( JoltCli.runJolt( new String[] {"diffy", path + "input1.json", path + "input1.json", "-s"} ) ); Assert.assertFalse( JoltCli.runJolt( new String[] {"diffy", path + "input1.json", path + "input2.json", "-s"} ) ); Assert.assertTrue( JoltCli.runJolt( new String[] {"sort", path + "input1.json"} ) ); Assert.assertTrue( JoltCli.runJolt( new String[] {"transform", path + "spec.json", path + "transformInput.json"} ) ); }
ChainrFactory { public static Chainr fromFileSystem( String chainrSpecFilePath ) { return fromFileSystem( chainrSpecFilePath, null ); } static Chainr fromClassPath( String chainrSpecClassPath ); static Chainr fromClassPath( String chainrSpecClassPath, ChainrInstantiator chainrInstantiator ); static Chainr fromFileSystem( String chainrSpecFilePath ); static Chainr fromFileSystem( String chainrSpecFilePath, ChainrInstantiator chainrInstantiator ); static Chainr fromFile( File chainrSpecFile ); static Chainr fromFile( File chainrSpecFile, ChainrInstantiator chainrInstantiator ); }
@Test public void testGetChainrInstanceFromFileSystem_success() throws Exception { Chainr result = ChainrFactory.fromFileSystem( fileSystemPath + WELLFORMED_INPUT_FILENAME ); Assert.assertNotNull( result, "ChainrFactory did not return an instance of Chainr." ); } @Test( expectedExceptions = JsonUnmarshalException.class, expectedExceptionsMessageRegExp = "Unable to unmarshal JSON.*" ) public void testGetChainrInstanceFromFileSystem_error() throws Exception { ChainrFactory.fromFileSystem( fileSystemPath + MALFORMED_INPUT_FILENAME ); } @Test public void testGetChainrInstanceFromFileSystemWithInstantiator_success() throws Exception { Chainr result = ChainrFactory.fromFileSystem( fileSystemPath + WELLFORMED_INPUT_FILENAME, new DefaultChainrInstantiator() ); Assert.assertNotNull( result, "ChainrFactory did not return an instance of Chainr." ); } @Test(expectedExceptions = JsonUnmarshalException.class, expectedExceptionsMessageRegExp = "Unable to unmarshal JSON.*") public void testGetChainrInstanceFromFileSystemWithInstantiator_error() throws Exception { ChainrFactory.fromFileSystem( fileSystemPath + MALFORMED_INPUT_FILENAME, new DefaultChainrInstantiator() ); }
LeastAdjustmentMetaServerAssigningStrategy implements MetaServerAssigningStrategy { @Override public Assignment<String> assign(List<Server> metaServers, List<Topic> topics, Assignment<String> originAssignments) { Assignment<String> newAssignments = new Assignment<>(); if (metaServers == null || metaServers.isEmpty() || topics == null || topics.isEmpty()) { return newAssignments; } if (originAssignments == null) { originAssignments = new Assignment<>(); } Set<String> topicNames = new HashSet<>(); for (Topic topic : topics) { topicNames.add(topic.getName()); } Map<String, Server> currentMetaServers = new HashMap<>(); for (Server server : metaServers) { currentMetaServers.put(server.getId(), server); } Map<String, List<String>> originMetaServerToTopic = mapMetaServerToTopics(topicNames, currentMetaServers, originAssignments); List<String> freeTopics = findFreeTopics(topics, originMetaServerToTopic); Map<String, List<String>> newAssins = m_assignBalancer.assign(originMetaServerToTopic, freeTopics); for (Entry<String, List<String>> entry : newAssins.entrySet()) { putAssignToResult(newAssignments, currentMetaServers, entry.getKey(), entry.getValue()); } return newAssignments; } @Override Assignment<String> assign(List<Server> metaServers, List<Topic> topics, Assignment<String> originAssignments); }
@Test public void AddAndDeleteMetaServerTest() { LeastAdjustmentMetaServerAssigningStrategy strategy = new LeastAdjustmentMetaServerAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentMetaServerAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); List<Server> metaServers = new ArrayList<>(); metaServers.add(new Server("a").setPort(1)); metaServers.add(new Server("c").setPort(1)); metaServers.add(new Server("d").setPort(1)); metaServers.add(new Server("e").setPort(1)); List<Topic> topics = new ArrayList<>(); for (int i = 0; i < 6; i++) { topics.add(new Topic(String.valueOf(i))); } Assignment<String> originAssignments = new Assignment<>(); final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L); final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L); final ClientContext c = new ClientContext("c", "a", 1, "a", "a", 1L); originAssignments.addAssignment("0", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("1", new HashMap<String, ClientContext>() { { put("b", b); } }); originAssignments.addAssignment("2", new HashMap<String, ClientContext>() { { put("c", c); } }); originAssignments.addAssignment("3", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("4", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("5", new HashMap<String, ClientContext>() { { put("b", b); } }); Assignment<String> newAssigns = strategy.assign(metaServers, topics, originAssignments); System.out.println("AddAndDeleteMetaServerTest:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current MetaServers:"); for (Server server : metaServers) { System.out.println(server.getId()); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Origin Assignments:"); for (Entry<String, Map<String, ClientContext>> assign : originAssignments.getAssignments().entrySet()) { System.out.println(assign); } System.out.println("---------------------------------------------------------------"); System.out.println("New Assignments:"); for (Entry<String, Map<String, ClientContext>> assign : newAssigns.getAssignments().entrySet()) { System.out.println(assign); } normalAssert(metaServers, topics, newAssigns); Set<String> inuseMetaServers = new HashSet<>(); for (Entry<String, Map<String, ClientContext>> topicAssign : newAssigns.getAssignments().entrySet()) { String metaServer = topicAssign.getValue().entrySet().iterator().next().getKey(); inuseMetaServers.add(metaServer); } assertFalse(inuseMetaServers.contains("b")); assertTrue(inuseMetaServers.contains("d")); assertTrue(inuseMetaServers.contains("e")); } @Test public void AddAndDeleteTopicTest() { LeastAdjustmentMetaServerAssigningStrategy strategy = new LeastAdjustmentMetaServerAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentMetaServerAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); List<Server> metaServers = new ArrayList<>(); metaServers.add(new Server("a").setPort(1)); metaServers.add(new Server("b").setPort(1)); metaServers.add(new Server("c").setPort(1)); List<Topic> topics = new ArrayList<>(); for (int i = 0; i < 4; i++) { topics.add(new Topic(String.valueOf(i))); } 
topics.add(new Topic("10")); topics.add(new Topic("11")); topics.add(new Topic("12")); Assignment<String> originAssignments = new Assignment<>(); final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L); final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L); final ClientContext c = new ClientContext("c", "a", 1, "a", "a", 1L); originAssignments.addAssignment("0", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("1", new HashMap<String, ClientContext>() { { put("b", b); } }); originAssignments.addAssignment("2", new HashMap<String, ClientContext>() { { put("c", c); } }); originAssignments.addAssignment("3", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("4", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("5", new HashMap<String, ClientContext>() { { put("b", b); } }); Assignment<String> newAssigns = strategy.assign(metaServers, topics, originAssignments); System.out.println("AddAndDeleteTopicTest:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Topics:"); for (Topic topic : topics) { System.out.println(topic.getName()); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Origin Assignments:"); for (Entry<String, Map<String, ClientContext>> assign : originAssignments.getAssignments().entrySet()) { System.out.println(assign); } System.out.println("---------------------------------------------------------------"); System.out.println("New Assignments:"); for (Entry<String, Map<String, ClientContext>> assign : newAssigns.getAssignments().entrySet()) { System.out.println(assign); } normalAssert(metaServers, topics, newAssigns); assertTrue(newAssigns.getAssignment("5") == null); assertTrue(newAssigns.getAssignment("10") != null); assertTrue(newAssigns.getAssignment("11") != null); assertTrue(newAssigns.getAssignment("12") != null); } @Test public void AddAndDeleteTopicAndMetaServerTest() { LeastAdjustmentMetaServerAssigningStrategy strategy = new LeastAdjustmentMetaServerAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentMetaServerAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); List<Server> metaServers = new ArrayList<>(); metaServers.add(new Server("a").setPort(1)); metaServers.add(new Server("c").setPort(1)); metaServers.add(new Server("d").setPort(1)); metaServers.add(new Server("e").setPort(1)); List<Topic> topics = new ArrayList<>(); for (int i = 0; i < 4; i++) { topics.add(new Topic(String.valueOf(i))); } topics.add(new Topic("10")); topics.add(new Topic("11")); topics.add(new Topic("12")); Assignment<String> originAssignments = new Assignment<>(); final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L); final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L); final ClientContext c = new ClientContext("c", "a", 1, "a", "a", 1L); originAssignments.addAssignment("0", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("1", new HashMap<String, ClientContext>() { { put("b", b); } }); originAssignments.addAssignment("2", new HashMap<String, ClientContext>() { { put("c", c); } }); originAssignments.addAssignment("3", new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("4", 
new HashMap<String, ClientContext>() { { put("a", a); } }); originAssignments.addAssignment("5", new HashMap<String, ClientContext>() { { put("b", b); } }); Assignment<String> newAssigns = strategy.assign(metaServers, topics, originAssignments); System.out.println("AddAndDeleteTopicAndMetaServerTest:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current MetaServers:"); for (Server server : metaServers) { System.out.print(server.getId() + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Current Topics:"); for (Topic topic : topics) { System.out.print(topic.getName() + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Origin Assignments:"); for (Entry<String, Map<String, ClientContext>> assign : originAssignments.getAssignments().entrySet()) { System.out.println(assign); } System.out.println("---------------------------------------------------------------"); System.out.println("New Assignments:"); for (Entry<String, Map<String, ClientContext>> assign : newAssigns.getAssignments().entrySet()) { System.out.println(assign); } normalAssert(metaServers, topics, newAssigns); Set<String> inuseMetaServers = new HashSet<>(); for (Entry<String, Map<String, ClientContext>> topicAssign : newAssigns.getAssignments().entrySet()) { String metaServer = topicAssign.getValue().entrySet().iterator().next().getKey(); inuseMetaServers.add(metaServer); } assertFalse(inuseMetaServers.contains("b")); assertTrue(inuseMetaServers.contains("d")); assertTrue(inuseMetaServers.contains("e")); assertTrue(newAssigns.getAssignment("5") == null); assertTrue(newAssigns.getAssignment("10") != null); assertTrue(newAssigns.getAssignment("11") != null); assertTrue(newAssigns.getAssignment("12") != null); }
LeastAdjustmentBrokerPartitionAssigningStrategy implements BrokerPartitionAssigningStrategy { @Override public Map<String, Assignment<Integer>> assign(Map<String, ClientContext> brokers, List<Topic> topics, Map<String, Assignment<Integer>> originAssignments) { Map<String, Assignment<Integer>> newAssignments = new HashMap<String, Assignment<Integer>>(); if (brokers == null || brokers.isEmpty() || topics == null || topics.isEmpty()) { return newAssignments; } Map<String, List<Pair<String, Integer>>> avalibleOriginAssignments = new HashMap<String, List<Pair<String, Integer>>>(); List<Pair<String, Integer>> freeTps = new ArrayList<Pair<String, Integer>>(); for (String broker : brokers.keySet()) { avalibleOriginAssignments.put(broker, new ArrayList<Pair<String, Integer>>()); } for (Topic topic : topics) { if (originAssignments.containsKey(topic.getName())) { Assignment<Integer> topicOriginAssignments = originAssignments.get(topic.getName()); for (Partition partition : topic.getPartitions()) { Map<String, ClientContext> originTPAssianment = topicOriginAssignments.getAssignment(partition.getId()); if (originTPAssianment != null) { if (originTPAssianment.size() != 1) { log.error("TP have more than one broker assigned"); } ClientContext broker = originTPAssianment.values().iterator().next(); if (brokers.containsKey(broker.getName())) { avalibleOriginAssignments.get(broker.getName()).add( new Pair<String, Integer>(topic.getName(), partition.getId())); continue; } } freeTps.add(new Pair<String, Integer>(topic.getName(), partition.getId())); } } else { for (Partition partition : topic.getPartitions()) { freeTps.add(new Pair<String, Integer>(topic.getName(), partition.getId())); } } } Map<String, List<Pair<String, Integer>>> newAssigns = m_assignBalancer.assign(avalibleOriginAssignments, freeTps); for (Entry<String, List<Pair<String, Integer>>> assign : newAssigns.entrySet()) { for (Pair<String, Integer> tp : assign.getValue()) { if (!newAssignments.containsKey(tp.getKey())) { newAssignments.put(tp.getKey(), new Assignment<Integer>()); } Map<String, ClientContext> broker = new HashMap<String, ClientContext>(); broker.put(assign.getKey(), brokers.get(assign.getKey())); newAssignments.get(tp.getKey()).addAssignment(tp.getValue(), broker); } } return newAssignments; } @Override Map<String, Assignment<Integer>> assign(Map<String, ClientContext> brokers, List<Topic> topics, Map<String, Assignment<Integer>> originAssignments); }
@Test public void AddAndDeleteTPTest() { LeastAdjustmentBrokerPartitionAssigningStrategy strategy = new LeastAdjustmentBrokerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentBrokerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); Map<String, ClientContext> brokers = generateBrokers(5, "b"); Map<String, Assignment<Integer>> originAssignments = new HashMap<>(); Map<String, Topic> topicMap = generateTopicsWithIncrementalPartitionCount(10, "t"); List<Topic> topics = new ArrayList<>(topicMap.values()); int brokerPos = 0; int brokerListSize = brokers.size(); for (int i = 1; i <= 10; i++) { originAssignments.put("t" + i, new Assignment<Integer>()); Assignment<Integer> assignment = originAssignments.get("t" + i); for (int j = 0; j < i; j++) { Map<String, ClientContext> brokerAssign = new HashMap<>(); brokerAssign.put("b" + (brokerPos + 1), brokers.get("b" + (brokerPos + 1))); brokerPos = (brokerPos + 1) % brokerListSize; assignment.addAssignment(j, brokerAssign); } } topics.remove(new Topic("t1")); Topic t = new Topic("t11"); for (int i = 0; i < 10; i++) { t.addPartition(new Partition(i)); } topics.add(t); t = new Topic("t12"); for (int i = 0; i < 10; i++) { t.addPartition(new Partition(i)); } topics.add(t); Map<String, Assignment<Integer>> newAssignments = strategy.assign(brokers, topics, originAssignments); System.out.println("AddAndDeleteBrokerTest:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Topics:"); for (Topic topic : topics) { System.out.print(topic.getName() + " "); } System.out.println(); printResult(originAssignments, newAssignments); normalAssert(brokers, topics, newAssignments); assertFalse(newAssignments.keySet().contains("t1")); assertTrue(newAssignments.keySet().contains("t11")); assertTrue(newAssignments.keySet().contains("t12")); } @Test public void AddAndDeleteBrokerAndTPTest() { LeastAdjustmentBrokerPartitionAssigningStrategy strategy = new LeastAdjustmentBrokerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentBrokerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); Map<String, ClientContext> brokers = generateBrokers(5, "b"); Map<String, Assignment<Integer>> originAssignments = new HashMap<>(); Map<String, Topic> topicMap = generateTopicsWithIncrementalPartitionCount(10, "t"); List<Topic> topics = new ArrayList<>(topicMap.values()); int brokerPos = 0; int brokerListSize = brokers.size(); for (int i = 1; i <= 10; i++) { originAssignments.put("t" + i, new Assignment<Integer>()); Assignment<Integer> assignment = originAssignments.get("t" + i); for (int j = 0; j < i; j++) { Map<String, ClientContext> brokerAssign = new HashMap<>(); brokerAssign.put("b" + (brokerPos + 1), brokers.get("b" + (brokerPos + 1))); brokerPos = (brokerPos + 1) % brokerListSize; assignment.addAssignment(j, brokerAssign); } } brokers.remove("b1"); brokers.put("b7", new ClientContext("b7", "a", 1, "a", "a", 1L)); topics.remove(new Topic("t10")); Topic t = new Topic("t11"); for (int i = 0; i < 10; i++) { t.addPartition(new Partition(i)); } topics.add(t); Map<String, Assignment<Integer>> newAssignments = strategy.assign(brokers, topics, originAssignments); System.out.println("AddAndDeleteBrokerTest:"); 
System.out.println("---------------------------------------------------------------"); System.out.println("Current Brokers:"); for (String broker : brokers.keySet()) { System.out.print(broker + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Current Topics:"); for (Topic topic : topics) { System.out.print(topic.getName() + " "); } System.out.println(); printResult(originAssignments, newAssignments); normalAssert(brokers, topics, newAssignments); assertFalse(newAssignments.keySet().contains("t10")); assertTrue(newAssignments.keySet().contains("t11")); Set<ClientContext> inuseBrokers = new HashSet<>(); for (Entry<String, Assignment<Integer>> assign : newAssignments.entrySet()) { Map<Integer, Map<String, ClientContext>> pAssign = assign.getValue().getAssignments(); for (Map<String, ClientContext> broker : pAssign.values()) { inuseBrokers.addAll(broker.values()); } } assertFalse(inuseBrokers.contains(brokers.get("b1"))); assertTrue(inuseBrokers.contains(brokers.get("b7"))); } @Test public void AddAndDeletPartitionTest() { LeastAdjustmentBrokerPartitionAssigningStrategy strategy = new LeastAdjustmentBrokerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentBrokerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); Map<String, ClientContext> brokers = generateBrokers(5, "b"); Map<String, Assignment<Integer>> originAssignments = new HashMap<>(); Map<String, Topic> topicMap = generateTopicsWithIncrementalPartitionCount(10, "t"); List<Topic> topics = new ArrayList<>(topicMap.values()); int brokerPos = 0; int brokerListSize = brokers.size(); for (int i = 1; i <= 10; i++) { originAssignments.put("t" + i, new Assignment<Integer>()); Assignment<Integer> assignment = originAssignments.get("t" + i); for (int j = 0; j < i; j++) { Map<String, ClientContext> brokerAssign = new HashMap<>(); brokerAssign.put("b" + (brokerPos + 1), brokers.get("b" + (brokerPos + 1))); brokerPos = (brokerPos + 1) % brokerListSize; assignment.addAssignment(j, brokerAssign); } } Topic t9 = topicMap.get("t9"); t9.removePartition(5); Topic t10 = topicMap.get("t10"); t10.addPartition(new Partition(15)); Map<String, Assignment<Integer>> newAssignments = strategy.assign(brokers, topics, originAssignments); System.out.println("AddAndDeleteBrokerTest:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Brokers:"); for (String broker : brokers.keySet()) { System.out.print(broker + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Current Topics:"); for (Topic topic : topics) { System.out.print(topic.getName() + " "); } System.out.println(); printResult(originAssignments, newAssignments); normalAssert(brokers, topics, newAssignments); assertTrue(newAssignments.get("t9").getAssignment(5) == null); assertTrue(newAssignments.get("t10").getAssignment(15) != null); } @Test public void AddAndDeleteBrokerTest() { LeastAdjustmentBrokerPartitionAssigningStrategy strategy = new LeastAdjustmentBrokerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentBrokerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); Map<String, ClientContext> brokers = 
generateBrokers(5, "b"); Map<String, Topic> topicMap = generateTopicsWithIncrementalPartitionCount(10, "t"); List<Topic> topics = new ArrayList<>(topicMap.values()); Map<String, Assignment<Integer>> originAssignments = new HashMap<>(); int brokerPos = 0; int brokerListSize = brokers.size(); for (int i = 1; i <= 10; i++) { originAssignments.put("t" + i, new Assignment<Integer>()); Assignment<Integer> assignment = originAssignments.get("t" + i); for (int j = 0; j < i; j++) { Map<String, ClientContext> brokerAssign = new HashMap<>(); brokerAssign.put("b" + (brokerPos + 1), brokers.get("b" + (brokerPos + 1))); brokerPos = (brokerPos + 1) % brokerListSize; assignment.addAssignment(j, brokerAssign); } } brokers.remove("b1"); brokers.put("b6", new ClientContext("b6", "a", 1, "a", "a", 1L)); brokers.put("b7", new ClientContext("b7", "a", 1, "a", "a", 1L)); Map<String, Assignment<Integer>> newAssignments = strategy.assign(brokers, topics, originAssignments); System.out.println("AddAndDeleteBrokerTest:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Brokers:"); for (String broker : brokers.keySet()) { System.out.print(broker + " "); } System.out.println(); printResult(originAssignments, newAssignments); normalAssert(brokers, topics, newAssignments); Set<ClientContext> inuseBrokers = new HashSet<>(); for (Entry<String, Assignment<Integer>> assign : newAssignments.entrySet()) { Map<Integer, Map<String, ClientContext>> pAssign = assign.getValue().getAssignments(); for (Map<String, ClientContext> broker : pAssign.values()) { inuseBrokers.addAll(broker.values()); } } assertFalse(inuseBrokers.contains(brokers.get("b1"))); assertTrue(inuseBrokers.contains(brokers.get("b6"))); assertTrue(inuseBrokers.contains(brokers.get("b7"))); }
LeastAdjustmentConsumerPartitionAssigningStrategy implements ConsumerPartitionAssigningStrategy { @Override public Map<Integer, Map<String, ClientContext>> assign(List<Partition> partitions, Map<String, ClientContext> currentConsumers, Map<Integer, Map<String, ClientContext>> originAssigns) { Map<Integer, Map<String, ClientContext>> result = new HashMap<>(); if (partitions == null || partitions.isEmpty() || currentConsumers == null || currentConsumers.isEmpty()) { return result; } if (originAssigns == null) { originAssigns = Collections.emptyMap(); } Map<String, List<Integer>> originConsumerToPartition = mapConsumerToPartitions(partitions, currentConsumers, originAssigns); List<Integer> freePartitions = getFreePartitions(originConsumerToPartition, partitions); Map<String, List<Integer>> newAssigns = m_assignBalancer.assign(originConsumerToPartition, freePartitions); for (Entry<String, List<Integer>> assign : newAssigns.entrySet()) { putAssignToResult(result, currentConsumers, assign.getKey(), assign.getValue()); } return result; } @Override Map<Integer, Map<String, ClientContext>> assign(List<Partition> partitions, Map<String, ClientContext> currentConsumers, Map<Integer, Map<String, ClientContext>> originAssigns); }
@Test public void testPartitionAddAndDelete() { LeastAdjustmentConsumerPartitionAssigningStrategy strategy = new LeastAdjustmentConsumerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentConsumerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); List<Partition> partitions = new ArrayList<>(); for (int i = 1; i < 10; i++) { Partition p = new Partition(i); partitions.add(p); } Map<String, ClientContext> currentConsumers = new HashMap<>(); final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L); final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L); final ClientContext c = new ClientContext("c", "a", 1, "a", "a", 1L); final ClientContext d = new ClientContext("d", "a", 1, "a", "a", 1L); final ClientContext e = new ClientContext("e", "a", 1, "a", "a", 1L); currentConsumers.put("a", a); currentConsumers.put("b", b); currentConsumers.put("c", c); currentConsumers.put("d", d); currentConsumers.put("e", e); Map<Integer, Map<String, ClientContext>> originAssigns = new HashMap<>(); originAssigns.put(0, new HashMap<String, ClientContext>() { { put("a", a); } }); originAssigns.put(1, new HashMap<String, ClientContext>() { { put("b", b); } }); originAssigns.put(2, new HashMap<String, ClientContext>() { { put("c", c); } }); originAssigns.put(3, new HashMap<String, ClientContext>() { { put("d", d); } }); originAssigns.put(4, new HashMap<String, ClientContext>() { { put("e", e); } }); originAssigns.put(5, new HashMap<String, ClientContext>() { { put("e", a); } }); Map<Integer, Map<String, ClientContext>> newAssigns = strategy .assign(partitions, currentConsumers, originAssigns); System.out.println("TestPartitionAddAndDelete:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Partitions:"); for (Partition partition : partitions) { System.out.print(partition.getId() + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Origin Assignments:"); for (Entry<Integer, Map<String, ClientContext>> assign : originAssigns.entrySet()) { System.out.println(assign); } System.out.println("---------------------------------------------------------------"); System.out.println("New Assignments:"); for (Entry<Integer, Map<String, ClientContext>> assign : newAssigns.entrySet()) { System.out.println(assign); } normalAssert(partitions, currentConsumers, newAssigns); } @Test public void testConsumerAddAndDelete() { LeastAdjustmentConsumerPartitionAssigningStrategy strategy = new LeastAdjustmentConsumerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentConsumerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); List<Partition> partitions = new ArrayList<>(); for (int i = 0; i < 10; i++) { Partition p = new Partition(i); partitions.add(p); } Map<String, ClientContext> currentConsumers = new HashMap<>(); final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L); final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L); final ClientContext c = new ClientContext("c", "a", 1, "a", "a", 1L); final ClientContext d = new ClientContext("d", "a", 1, "a", "a", 1L); final ClientContext e = new ClientContext("e", "a", 1, "a", "a", 1L); final ClientContext f = new ClientContext("f", 
"a", 1, "a", "a", 1L); currentConsumers.put("a", a); currentConsumers.put("b", b); currentConsumers.put("c", c); currentConsumers.put("f", f); Map<Integer, Map<String, ClientContext>> originAssigns = new HashMap<>(); originAssigns.put(0, new HashMap<String, ClientContext>() { { put("a", a); } }); originAssigns.put(1, new HashMap<String, ClientContext>() { { put("b", b); } }); originAssigns.put(2, new HashMap<String, ClientContext>() { { put("c", c); } }); originAssigns.put(3, new HashMap<String, ClientContext>() { { put("d", d); } }); originAssigns.put(4, new HashMap<String, ClientContext>() { { put("e", e); } }); originAssigns.put(5, new HashMap<String, ClientContext>() { { put("a", a); } }); Map<Integer, Map<String, ClientContext>> newAssigns = strategy .assign(partitions, currentConsumers, originAssigns); System.out.println("TestConsumerAddAndDelete:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Consumers:"); for (Entry<String, ClientContext> consumer : currentConsumers.entrySet()) { System.out.print(consumer.getKey() + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Origin Assignments:"); for (Entry<Integer, Map<String, ClientContext>> assign : originAssigns.entrySet()) { System.out.println(assign); } System.out.println("---------------------------------------------------------------"); System.out.println("New Assignments:"); for (Entry<Integer, Map<String, ClientContext>> assign : newAssigns.entrySet()) { System.out.println(assign); } normalAssert(partitions, currentConsumers, newAssigns); } @Test public void testParitionAndConsumerAddAndDelete() { LeastAdjustmentConsumerPartitionAssigningStrategy strategy = new LeastAdjustmentConsumerPartitionAssigningStrategy(); AssignBalancer assignBalancer = new LeastAdjustmentAssianBalancer(); Reflects.forField().setDeclaredFieldValue(LeastAdjustmentConsumerPartitionAssigningStrategy.class, "m_assignBalancer", strategy, assignBalancer); List<Partition> partitions = new ArrayList<>(); for (int i = 0; i < 3; i++) { Partition p = new Partition(i); partitions.add(p); } partitions.add(new Partition(5)); partitions.add(new Partition(10)); partitions.add(new Partition(11)); partitions.add(new Partition(12)); Map<String, ClientContext> currentConsumers = new HashMap<>(); final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L); final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L); final ClientContext c = new ClientContext("c", "a", 1, "a", "a", 1L); final ClientContext d = new ClientContext("d", "a", 1, "a", "a", 1L); final ClientContext e = new ClientContext("e", "a", 1, "a", "a", 1L); final ClientContext f = new ClientContext("f", "a", 1, "a", "a", 1L); currentConsumers.put("a", a); currentConsumers.put("b", b); currentConsumers.put("c", c); currentConsumers.put("f", f); Map<Integer, Map<String, ClientContext>> originAssigns = new HashMap<>(); originAssigns.put(0, new HashMap<String, ClientContext>() { { put("a", a); } }); originAssigns.put(1, new HashMap<String, ClientContext>() { { put("b", b); } }); originAssigns.put(2, new HashMap<String, ClientContext>() { { put("c", c); } }); originAssigns.put(3, new HashMap<String, ClientContext>() { { put("d", d); } }); originAssigns.put(4, new HashMap<String, ClientContext>() { { put("e", e); } }); originAssigns.put(5, new HashMap<String, ClientContext>() { { put("a", a); } }); Map<Integer, Map<String, ClientContext>> 
newAssigns = strategy .assign(partitions, currentConsumers, originAssigns); System.out.println("TestConsumerAddAndDelete:"); System.out.println("---------------------------------------------------------------"); System.out.println("Current Consumers:"); for (Entry<String, ClientContext> consumer : currentConsumers.entrySet()) { System.out.print(consumer.getKey() + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Current Partitions:"); for (Partition partition : partitions) { System.out.print(partition.getId() + " "); } System.out.println(); System.out.println("---------------------------------------------------------------"); System.out.println("Origin Assignments:"); for (Entry<Integer, Map<String, ClientContext>> assign : originAssigns.entrySet()) { System.out.println(assign); } System.out.println("---------------------------------------------------------------"); System.out.println("New Assignments:"); for (Entry<Integer, Map<String, ClientContext>> assign : newAssigns.entrySet()) { System.out.println(assign); } normalAssert(partitions, currentConsumers, newAssigns); }
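The consumer-partition variant follows the same shape. The sketch below (untested, placeholder names) shows the smallest input that exercises both paths of the focal assign(): a partition whose original consumer is still active keeps it, and the remaining partitions are treated as free and redistributed.

// Fragment; assumes the same imports as the tests above.
LeastAdjustmentConsumerPartitionAssigningStrategy strategy = new LeastAdjustmentConsumerPartitionAssigningStrategy();
Reflects.forField().setDeclaredFieldValue(LeastAdjustmentConsumerPartitionAssigningStrategy.class,
        "m_assignBalancer", strategy, new LeastAdjustmentAssianBalancer());

List<Partition> partitions = Arrays.asList(new Partition(0), new Partition(1), new Partition(2));

final ClientContext a = new ClientContext("a", "a", 1, "a", "a", 1L);
final ClientContext b = new ClientContext("b", "a", 1, "a", "a", 1L);
Map<String, ClientContext> consumers = new HashMap<>();
consumers.put("a", a);
consumers.put("b", b);

Map<Integer, Map<String, ClientContext>> origin = new HashMap<>();
origin.put(0, new HashMap<String, ClientContext>() { { put("a", a); } });

// Partition 0 is expected to stay with consumer "a"; partitions 1 and 2 are free and get balanced.
Map<Integer, Map<String, ClientContext>> result = strategy.assign(partitions, consumers, origin);
System.out.println(result);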
ConsumerAssignmentHolder implements Initializable { protected void rebalance() { try { Map<Pair<String, String>, Map<String, ClientContext>> changes = m_activeConsumerListHolder.scanChanges( m_config.getConsumerHeartbeatTimeoutMillis(), TimeUnit.MILLISECONDS); if (changes != null && !changes.isEmpty()) { Map<Pair<String, String>, Assignment<Integer>> newAssignments = new HashMap<>(m_assignments.get()); for (Map.Entry<Pair<String, String>, Map<String, ClientContext>> change : changes.entrySet()) { Pair<String, String> topicGroup = change.getKey(); Map<String, ClientContext> consumerList = change.getValue(); if (consumerList == null || consumerList.isEmpty()) { newAssignments.remove(topicGroup); } else { Assignment<Integer> newAssignment = createNewAssignment(topicGroup, consumerList, newAssignments.get(topicGroup)); if (newAssignment != null) { newAssignments.put(topicGroup, newAssignment); } } } m_assignments.set(newAssignments); if (traceLog.isInfoEnabled()) { traceLog.info("Consumer assignment changed.\n{}", JSON.toJSONString(newAssignments)); } } } catch (Exception e) { log.warn("Error occurred while doing assignment check in ConsumerRebalanceChecker", e); } } ConsumerAssignmentHolder(); void setActiveConsumerListHolder(ActiveConsumerListHolder activeConsumerListHolder); void setConfig(MetaServerConfig config); void setMetaHolder(MetaHolder metaHolder); void setPartitionAssigningStrategy(ConsumerPartitionAssigningStrategy partitionAssigningStrategy); Assignment<Integer> getAssignment(Pair<String, String> topicGroup); Map<Pair<String, String>, Assignment<Integer>> getAssignments(); @Override void initialize(); }
@Test public void test() throws Exception { Map<Pair<String, String>, Map<String, ClientContext>> changes1 = new HashMap<>(); Pair<String, String> t1g1 = new Pair<String, String>("t1", "g1"); Map<String, ClientContext> t1g1Consumers = new LinkedHashMap<>(); t1g1Consumers.put("c1", new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L)); t1g1Consumers.put("c2", new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L)); changes1.put(t1g1, t1g1Consumers); Pair<String, String> t1g4 = new Pair<String, String>("t1", "g4"); Map<String, ClientContext> t1g4Consumers = new LinkedHashMap<>(); t1g4Consumers.put("c1", new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L)); changes1.put(t1g4, t1g4Consumers); Pair<String, String> t1g2 = new Pair<String, String>("t1", "g2"); Map<String, ClientContext> t1g2Consumers = new LinkedHashMap<>(); t1g2Consumers.put("c3", new ClientContext("c3", "3.3.3.3", 3333, null, null, -3L)); t1g2Consumers.put("c4", new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L)); changes1.put(t1g2, t1g2Consumers); Pair<String, String> t2g1 = new Pair<String, String>("t2", "g1"); Map<String, ClientContext> t2g1Consumers = new LinkedHashMap<>(); t2g1Consumers.put("c1", new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L)); t2g1Consumers.put("c4", new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L)); changes1.put(t2g1, t2g1Consumers); Pair<String, String> t2g2 = new Pair<String, String>("t2", "g2"); Map<String, ClientContext> t2g2Consumers = new LinkedHashMap<>(); t2g2Consumers.put("c2", new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L)); t2g2Consumers.put("c3", new ClientContext("c3", "3.3.3.3", 3333, null, null, -3L)); changes1.put(t2g2, t2g2Consumers); Pair<String, String> t3g1 = new Pair<String, String>("t3", "g1"); Map<String, ClientContext> t3g1Consumers = new LinkedHashMap<>(); t3g1Consumers.put("c2", new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L)); changes1.put(t3g1, t3g1Consumers); when(m_activeConsumerListHolder.scanChanges(anyLong(), any(TimeUnit.class))).thenReturn(changes1); m_holder.rebalance(); assertAssignment(t1g1, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L))) ) ); assertAssignment(t1g2, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c3", "3.3.3.3", 3333, null, null, -3L), new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c3", "3.3.3.3", 3333, null, null, -3L), new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))) ) ); assertAssignment(t2g1, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L), new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L), new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))) ) ); assertAssignment(t2g2, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c3", "3.3.3.3", 3333, null, null, -3L))) ) ); Map<Pair<String, String>, Map<String, ClientContext>> changes2 = new HashMap<>(changes1); changes2.get(t1g1).remove("c1"); 
changes2.get(t1g2).remove("c3"); changes2.get(t2g2).remove("c2"); changes2.get(t2g2).put("c4", new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L)); reset(m_activeConsumerListHolder); when(m_activeConsumerListHolder.scanChanges(anyLong(), any(TimeUnit.class))).thenReturn(changes2); m_holder.rebalance(); assertAssignment(t1g1, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c2", "2.2.2.2", 2222, null, null, -2L))) ) ); assertAssignment(t1g2, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))) ) ); assertAssignment(t2g1, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L), new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c1", "1.1.1.1", 1111, null, null, -1L), new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))) ) ); assertAssignment(t2g2, Arrays.asList( new Pair<Integer, List<ClientContext>>(0, Arrays.asList(new ClientContext("c3", "3.3.3.3", 3333, null, null, -3L))), new Pair<Integer, List<ClientContext>>(1, Arrays.asList(new ClientContext("c4", "4.4.4.4", 4444, null, null, -4L))) ) ); }
ActiveConsumerList { public void heartbeat(String consumerName, long heartbeatTime, String ip) { if (!m_consumers.containsKey(consumerName)) { m_changed = true; m_consumers.put(consumerName, new ClientContext(consumerName, ip, -1, null, null, heartbeatTime)); } else { ClientContext consumerContext = m_consumers.get(consumerName); if (!StringUtils.equals(consumerContext.getIp(), ip)) { m_changed = true; consumerContext.setIp(ip); } consumerContext.setLastHeartbeatTime(heartbeatTime); } } void heartbeat(String consumerName, long heartbeatTime, String ip); void purgeExpired(long timeoutMillis, long now); boolean getAndResetChanged(); Map<String, ClientContext> getActiveConsumers(); }
@Test public void testHeartbeat() throws Exception { String consumerName = "c1"; String ip = "1.1.1.1"; long heartbeatTime = 1L; m_list.heartbeat(consumerName, heartbeatTime, ip); Map<String, ClientContext> activeConsumers = m_list.getActiveConsumers(); assertEquals(1, activeConsumers.size()); TestHelper.assertClientContextEquals(consumerName, ip, heartbeatTime, activeConsumers.get(consumerName)); assertTrue(m_list.getAndResetChanged()); }
ActiveConsumerList { public void purgeExpired(long timeoutMillis, long now) { Iterator<Entry<String, ClientContext>> iter = m_consumers.entrySet().iterator(); while (iter.hasNext()) { Entry<String, ClientContext> entry = iter.next(); if (entry.getValue().getLastHeartbeatTime() + timeoutMillis < now) { iter.remove(); m_changed = true; } } } void heartbeat(String consumerName, long heartbeatTime, String ip); void purgeExpired(long timeoutMillis, long now); boolean getAndResetChanged(); Map<String, ClientContext> getActiveConsumers(); }
@Test public void testPurgeExpired() throws Exception { String consumerName = "c1"; String ip = "1.1.1.1"; long heartbeatTime = 1L; m_list.heartbeat(consumerName, heartbeatTime, ip); m_list.purgeExpired(10, 12L); Map<String, ClientContext> activeConsumers = m_list.getActiveConsumers(); assertEquals(0, activeConsumers.size()); assertTrue(m_list.getAndResetChanged()); m_list.heartbeat(consumerName, heartbeatTime, ip); m_list.purgeExpired(10, 1L); activeConsumers = m_list.getActiveConsumers(); assertEquals(1, activeConsumers.size()); TestHelper.assertClientContextEquals(consumerName, ip, heartbeatTime, activeConsumers.get(consumerName)); assertTrue(m_list.getAndResetChanged()); }
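The expiry rule in purgeExpired() is strict: an entry is dropped when lastHeartbeatTime + timeoutMillis < now. A tiny sketch of that boundary, assuming ActiveConsumerList has a default constructor as implied by the tests:

// Fragment; numbers chosen to sit on either side of the expiry boundary.
ActiveConsumerList list = new ActiveConsumerList();
list.heartbeat("c1", 100L, "1.1.1.1");
list.purgeExpired(50L, 200L);   // 100 + 50 < 200 -> "c1" is removed
System.out.println(list.getActiveConsumers().size());   // expected 0

list.heartbeat("c1", 100L, "1.1.1.1");
list.purgeExpired(50L, 140L);   // 100 + 50 >= 140 -> "c1" survives
System.out.println(list.getActiveConsumers().size());   // expected 1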
ActiveConsumerListHolder { public void heartbeat(Pair<String, String> topicGroup, String consumerName, String ip) { m_lock.lock(); try { if (!m_activeConsumerLists.containsKey(topicGroup)) { m_activeConsumerLists.put(topicGroup, new ActiveConsumerList()); } ActiveConsumerList activeConsumerList = m_activeConsumerLists.get(topicGroup); activeConsumerList.heartbeat(consumerName, m_systemClockService.now(), ip); } finally { m_lock.unlock(); } } void setSystemClockService(SystemClockService systemClockService); void heartbeat(Pair<String, String> topicGroup, String consumerName, String ip); Map<Pair<String, String>, Map<String, ClientContext>> scanChanges(long timeout, TimeUnit timeUnit); ActiveConsumerList getActiveConsumerList(Pair<String, String> topicGroup); }
@Test public void testHeartbeat() throws Exception { Pair<String, String> t1g1 = new Pair<>("t1", "g1"); Pair<String, String> t1g2 = new Pair<>("t1", "g2"); Pair<String, String> t2g1 = new Pair<>("t2", "g1"); m_holder.heartbeat(t1g1, "c1", "1.1.1.1"); m_holder.heartbeat(t1g1, "c2", "1.1.1.2"); m_holder.heartbeat(t1g2, "c3", "1.1.1.3"); m_holder.heartbeat(t2g1, "c1", "1.1.1.1"); ActiveConsumerList activeConsumerList = m_holder.getActiveConsumerList(t1g1); Map<String, ClientContext> activeConsumers = activeConsumerList.getActiveConsumers(); assertEquals(2, activeConsumers.size()); TestHelper.assertClientContextEquals("c1", "1.1.1.1", activeConsumers.get("c1")); TestHelper.assertClientContextEquals("c2", "1.1.1.2", activeConsumers.get("c2")); activeConsumerList = m_holder.getActiveConsumerList(t1g2); activeConsumers = activeConsumerList.getActiveConsumers(); assertEquals(1, activeConsumers.size()); TestHelper.assertClientContextEquals("c3", "1.1.1.3", activeConsumers.get("c3")); activeConsumerList = m_holder.getActiveConsumerList(t2g1); activeConsumers = activeConsumerList.getActiveConsumers(); assertEquals(1, activeConsumers.size()); TestHelper.assertClientContextEquals("c1", "1.1.1.1", activeConsumers.get("c1")); }
DefaultMetaService implements MetaService, Initializable { @Override public int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId) { Topic topic = findTopic(topicName, getMeta()); if (containsConsumerGroup(topicName, groupId)) { ConsumerGroup consumerGroup = topic.findConsumerGroup(groupId); if (consumerGroup.getAckTimeoutSeconds() == null) { return topic.getAckTimeoutSeconds(); } else { return consumerGroup.getAckTimeoutSeconds(); } } else { throw new RuntimeException(String.format("Consumer group %s for topic %s not found", groupId, topicName)); } } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testGetAckTimeoutSecondsByTopicAndConsumerGroup() throws Exception { assertEquals(10, m_metaService.getAckTimeoutSecondsByTopicAndConsumerGroup("test_broker", "group1")); assertEquals(6, m_metaService.getAckTimeoutSecondsByTopicAndConsumerGroup("test_broker", "group2")); assertEquals(5, m_metaService.getAckTimeoutSecondsByTopicAndConsumerGroup("test_broker", "group3")); try { m_metaService.getAckTimeoutSecondsByTopicAndConsumerGroup("topic_not_found", "group3"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } try { m_metaService.getAckTimeoutSecondsByTopicAndConsumerGroup("test_broker", "group_not_found"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } }
DefaultMetaService implements MetaService, Initializable { @Override public List<Partition> listPartitionsByTopic(String topicName) { return findTopic(topicName, getMeta()).getPartitions(); } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testListPartitionsByTopic() throws Exception { List<Partition> kafkaPartitions = m_metaService.listPartitionsByTopic("test_kafka"); assertEquals(1, kafkaPartitions.size()); assertEquals("kafka1", kafkaPartitions.get(0).getEndpoint()); assertEquals(Integer.valueOf(0), kafkaPartitions.get(0).getId()); List<Partition> brokerPartitions = m_metaService.listPartitionsByTopic("test_broker"); assertEquals(2, brokerPartitions.size()); assertEquals("br0", brokerPartitions.get(0).getEndpoint()); assertEquals(Integer.valueOf(0), brokerPartitions.get(0).getId()); assertEquals("br1", brokerPartitions.get(1).getEndpoint()); assertEquals(Integer.valueOf(1), brokerPartitions.get(1).getId()); } @Test(expected = RuntimeException.class) public void testListPartitionsByTopicTopicNotFound() throws Exception { m_metaService.listPartitionsByTopic("topicNotFound"); }
DefaultMetaService implements MetaService, Initializable { @Override public Storage findStorageByTopic(String topicName) { Meta meta = getMeta(); Topic topic = findTopic(topicName, meta); String storageType = topic.getStorageType(); return meta.findStorage(storageType); } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testFindStorageByTopic() throws Exception { Storage brokerStorage = m_metaService.findStorageByTopic("test_broker"); assertEquals(Storage.MYSQL, brokerStorage.getType()); Storage kafkaStorage = m_metaService.findStorageByTopic("test_kafka"); assertEquals(Storage.KAFKA, kafkaStorage.getType()); } @Test(expected = RuntimeException.class) public void testFindStorageByTopicTopicNotFound() throws Exception { m_metaService.findStorageByTopic("topicNotFound"); }
DefaultMetaService implements MetaService, Initializable { @Override public Partition findPartitionByTopicAndPartition(String topicName, int partitionId) { return findTopic(topicName, getMeta()).findPartition(partitionId); } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testFindPartitionByTopicAndPartition() throws Exception { Partition brokerPartition = m_metaService.findPartitionByTopicAndPartition("test_broker", 0); assertEquals("br0", brokerPartition.getEndpoint()); assertEquals(Integer.valueOf(0), brokerPartition.getId()); brokerPartition = m_metaService.findPartitionByTopicAndPartition("test_broker", 1); assertEquals("br1", brokerPartition.getEndpoint()); assertEquals(Integer.valueOf(1), brokerPartition.getId()); Partition kafkaPartition = m_metaService.findPartitionByTopicAndPartition("test_kafka", 0); assertEquals("kafka1", kafkaPartition.getEndpoint()); assertEquals(Integer.valueOf(0), kafkaPartition.getId()); assertNull(m_metaService.findPartitionByTopicAndPartition("test_kafka", 1)); } @Test(expected = RuntimeException.class) public void testFindPartitionByTopicAndPartitionTopicNotFound() throws Exception { m_metaService.findPartitionByTopicAndPartition("topicNotFound", 0); }
DefaultMetaService implements MetaService, Initializable { @Override public List<Topic> listTopicsByPattern(String topicPattern) { if (StringUtils.isBlank(topicPattern)) { throw new RuntimeException("Topic pattern can not be null or blank"); } topicPattern = StringUtils.trim(topicPattern); Meta meta = getMeta(); List<Topic> matchedTopics = new ArrayList<Topic>(); Collection<Topic> topics = meta.getTopics().values(); for (Topic topic : topics) { if (isTopicMatch(topicPattern, topic.getName())) { matchedTopics.add(topic); } } return matchedTopics; } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testListTopicsByPattern() { DefaultMetaService ms = new DefaultMetaService(); assertTrue(ms.isTopicMatch("a", "a")); assertTrue(ms.isTopicMatch("a.b", "a.b")); assertTrue(ms.isTopicMatch("a.b.c", "a.b.c")); assertTrue(ms.isTopicMatch("a.*", "a.bbb")); assertFalse(ms.isTopicMatch("a.*", "a")); assertFalse(ms.isTopicMatch("a.*", "a.b.c")); assertTrue(ms.isTopicMatch("a.#", "a.b")); assertFalse(ms.isTopicMatch("a.#", "a")); assertTrue(ms.isTopicMatch("a.#", "a.bbb")); assertTrue(ms.isTopicMatch("a.#", "a.bbb.ccc")); assertTrue(ms.isTopicMatch("a.*.c", "a.bbb.c")); assertFalse(ms.isTopicMatch("a.*.c", "a.bbb.xxx.c")); assertTrue(ms.isTopicMatch("a.#.c", "a.bbb.c")); assertTrue(ms.isTopicMatch("a.#.c", "a.bbb.xxx.c")); assertFalse(ms.isTopicMatch("a.#.c", "a.bbb.d")); assertFalse(ms.isTopicMatch("a.#.c", "a.bbb.xxx.d")); assertTrue(ms.isTopicMatch("a.*.c", "a.b--c.c")); assertTrue(ms.isTopicMatch("a.#.c", "a.b-c.d-d.c")); assertTrue(ms.isTopicMatch("a.#.c", "a.b-c..c")); assertFalse(ms.isTopicMatch("a.*.c", "a.b-c.dd.c")); assertFalse(ms.isTopicMatch("a.#.c", "a.b-c.d-d.d")); assertFalse(ms.isTopicMatch("a.#.c", "a.b-c..d")); }
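Read together, the assertions above pin down the wildcard semantics used by listTopicsByPattern: '*' matches exactly one dot-separated section, '#' matches one or more sections. A small illustrative recap with hypothetical topic names, calling isTopicMatch the same way the test does:

// Fragment; expected results follow directly from the assertions in the test above.
DefaultMetaService ms = new DefaultMetaService();
ms.isTopicMatch("order.*", "order.paid");        // true  - '*' covers exactly one section
ms.isTopicMatch("order.*", "order.paid.done");   // false - '*' does not span two sections
ms.isTopicMatch("order.#", "order.paid.done");   // true  - '#' covers one or more sections
ms.isTopicMatch("order.#", "order");             // false - '#' requires at least one section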
DefaultMetaService implements MetaService, Initializable { @Override public Topic findTopicByName(String topicName) { try { return findTopic(topicName, getMeta()); } catch (Exception e) { return null; } } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testFindTopicByName() throws Exception { Topic topic = m_metaService.findTopicByName("test_broker"); assertEquals("test_broker", topic.getName()); topic = m_metaService.findTopicByName("topic_not_found"); assertNull(topic); }
DefaultMetaService implements MetaService, Initializable { @Override public int translateToIntGroupId(String topicName, String groupName) { Topic topic = findTopic(topicName, getMeta()); if (containsConsumerGroup(topicName, groupName)) { ConsumerGroup consumerGroup = topic.findConsumerGroup(groupName); return consumerGroup.getId(); } else { throw new RuntimeException(String.format("Consumer group not found for topic %s and group %s", topicName, groupName)); } } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testTranslateToIntGroupId() throws Exception { assertEquals(1, m_metaService.translateToIntGroupId("test_broker", "group1")); assertEquals(2, m_metaService.translateToIntGroupId("test_broker", "group2")); assertEquals(3, m_metaService.translateToIntGroupId("test_broker", "group3")); try { m_metaService.translateToIntGroupId("topic_not_found", "group3"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } try { m_metaService.translateToIntGroupId("test_broker", "group_not_found"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } }
DefaultMetaService implements MetaService, Initializable { @Override public List<Datasource> listAllMysqlDataSources() { Meta meta = getMeta(); final List<Datasource> dataSources = new ArrayList<Datasource>(); meta.accept(new BaseVisitor2() { @Override protected void visitDatasourceChildren(Datasource ds) { Storage storage = getAncestor(2); if (StringUtils.equalsIgnoreCase(Storage.MYSQL, storage.getType())) { dataSources.add(ds); } super.visitDatasourceChildren(ds); } }); return dataSources; } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testListAllMysqlDataSources() throws Exception { List<Datasource> datasources = m_metaService.listAllMysqlDataSources(); assertEquals(2, datasources.size()); assertEquals("ds0", datasources.get(0).getId()); assertEquals("ds1", datasources.get(1).getId()); }
DefaultMetaService implements MetaService, Initializable { @Override public boolean containsConsumerGroup(String topicName, String groupId) { Topic topic = findTopic(topicName, getMeta()); return topic != null && topic.findConsumerGroup(groupId) != null; } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testContainsConsumerGroup() throws Exception { assertTrue(m_metaService.containsConsumerGroup("test_broker", "group1")); assertFalse(m_metaService.containsConsumerGroup("test_broker", "group5")); try { m_metaService.containsConsumerGroup("topic_not_found", "group3"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } }
DefaultMetaService implements MetaService, Initializable { @Override public boolean containsEndpoint(Endpoint endpoint) { return getMeta().getEndpoints().containsKey(endpoint.getId()); } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testContainsEndpoint() throws Exception { assertTrue(m_metaService.containsEndpoint(new Endpoint("br0"))); assertFalse(m_metaService.containsEndpoint(new Endpoint("notFound"))); }
DefaultMetaService implements MetaService, Initializable { @Override public RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId) { Topic topic = findTopic(topicName, getMeta()); if (containsConsumerGroup(topicName, groupId)) { ConsumerGroup consumerGroup = topic.findConsumerGroup(groupId); String retryPolicyValue = consumerGroup.getRetryPolicy(); if (StringUtils.isBlank(retryPolicyValue)) { retryPolicyValue = topic.getConsumerRetryPolicy(); } return RetryPolicyFactory.create(retryPolicyValue); } else { throw new RuntimeException(String.format("Consumer group %s for topic %s not found", groupId, topicName)); } } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testFindRetryPolicyByTopicAndGroup() throws Exception { RetryPolicy policy = m_metaService.findRetryPolicyByTopicAndGroup("test_broker", "group1"); assertTrue(policy instanceof FrequencySpecifiedRetryPolicy); assertEquals(4, ((FrequencySpecifiedRetryPolicy) policy).getRetryTimes()); policy = m_metaService.findRetryPolicyByTopicAndGroup("test_broker", "group2"); assertTrue(policy instanceof FrequencySpecifiedRetryPolicy); assertEquals(2, ((FrequencySpecifiedRetryPolicy) policy).getRetryTimes()); policy = m_metaService.findRetryPolicyByTopicAndGroup("test_broker", "group3"); assertTrue(policy instanceof FrequencySpecifiedRetryPolicy); assertEquals(3, ((FrequencySpecifiedRetryPolicy) policy).getRetryTimes()); try { m_metaService.findRetryPolicyByTopicAndGroup("topic_not_found", "group3"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } try { m_metaService.findRetryPolicyByTopicAndGroup("test_broker", "group_not_found"); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } }
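The lookup above resolves a retry policy per consumer group and falls back to the topic-level consumerRetryPolicy when the group's own value is blank; a missing group or topic is rejected with a RuntimeException, as the tests show. A minimal standalone sketch of that fallback order, using plain strings instead of the Hermes meta entities (all names below are illustrative, not the project's API):

    // Minimal sketch of group-over-topic retry-policy resolution (illustrative names only).
    import java.util.HashMap;
    import java.util.Map;

    public class RetryPolicyFallbackSketch {
        public static void main(String[] args) {
            Map<String, String> groupPolicies = new HashMap<>();
            groupPolicies.put("group1", "1:[10,100,1000,10000]"); // group-level override
            String topicDefaultPolicy = "3:[20,3000]";            // topic-level default

            System.out.println(resolve(groupPolicies.get("group1"), topicDefaultPolicy)); // group wins
            System.out.println(resolve(groupPolicies.get("group2"), topicDefaultPolicy)); // falls back
        }

        // A blank or missing group policy falls back to the topic's default.
        static String resolve(String groupPolicy, String topicDefault) {
            return (groupPolicy == null || groupPolicy.trim().isEmpty()) ? topicDefault : groupPolicy;
        }
    }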
DefaultMetaService implements MetaService, Initializable { @Override public synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition) { return new Pair<Endpoint, Long>(getMeta().findEndpoint( findTopic(topic, getMeta()).findPartition(partition).getEndpoint()), m_lastLoadedTime); } @Override List<Partition> listPartitionsByTopic(String topicName); @Override Storage findStorageByTopic(String topicName); @Override Partition findPartitionByTopicAndPartition(String topicName, int partitionId); @Override List<Topic> listTopicsByPattern(String topicPattern); @Override Topic findTopicByName(String topicName); @Override int translateToIntGroupId(String topicName, String groupName); @Override List<Datasource> listAllMysqlDataSources(); @Override int getAckTimeoutSecondsByTopicAndConsumerGroup(String topicName, String groupId); @Override LeaseAcquireResponse tryAcquireConsumerLease(Tpg tpg, String sessionId); @Override LeaseAcquireResponse tryRenewConsumerLease(Tpg tpg, Lease lease, String sessionId); @Override LeaseAcquireResponse tryRenewBrokerLease(String topic, int partition, Lease lease, String sessionId, int brokerPort); @Override LeaseAcquireResponse tryAcquireBrokerLease(String topic, int partition, String sessionId, int brokerPort); @Override void initialize(); @Override synchronized Pair<Endpoint, Long> findEndpointByTopicAndPartition(String topic, int partition); @Override RetryPolicy findRetryPolicyByTopicAndGroup(String topicName, String groupId); @Override boolean containsEndpoint(Endpoint endpoint); @Override boolean containsConsumerGroup(String topicName, String groupId); @Override Offset findMessageOffsetByTime(String topic, int partition, long time); @Override Map<Integer, Offset> findMessageOffsetByTime(String topic, long time); @Override List<ZookeeperEnsemble> listAllZookeeperEnsemble(); @Override Idc getPrimaryIdc(); }
@Test public void testFindEndpointByTopicAndPartition() throws Exception { Endpoint endpoint = m_metaService.findEndpointByTopicAndPartition("test_broker", 0).getKey(); assertEquals("br0", endpoint.getId()); endpoint = m_metaService.findEndpointByTopicAndPartition("test_broker", 1).getKey(); assertEquals("br1", endpoint.getId()); try { m_metaService.findEndpointByTopicAndPartition("topic_not_found", 0).getKey(); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } try { m_metaService.findEndpointByTopicAndPartition("test_broker", 100); fail(); } catch (RuntimeException e) { } catch (Exception e) { fail(); } }
ExponentialSchedulePolicy implements SchedulePolicy { @Override public long fail(boolean shouldSleep) { int delayTime = m_lastDelayTime; if (delayTime == 0) { delayTime = m_delayBase; } else { delayTime = Math.min(m_lastDelayTime << 1, m_delayUpperbound); } if (shouldSleep) { try { Thread.sleep(delayTime); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } } m_lastDelayTime = delayTime; return delayTime; } ExponentialSchedulePolicy(int delayBase, int delayUpperbound); @Override long fail(boolean shouldSleep); @Override void succeess(); }
@Test public void testFail() { ExponentialSchedulePolicy policy = new ExponentialSchedulePolicy(10, 80); assertEquals(10, policy.fail(false)); assertEquals(20, policy.fail(false)); assertEquals(40, policy.fail(false)); assertEquals(80, policy.fail(false)); for (int i = 0; i < 100; i++) { assertEquals(80, policy.fail(false)); } }
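The policy above doubles the delay on every consecutive failure and caps it at the configured upper bound; succeess() presumably resets the sequence so the next failure starts from the base again. A standalone sketch of the capped-doubling arithmetic, not the project's class:

    // Capped exponential backoff sketch: base, base*2, base*4, ... up to the upper bound.
    public class CappedBackoffSketch {
        private final int delayBase;
        private final int delayUpperBound;
        private int lastDelay = 0;

        CappedBackoffSketch(int delayBase, int delayUpperBound) {
            this.delayBase = delayBase;
            this.delayUpperBound = delayUpperBound;
        }

        // Returns the next delay without sleeping.
        int fail() {
            lastDelay = (lastDelay == 0) ? delayBase : Math.min(lastDelay << 1, delayUpperBound);
            return lastDelay;
        }

        void success() {
            lastDelay = 0; // start from the base again after a success
        }

        public static void main(String[] args) {
            CappedBackoffSketch p = new CappedBackoffSketch(10, 80);
            for (int i = 0; i < 6; i++) {
                System.out.print(p.fail() + " "); // 10 20 40 80 80 80
            }
        }
    }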
Magic { public static void writeMagic(ByteBuf buf) { buf.writeBytes(MAGIC); } static void readAndCheckMagic(ByteBuffer buf); static void readAndCheckMagic(ByteBuf buf); static void writeMagic(ByteBuf buf); static int length(); }
@Test public void testWrite() { ByteBuf buf = Unpooled.buffer(); Magic.writeMagic(buf); byte[] bytes = new byte[Magic.MAGIC.length]; buf.readBytes(bytes); assertArrayEquals(Magic.MAGIC, bytes); }
Magic { public static void readAndCheckMagic(ByteBuffer buf) { byte[] magic = new byte[MAGIC.length]; buf.get(magic); for (int i = 0; i < magic.length; i++) { if (magic[i] != MAGIC[i]) { throw new IllegalArgumentException("Magic number mismatch"); } } } static void readAndCheckMagic(ByteBuffer buf); static void readAndCheckMagic(ByteBuf buf); static void writeMagic(ByteBuf buf); static int length(); }
@Test(expected = IllegalArgumentException.class) public void testReadAndCheckByteBufFail() { byte[] bytes = new byte[Magic.MAGIC.length]; System.arraycopy(Magic.MAGIC, 0, bytes, 0, Magic.MAGIC.length); bytes[bytes.length - 1] = (byte) (bytes[bytes.length - 1] + 1); Magic.readAndCheckMagic(Unpooled.wrappedBuffer(bytes)); } @Test public void testReadAndCheckByteBuf() { byte[] bytes = new byte[Magic.MAGIC.length]; System.arraycopy(Magic.MAGIC, 0, bytes, 0, Magic.MAGIC.length); Magic.readAndCheckMagic(Unpooled.wrappedBuffer(bytes)); } @Test(expected = IllegalArgumentException.class) public void testReadAndCheckByteBufferFail() { byte[] bytes = new byte[Magic.MAGIC.length]; System.arraycopy(Magic.MAGIC, 0, bytes, 0, Magic.MAGIC.length); bytes[bytes.length - 1] = (byte) (bytes[bytes.length - 1] + 1); Magic.readAndCheckMagic(ByteBuffer.wrap(bytes)); } @Test public void testReadAndCheckByteBuffer() { byte[] bytes = new byte[Magic.MAGIC.length]; System.arraycopy(Magic.MAGIC, 0, bytes, 0, Magic.MAGIC.length); Magic.readAndCheckMagic(ByteBuffer.wrap(bytes)); }
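Both overloads above validate a fixed magic prefix at the start of a frame and throw IllegalArgumentException on mismatch, which is exactly what the four tests cover. A self-contained byte-array version of the same check (illustrative prefix value, not the project's Magic class):

    import java.util.Arrays;

    // Sketch of a fixed magic-prefix check over a raw byte array.
    public class MagicCheckSketch {
        static final byte[] MAGIC = new byte[] { 'H', 'M', 'S' }; // placeholder value

        static void readAndCheck(byte[] frame) {
            if (frame.length < MAGIC.length
                  || !Arrays.equals(Arrays.copyOfRange(frame, 0, MAGIC.length), MAGIC)) {
                throw new IllegalArgumentException("Magic number mismatch");
            }
        }

        public static void main(String[] args) {
            readAndCheck(new byte[] { 'H', 'M', 'S', 1, 2 }); // passes
            readAndCheck(new byte[] { 'X', 'M', 'S', 1, 2 }); // throws IllegalArgumentException
        }
    }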
EndpointMaker implements Initializable { public Map<String, Map<Integer, Endpoint>> makeEndpoints(EventBus eventBus, long version, ClusterStateHolder stateHolder, Map<String, Assignment<Integer>> brokerAssignments, boolean mergeOnce) throws Exception { long start = System.currentTimeMillis(); try { Map<String, Map<Integer, Endpoint>> topicPartition2Endpoints = new HashMap<>(); Pair<Long, Long> delayRebalanceTimespan = new Pair<>(0L, 0L); Map<Pair<String, Integer>, Map<String, ClientLeaseInfo>> brokerLeases = m_brokerLeaseHolder .getAllValidLeases(); for (Map.Entry<String, Assignment<Integer>> topicAssignment : brokerAssignments.entrySet()) { String topicName = topicAssignment.getKey(); Map<Integer, Map<String, ClientContext>> assignment = topicAssignment.getValue().getAssignments(); if (assignment != null && !assignment.isEmpty()) { topicPartition2Endpoints.put(topicName, new HashMap<Integer, Endpoint>()); for (Map.Entry<Integer, Map<String, ClientContext>> partitionAssignment : assignment.entrySet()) { int partition = partitionAssignment.getKey(); Map<String, ClientContext> assignedBrokers = partitionAssignment.getValue(); topicPartition2Endpoints.get(topicName).putAll( makePartition2Endpoints(topicName, partition, assignedBrokers, brokerLeases.get(new Pair<>(topicName, partition)), delayRebalanceTimespan)); } } } if (!mergeOnce) { scheduleDelayReblanceTasks(eventBus, version, stateHolder, delayRebalanceTimespan); } return topicPartition2Endpoints; } finally { log.info("Make endpoint cost {}ms", (System.currentTimeMillis() - start)); } } void setBrokerLeaseHolder(BrokerLeaseHolder brokerLeaseHolder); void setConfig(MetaServerConfig config); void setScheduledExecutor(ScheduledExecutorService scheduledExecutor); Map<String, Map<Integer, Endpoint>> makeEndpoints(EventBus eventBus, long version, ClusterStateHolder stateHolder, Map<String, Assignment<Integer>> brokerAssignments, boolean mergeOnce); @Override void initialize(); }
@Test public void test() throws Exception { long now = System.currentTimeMillis(); Map<String, Assignment<Integer>> brokerAssignments = new HashMap<>(); Assignment<Integer> t1Assignment = new Assignment<Integer>(); t1Assignment.addAssignment(1, createBroker("br1", "1.1.1.1", 1111)); t1Assignment.addAssignment(2, createBroker("br2", "1.1.1.2", 2222)); t1Assignment.addAssignment(3, createBroker("br1", "1.1.1.1", 1111)); brokerAssignments.put("t1", t1Assignment); Assignment<Integer> t2Assignment = new Assignment<Integer>(); t2Assignment.addAssignment(1, createBroker("br1", "1.1.1.1", 1111)); t2Assignment.addAssignment(2, createBroker("br2", "1.1.1.2", 2222)); t2Assignment.addAssignment(3, createBroker("br3", "1.1.1.3", 3333)); brokerAssignments.put("t2", t2Assignment); Map<Pair<String, Integer>, Map<String, ClientLeaseInfo>> leases = new HashMap<>(); leases.put(new Pair<String, Integer>("t1", 2), createBrokerLease("br1", "1.1.1.11", 8888, 1, now + 10 * 1000L)); leases.put(new Pair<String, Integer>("t1", 4), createBrokerLease("br1", "1.1.1.11", 8888, 1, now + 10 * 1000L)); leases.put(new Pair<String, Integer>("t2", 1), createBrokerLease("br1", "1.1.1.1", 1111, 2, now + 20 * 1000L)); when(m_brokerLeaseHolder.getAllValidLeases()).thenReturn(leases); when(m_scheduledExecutor.schedule(any(Runnable.class), anyLong(), eq(TimeUnit.MILLISECONDS))).thenReturn(null); Map<String, Map<Integer, Endpoint>> endpoints = m_maker.makeEndpoints(null, -1, null, brokerAssignments, false); verify(m_scheduledExecutor, times(1)).schedule(any(Runnable.class), anyLong(), eq(TimeUnit.MILLISECONDS)); assertEquals(2, endpoints.size()); Map<Integer, Endpoint> endpoints1 = endpoints.get("t1"); assertEquals(3, endpoints1.size()); assertEndpoint("br1", "1.1.1.1", 1111, endpoints1.get(1)); assertEndpoint("br1", "1.1.1.11", 8888, endpoints1.get(2)); assertEndpoint("br1", "1.1.1.1", 1111, endpoints1.get(3)); Map<Integer, Endpoint> endpoints2 = endpoints.get("t2"); assertEquals(3, endpoints2.size()); assertEndpoint("br1", "1.1.1.1", 1111, endpoints2.get(1)); assertEndpoint("br2", "1.1.1.2", 2222, endpoints2.get(2)); assertEndpoint("br3", "1.1.1.3", 3333, endpoints2.get(3)); }
SendMessageCommand extends AbstractCommand { public List<Pair<ProducerMessage<?>, SettableFuture<SendResult>>> getProducerMessageFuturePairs() { List<Pair<ProducerMessage<?>, SettableFuture<SendResult>>> pairs = new LinkedList<Pair<ProducerMessage<?>, SettableFuture<SendResult>>>(); Collection<List<ProducerMessage<?>>> msgsList = getProducerMessages(); for (List<ProducerMessage<?>> msgs : msgsList) { for (ProducerMessage<?> msg : msgs) { SettableFuture<SendResult> future = m_futures.get(msg.getMsgSeqNo()); if (future != null) { pairs.add(new Pair<ProducerMessage<?>, SettableFuture<SendResult>>(msg, future)); } } } return pairs; } SendMessageCommand(); SendMessageCommand(String topic, int partition); ConcurrentMap<Integer, List<ProducerMessage<?>>> getMsgs(); String getTopic(); int getPartition(); void addMessage(ProducerMessage<?> msg, SettableFuture<SendResult> future); Map<Integer, MessageBatchWithRawData> getMessageRawDataBatches(); int getMessageCount(); void onResultReceived(SendMessageResultCommand result); @Override void parse0(ByteBuf buf); @Override void toBytes0(ByteBuf buf); Collection<List<ProducerMessage<?>>> getProducerMessages(); List<Pair<ProducerMessage<?>, SettableFuture<SendResult>>> getProducerMessageFuturePairs(); Map<Integer, SettableFuture<SendResult>> getFutures(); }
@Test public void testGetProducerMessageFuturePairs() throws Exception { String topic = "topic"; long bornTime = System.currentTimeMillis(); SendMessageCommand cmd = new SendMessageCommand("topic", 10); ProducerMessage<String> pmsg1 = createProducerMessage(topic, "body1", "key1", 10, "pKey1", bornTime, true, true, Arrays.asList(new Pair<String, String>("a1", "A1")), Arrays.asList(new Pair<String, String>("b1", "B1")), Arrays.asList(new Pair<String, String>("c1", "C1"))); SettableFuture<SendResult> future1 = SettableFuture.create(); cmd.addMessage(pmsg1, future1); ProducerMessage<String> pmsg2 = createProducerMessage(topic, "body2", "key2", 10, "pKey2", bornTime, true, true, Arrays.asList(new Pair<String, String>("a2", "A2")), Arrays.asList(new Pair<String, String>("b2", "B2")), Arrays.asList(new Pair<String, String>("c2", "C2"))); SettableFuture<SendResult> future2 = SettableFuture.create(); cmd.addMessage(pmsg2, future2); ProducerMessage<String> pmsg3 = createProducerMessage(topic, "body3", "key3", 10, "pKey3", bornTime, false, true, Arrays.asList(new Pair<String, String>("a3", "A3")), Arrays.asList(new Pair<String, String>("b3", "B3")), Arrays.asList(new Pair<String, String>("c3", "C3"))); SettableFuture<SendResult> future3 = SettableFuture.create(); cmd.addMessage(pmsg3, future3); ProducerMessage<String> pmsg4 = createProducerMessage(topic, "body4", "key4", 10, "pKey4", bornTime, false, true, Arrays.asList(new Pair<String, String>("a4", "A4")), Arrays.asList(new Pair<String, String>("b4", "B4")), Arrays.asList(new Pair<String, String>("c4", "C4"))); SettableFuture<SendResult> future4 = SettableFuture.create(); cmd.addMessage(pmsg4, future4); List<Pair<ProducerMessage<?>, SettableFuture<SendResult>>> pairs = cmd.getProducerMessageFuturePairs(); assertEquals(4, pairs.size()); assertSame(pmsg1, pairs.get(0).getKey()); assertSame(future1, pairs.get(0).getValue()); assertSame(pmsg2, pairs.get(1).getKey()); assertSame(future2, pairs.get(1).getValue()); assertSame(pmsg3, pairs.get(2).getKey()); assertSame(future3, pairs.get(2).getValue()); assertSame(pmsg4, pairs.get(3).getKey()); assertSame(future4, pairs.get(3).getValue()); }
SendMessageCommand extends AbstractCommand { public void addMessage(ProducerMessage<?> msg, SettableFuture<SendResult> future) { validate(msg); int msgSeqNo = m_msgCounter.getAndIncrement(); msg.setMsgSeqNo(msgSeqNo); if (msg.isPriority()) { if (!m_msgs.containsKey(0)) { m_msgs.putIfAbsent(0, new LinkedList<ProducerMessage<?>>()); } m_msgs.get(0).add(msg); } else { if (!m_msgs.containsKey(1)) { m_msgs.putIfAbsent(1, new LinkedList<ProducerMessage<?>>()); } m_msgs.get(1).add(msg); } m_futures.put(msgSeqNo, future); } SendMessageCommand(); SendMessageCommand(String topic, int partition); ConcurrentMap<Integer, List<ProducerMessage<?>>> getMsgs(); String getTopic(); int getPartition(); void addMessage(ProducerMessage<?> msg, SettableFuture<SendResult> future); Map<Integer, MessageBatchWithRawData> getMessageRawDataBatches(); int getMessageCount(); void onResultReceived(SendMessageResultCommand result); @Override void parse0(ByteBuf buf); @Override void toBytes0(ByteBuf buf); Collection<List<ProducerMessage<?>>> getProducerMessages(); List<Pair<ProducerMessage<?>, SettableFuture<SendResult>>> getProducerMessageFuturePairs(); Map<Integer, SettableFuture<SendResult>> getFutures(); }
@Test(expected = IllegalArgumentException.class) public void testTopicNotMatch() throws Exception { String topic = "topic"; long bornTime = System.currentTimeMillis(); SendMessageCommand cmd = new SendMessageCommand("topic", 10); ProducerMessage<String> pmsg1 = createProducerMessage(topic + "1111", "body1", "key1", 10, "pKey1", bornTime, true, true, Arrays.asList(new Pair<String, String>("a1", "A1")), Arrays.asList(new Pair<String, String>("b1", "B1")), Arrays.asList(new Pair<String, String>("c1", "C1"))); SettableFuture<SendResult> future1 = SettableFuture.create(); cmd.addMessage(pmsg1, future1); } @Test(expected = IllegalArgumentException.class) public void testPartitionNotMatch() throws Exception { String topic = "topic"; long bornTime = System.currentTimeMillis(); SendMessageCommand cmd = new SendMessageCommand("topic", 10); ProducerMessage<String> pmsg1 = createProducerMessage(topic, "body1", "key1", 101, "pKey1", bornTime, true, true, Arrays.asList(new Pair<String, String>("a1", "A1")), Arrays.asList(new Pair<String, String>("b1", "B1")), Arrays.asList(new Pair<String, String>("c1", "C1"))); SettableFuture<SendResult> future1 = SettableFuture.create(); cmd.addMessage(pmsg1, future1); }
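The two tests above exercise validate(), which rejects messages whose topic or partition does not match the command; accepted messages then get a sequence number, are bucketed under key 0 (priority) or 1 (non-priority), and have their future recorded by sequence number. A trimmed-down sketch of that bucketing, with plain strings standing in for ProducerMessage and SettableFuture:

    import java.util.LinkedList;
    import java.util.List;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;
    import java.util.concurrent.atomic.AtomicInteger;

    // Sketch: bucket messages by priority (0 = priority, 1 = normal) and index futures by seq-no.
    public class PriorityBucketSketch {
        private final ConcurrentMap<Integer, List<String>> buckets = new ConcurrentHashMap<>();
        private final ConcurrentMap<Integer, String> futures = new ConcurrentHashMap<>();
        private final AtomicInteger seqNo = new AtomicInteger();

        void add(String msg, boolean priority, String future) {
            int seq = seqNo.getAndIncrement();
            int bucket = priority ? 0 : 1;
            buckets.computeIfAbsent(bucket, k -> new LinkedList<>()).add(msg);
            futures.put(seq, future); // later paired back with its message by sequence number
        }

        public static void main(String[] args) {
            PriorityBucketSketch cmd = new PriorityBucketSketch();
            cmd.add("m1", true, "f1");
            cmd.add("m2", false, "f2");
            System.out.println(cmd.buckets); // {0=[m1], 1=[m2]}
        }
    }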
CallbackContext { public Slot[] nextAwaitingSlots() { Slot[] nextAwaitingSlots = new Slot[slotMatchResults.length]; for (int i = 0; i < slotMatchResults.length; i++) { SlotMatchResult slotMatchResult = slotMatchResults[i]; long nextAwaitingOffset; if (slotMatchResult.isMatch()) { nextAwaitingOffset = 1 + slotMatchResult.getTriggeringOffset(); } else { nextAwaitingOffset = slotMatchResult.getAwaitingOffset(); } nextAwaitingSlots[i] = new Slot(slotMatchResult.getIndex(), nextAwaitingOffset); } return nextAwaitingSlots; } CallbackContext(SlotMatchResult[] slotMatchResults); SlotMatchResult[] getSlotMatchResults(); int getSlotCount(); Slot[] nextAwaitingSlots(); long getTriggerTime(); }
@Test public void testSingleMatch() { int slotCount = 3; SlotMatchResult[] slotMatchResults = new SlotMatchResult[slotCount]; slotMatchResults[0] = new SlotMatchResult(0, true, 10, 15); slotMatchResults[1] = new SlotMatchResult(1, false, 20, -1); slotMatchResults[2] = new SlotMatchResult(2, false, 123456789, 123456788); CallbackContext ctx = new CallbackContext(slotMatchResults); Slot[] actualSlots = ctx.nextAwaitingSlots(); Slot[] expSlots = new Slot[slotCount]; expSlots[0] = new Slot(0, 16); expSlots[1] = new Slot(1, 20); expSlots[2] = new Slot(2, 123456789); assertArrayEquals(expSlots, actualSlots); } @Test public void testMultipleMatch() { int slotCount = 3; SlotMatchResult[] slotMatchResults = new SlotMatchResult[slotCount]; slotMatchResults[0] = new SlotMatchResult(0, true, 10, 15); slotMatchResults[1] = new SlotMatchResult(1, false, 20, -1); slotMatchResults[2] = new SlotMatchResult(2, true, 123456789, 1234567890); CallbackContext ctx = new CallbackContext(slotMatchResults); Slot[] actualSlots = ctx.nextAwaitingSlots(); Slot[] expSlots = new Slot[slotCount]; expSlots[0] = new Slot(0, 16); expSlots[1] = new Slot(1, 20); expSlots[2] = new Slot(2, 1234567891); assertArrayEquals(expSlots, actualSlots); }
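nextAwaitingSlots advances a slot to triggeringOffset + 1 when it matched and otherwise keeps its awaitingOffset, which is the rule both tests assert. A minimal standalone sketch of the per-slot computation:

    // Sketch of the per-slot "next awaiting offset" rule used above.
    public class NextAwaitingOffsetSketch {
        static long nextAwaitingOffset(boolean matched, long awaitingOffset, long triggeringOffset) {
            return matched ? triggeringOffset + 1 : awaitingOffset;
        }

        public static void main(String[] args) {
            System.out.println(nextAwaitingOffset(true, 10, 15));                // 16
            System.out.println(nextAwaitingOffset(false, 20, -1));               // 20
            System.out.println(nextAwaitingOffset(false, 123456789, 123456788)); // 123456789
        }
    }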
MessageCodecUtils { public static ByteBuffer getPayload(ByteBuffer consumerMsg) { Pair<ByteBuffer, Date> pair = getPayloadAndBornTime(consumerMsg); return pair != null ? pair.getKey() : null; } static ByteBuffer getPayload(ByteBuffer consumerMsg); static Pair<ByteBuffer, Date> getPayloadAndBornTime(ByteBuffer consumerMsg); static byte[] getPayload(byte[] consumerMsg); }
@Test public void getJsonPayloadByByteBuffer() { ProducerMessage<String> proMsg = new ProducerMessage<String>(); String expected = "Hello Ctrip"; proMsg.setTopic("kafka.SimpleTextTopic"); proMsg.setBody(expected); proMsg.setPartitionKey("MyPartition"); proMsg.setKey("MyKey"); proMsg.setBornTime(System.currentTimeMillis()); DefaultMessageCodec codec = new DefaultMessageCodec(); byte[] proMsgByte = codec.encode(proMsg); ByteBuffer byteBuffer = ByteBuffer.wrap(proMsgByte); ByteBuffer payload = MessageCodecUtils.getPayload(byteBuffer); Object actual = JSON.parseObject(payload.array(), String.class); assertEquals(expected, actual); } @Test public void getJsonPayloadByByteArray() { ProducerMessage<String> proMsg = new ProducerMessage<String>(); String expected = "Hello Ctrip"; proMsg.setTopic("kafka.SimpleTextTopic"); proMsg.setBody(expected); proMsg.setPartitionKey("MyPartition"); proMsg.setKey("MyKey"); proMsg.setBornTime(System.currentTimeMillis()); DefaultMessageCodec codec = new DefaultMessageCodec(); byte[] proMsgByte = codec.encode(proMsg); byte[] payload = MessageCodecUtils.getPayload(proMsgByte); Object actual = JSON.parseObject(payload, String.class); assertEquals(expected, actual); }
DefaultMessageCodec implements MessageCodec { @Override public BaseConsumerMessage<?> decode(String topic, ByteBuf buf, Class<?> bodyClazz) { Magic.readAndCheckMagic(buf); MessageCodecVersion version = getVersion(buf); return version.getHandler().decode(topic, buf, bodyClazz); } @Override void encode(ProducerMessage<?> msg, ByteBuf buf); @Override byte[] encode(ProducerMessage<?> msg); @Override PartialDecodedMessage decodePartial(ByteBuf buf); @Override BaseConsumerMessage<?> decode(String topic, ByteBuf buf, Class<?> bodyClazz); @Override void encodePartial(PartialDecodedMessage msg, ByteBuf buf); }
@Test(expected = IllegalArgumentException.class) public void testMagicNumberCheckFail() throws Exception { ByteBuf buf = Unpooled.buffer(); buf.writeBytes(new byte[] { 1, 2, 3, 4 }); MessageCodec codec = new DefaultMessageCodec(); codec.decode("topic", buf, String.class); } @Test(expected = IllegalArgumentException.class) public void testUnknowVersion() throws Exception { ByteBuf buf = Unpooled.buffer(); Magic.writeMagic(buf); buf.writeByte(100); MessageCodec codec = new DefaultMessageCodec(); codec.decode("topic", buf, String.class); }
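decode first validates the magic prefix and then reads a version byte to pick a handler; both a bad prefix and an unknown version fail with IllegalArgumentException, matching the two tests. A rough byte-array sketch of that two-step framing check (placeholder magic and version values, not the Netty ByteBuf API used above):

    // Sketch: magic check followed by version dispatch; unknown versions are rejected.
    public class DecodeDispatchSketch {
        static final byte[] MAGIC = new byte[] { 'H', 'M', 'S' }; // placeholder value
        static final byte VERSION_1 = 1;

        static String decode(byte[] frame) {
            for (int i = 0; i < MAGIC.length; i++) {
                if (i >= frame.length || frame[i] != MAGIC[i]) {
                    throw new IllegalArgumentException("Magic number mismatch");
                }
            }
            byte version = frame[MAGIC.length];
            if (version == VERSION_1) {
                return "decoded-by-v1-handler"; // a real handler would parse the body here
            }
            throw new IllegalArgumentException("Unknown codec version " + version);
        }

        public static void main(String[] args) {
            System.out.println(decode(new byte[] { 'H', 'M', 'S', 1 })); // ok
            decode(new byte[] { 'H', 'M', 'S', 100 });                   // throws IllegalArgumentException
        }
    }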
DefaultBrokerPartitionAssigningStrategy implements BrokerPartitionAssigningStrategy { @Override public Map<String, Assignment<Integer>> assign(Map<String, ClientContext> brokers, List<Topic> topics, Map<String, Assignment<Integer>> originAssignments) { Map<String, Assignment<Integer>> newAssignments = new HashMap<>(); if (topics != null && !topics.isEmpty()) { List<Entry<String, ClientContext>> brokerEntries = brokers != null && !brokers.isEmpty() ? new ArrayList<>( brokers.entrySet()) : new ArrayList<Entry<String, ClientContext>>(); int brokerPos = 0; int brokerCount = brokerEntries.size(); for (Topic topic : topics) { if (Endpoint.BROKER.equals(topic.getEndpointType())) { List<Partition> partitions = topic.getPartitions(); if (partitions != null && !partitions.isEmpty()) { Assignment<Integer> assignment = new Assignment<>(); newAssignments.put(topic.getName(), assignment); for (Partition partition : partitions) { Map<String, ClientContext> broker = new HashMap<>(); if (brokerCount > 0) { Entry<String, ClientContext> brokerEntry = brokerEntries.get(brokerPos); brokerPos = (brokerPos + 1) % brokerCount; broker.put(brokerEntry.getKey(), brokerEntry.getValue()); } assignment.addAssignment(partition.getId(), broker); } } } } } return newAssignments; } @Override Map<String, Assignment<Integer>> assign(Map<String, ClientContext> brokers, List<Topic> topics, Map<String, Assignment<Integer>> originAssignments); }
@Test public void testBrokerAdded() throws Exception { long now = System.currentTimeMillis(); Map<String, ClientContext> brokers = createBrokers(Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br1", "0.0.0.1", 1234, null, null, now) )); List<Topic> topics = new ArrayList<>(); topics.add(createTopic("t1", 2, Endpoint.BROKER)); topics.add(createTopic("t2", 2, Endpoint.KAFKA)); topics.add(createTopic("t3", 2, Endpoint.BROKER)); Map<String, Assignment<Integer>> originAssignments = createAssignments(Arrays.asList( new Pair<>(new Pair<>("t1", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t1", 1), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 1), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)) )); Map<String, Assignment<Integer>> assignments = m_strategy.assign(brokers, topics, originAssignments); assertAssignment(assignments, topics, Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br1", "0.0.0.1", 1234, null, null, now) )); } @Test public void testBrokerDeleted() throws Exception { long now = System.currentTimeMillis(); Map<String, ClientContext> brokers = createBrokers(Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now) )); List<Topic> topics = new ArrayList<>(); topics.add(createTopic("t1", 2, Endpoint.BROKER)); topics.add(createTopic("t2", 2, Endpoint.KAFKA)); topics.add(createTopic("t3", 2, Endpoint.BROKER)); Map<String, Assignment<Integer>> originAssignments = createAssignments(Arrays.asList( new Pair<>(new Pair<>("t1", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t1", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)) )); Map<String, Assignment<Integer>> assignments = m_strategy.assign(brokers, topics, originAssignments); assertAssignment(assignments, topics, Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now) )); } @Test public void testTopicAdded() throws Exception { long now = System.currentTimeMillis(); Map<String, ClientContext> brokers = createBrokers(Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br1", "0.0.0.1", 1234, null, null, now) )); List<Topic> topics = new ArrayList<>(); topics.add(createTopic("t1", 2, Endpoint.BROKER)); topics.add(createTopic("t2", 2, Endpoint.KAFKA)); topics.add(createTopic("t3", 2, Endpoint.BROKER)); topics.add(createTopic("t4", 2, Endpoint.BROKER)); Map<String, Assignment<Integer>> originAssignments = createAssignments(Arrays.asList( new Pair<>(new Pair<>("t1", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t1", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)) )); Map<String, Assignment<Integer>> assignments = m_strategy.assign(brokers, topics, originAssignments); assertAssignment(assignments, topics, Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br1", "0.0.0.1", 
1234, null, null, now) )); } @Test public void testTopicDeleted() throws Exception { long now = System.currentTimeMillis(); Map<String, ClientContext> brokers = createBrokers(Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br1", "0.0.0.1", 1234, null, null, now) )); List<Topic> topics = new ArrayList<>(); topics.add(createTopic("t1", 2, Endpoint.BROKER)); topics.add(createTopic("t2", 2, Endpoint.KAFKA)); Map<String, Assignment<Integer>> originAssignments = createAssignments(Arrays.asList( new Pair<>(new Pair<>("t1", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t1", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)) )); Map<String, Assignment<Integer>> assignments = m_strategy.assign(brokers, topics, originAssignments); assertAssignment(assignments, topics, Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br1", "0.0.0.1", 1234, null, null, now) )); } @Test public void testTopicAndBrokerAllChanged() throws Exception { long now = System.currentTimeMillis(); Map<String, ClientContext> brokers = createBrokers(Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br2", "0.0.0.2", 1234, null, null, now) )); List<Topic> topics = new ArrayList<>(); topics.add(createTopic("t1", 2, Endpoint.BROKER)); topics.add(createTopic("t2", 2, Endpoint.KAFKA)); topics.add(createTopic("t4", 2, Endpoint.BROKER)); Map<String, Assignment<Integer>> originAssignments = createAssignments(Arrays.asList( new Pair<>(new Pair<>("t1", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t1", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 0), new ClientContext("br0", "0.0.0.0", 1234, null, null, now)), new Pair<>(new Pair<>("t3", 1), new ClientContext("br1", "0.0.0.1", 1234, null, null, now)) )); Map<String, Assignment<Integer>> assignments = m_strategy.assign(brokers, topics, originAssignments); assertAssignment(assignments, topics, Arrays.asList( new ClientContext("br0", "0.0.0.0", 1234, null, null, now), new ClientContext("br2", "0.0.0.2", 1234, null, null, now) )); }
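The strategy above ignores the previous assignment and simply walks every topic whose endpoint type is BROKER, handing its partitions to the live brokers in round-robin order; with no live broker a partition ends up with an empty assignment. A simplified sketch of that round-robin loop over plain strings:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Sketch: round-robin the partitions of each topic across the currently known brokers.
    public class RoundRobinAssignSketch {
        static Map<String, List<String>> assign(List<String> brokers, Map<String, Integer> topicPartitionCounts) {
            Map<String, List<String>> result = new HashMap<>();
            int pos = 0;
            for (Map.Entry<String, Integer> topic : topicPartitionCounts.entrySet()) {
                List<String> owners = new ArrayList<>();
                for (int partition = 0; partition < topic.getValue(); partition++) {
                    if (!brokers.isEmpty()) {
                        owners.add(brokers.get(pos));
                        pos = (pos + 1) % brokers.size();
                    } else {
                        owners.add(null); // no live broker to assign
                    }
                }
                result.put(topic.getKey(), owners);
            }
            return result;
        }

        public static void main(String[] args) {
            Map<String, Integer> topics = new HashMap<>();
            topics.put("t1", 2);
            topics.put("t3", 2);
            System.out.println(assign(Arrays.asList("br0", "br1"), topics));
        }
    }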
HashPartitioningStrategy implements PartitioningStrategy { @Override public int computePartitionNo(String key, int partitionCount) { if (key == null) { return m_random.nextInt(Integer.MAX_VALUE) % partitionCount; } else { return (key.hashCode() == Integer.MIN_VALUE ? 0 : Math.abs(key.hashCode())) % partitionCount; } } @Override int computePartitionNo(String key, int partitionCount); }
@Test public void testNormal() throws Exception { for (int i = 0; i < 100000; i++) { int partitionNo = strategy.computePartitionNo(generateRandomString(100), 5); assertTrue(partitionNo < 5); assertTrue(partitionNo >= 0); } } @Test public void testNull() throws Exception { int partitionNo = strategy.computePartitionNo(null, 5); assertTrue(partitionNo < 5); assertTrue(partitionNo >= 0); }
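computePartitionNo maps a key into [0, partitionCount), guarding against Integer.MIN_VALUE because Math.abs(Integer.MIN_VALUE) is still negative, and picks a bounded random partition for null keys. A compact standalone sketch of the same rule:

    import java.util.Random;

    // Sketch of hash-based partition selection with the Integer.MIN_VALUE guard.
    public class HashPartitionSketch {
        private static final Random RANDOM = new Random();

        static int partitionOf(String key, int partitionCount) {
            if (key == null) {
                return RANDOM.nextInt(Integer.MAX_VALUE) % partitionCount;
            }
            int h = key.hashCode();
            // Math.abs(Integer.MIN_VALUE) == Integer.MIN_VALUE, so map that hash to 0 explicitly.
            return (h == Integer.MIN_VALUE ? 0 : Math.abs(h)) % partitionCount;
        }

        public static void main(String[] args) {
            System.out.println(partitionOf("order-123", 5)); // always in [0, 5)
            System.out.println(partitionOf(null, 5));        // random but still in [0, 5)
        }
    }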
RetryPolicyFactory { public static RetryPolicy create(String policyValue) { if (policyValue != null) { RetryPolicy policy = m_cache.get(policyValue); if (policy == null) { synchronized (m_cache) { policy = m_cache.get(policyValue); if (policy == null) { policy = createPolicy(policyValue); m_cache.put(policyValue, policy); } } } if (policy != null) { return policy; } } throw new IllegalArgumentException(String.format("Unknown retry policy for value %s", policyValue)); } static RetryPolicy create(String policyValue); }
@Test(expected = IllegalArgumentException.class) public void testInvalid1() throws Exception { RetryPolicyFactory.create(null); } @Test(expected = IllegalArgumentException.class) public void testInvalid2() throws Exception { RetryPolicyFactory.create("2"); } @Test(expected = IllegalArgumentException.class) public void testInvalid3() throws Exception { RetryPolicyFactory.create("2:"); } @Test(expected = IllegalArgumentException.class) public void testInvalid4() throws Exception { RetryPolicyFactory.create("2: "); } @Test(expected = IllegalArgumentException.class) public void testInvalid5() throws Exception { RetryPolicyFactory.create(" :1"); } @Test public void testValid() throws Exception { RetryPolicy policy = RetryPolicyFactory.create("1:[1,2]"); assertTrue(policy instanceof FrequencySpecifiedRetryPolicy); assertEquals(2, policy.getRetryTimes()); } @Test public void testValid2() throws Exception { RetryPolicy policy = RetryPolicyFactory.create("3:[10,5000]"); assertTrue(policy instanceof FixedIntervalRetryPolicy); assertEquals(10, policy.getRetryTimes()); assertEquals(5000, ((FixedIntervalRetryPolicy) policy).getIntervalMillis()); }
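create caches one parsed policy per policy string with a check-then-lock-then-recheck pattern around a shared map. The same caching idea can be written more compactly and thread-safely with ConcurrentHashMap.computeIfAbsent; the sketch below does that with a trivial stand-in parser and is not the project's code:

    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentMap;

    // Sketch: cache parsed values per raw string so each policy string is parsed at most once.
    public class ParsedPolicyCacheSketch {
        private static final ConcurrentMap<String, Integer> CACHE = new ConcurrentHashMap<>();

        static int create(String value) {
            if (value == null) {
                throw new IllegalArgumentException("Unknown policy value: null");
            }
            // computeIfAbsent parses once per distinct key, even under contention.
            return CACHE.computeIfAbsent(value, v -> Integer.parseInt(v.trim())); // stand-in parser
        }

        public static void main(String[] args) {
            System.out.println(create(" 42 ")); // parsed
            System.out.println(create(" 42 ")); // served from the cache
        }
    }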
PayloadCodecFactory { public static PayloadCodec getCodecByType(String codecString) { CodecDesc codecDesc = CodecDesc.valueOf(codecString); if (codecDesc == null) { throw new IllegalArgumentException(String.format("codec type '%s' is illegal", codecString)); } else if (codecDesc.getCompressionAlgo() == null) { return PlexusComponentLocator.lookup(PayloadCodec.class, codecString); } else { PayloadCodecCompositor cached = compositorCache.get(codecString); if (cached != null) { return cached; } else { synchronized (PayloadCodecFactory.class) { cached = compositorCache.get(codecString); if (cached == null) { cached = new PayloadCodecCompositor(codecString); cached.addPayloadCodec(PlexusComponentLocator.lookup(PayloadCodec.class, codecDesc.getCodec())); if (Codec.GZIP.equals(codecDesc.getCompressionAlgo())) { cached.addPayloadCodec(PlexusComponentLocator.lookup(PayloadCodec.class, Codec.GZIP)); } else if (Codec.DEFLATER.equals(codecDesc.getCompressionAlgo())) { int compressionLevel = 5; if (codecDesc.getLevel() != -1) { compressionLevel = codecDesc.getLevel(); } cached.addPayloadCodec(PlexusComponentLocator.lookup(PayloadCodec.class, Codec.DEFLATER + "-" + compressionLevel)); } compositorCache.put(codecString, cached); } } return cached; } } } static PayloadCodec getCodecByTopicName(String topicName); static PayloadCodec getCodecByType(String codecString); }
@Test public void test() throws Exception { PayloadCodec jsonGzipCodec = PayloadCodecFactory.getCodecByType("json,gzip"); PayloadCodec jsonCodec = PayloadCodecFactory.getCodecByType("json"); assertTrue(jsonGzipCodec instanceof PayloadCodecCompositor); String xml = IO.INSTANCE.readFrom(this.getClass().getResourceAsStream("/META-INF/dal/model/meta-codegen.xml"), "utf-8"); byte[] gzipEncoded = jsonGzipCodec.encode("topic", xml); byte[] encoded = jsonCodec.encode("topic", xml); assertTrue(gzipEncoded.length < encoded.length); String xmlDecodec = jsonGzipCodec.decode(gzipEncoded, String.class); assertEquals(xml, xmlDecodec); }
DefaultCMessagingConfigService implements CMessagingConfigService, Initializable { @Override public int getConsumerType(String exchangeName, String identifier, String ip) { Exchange exchange = findExchange(exchangeName); if (exchange == null || exchange.getConsume() == null) { return CMessagingConfigService.CONSUME_FROM_CMESSAGING; } else { if (Consume.GRAY.equalsIgnoreCase(exchange.getConsume().getState())) { ConsumeGroup group = exchange.getConsume().findConsumeGroup(identifier); if (group == null) { return CMessagingConfigService.CONSUME_FROM_CMESSAGING; } else { if (Consume.GRAY.equalsIgnoreCase(group.getState())) { Node node = group.findNode(ip); if (node == null) { return CMessagingConfigService.CONSUME_FROM_CMESSAGING; } else { return CMessagingConfigService.CONSUME_FROM_BOTH; } } else { return nonGrayConsumeStateToInt(group.getState()); } } } else { return nonGrayConsumeStateToInt(exchange.getConsume().getState()); } } } @Override String getTopic(String exchangeName); @Override String getGroupId(String exchangeName, String idAndQueueName); @Override boolean isHermesProducerEnabled(String exchangeName, String identifier, String ip); @Override int getConsumerType(String exchangeName, String identifier, String ip); @Override void initialize(); }
@Test public void testConsume() throws Exception { DefaultCMessagingConfigService s = createServiceWithXml("consume_both.xml"); assertEquals(CMessagingConfigService.CONSUME_FROM_CMESSAGING, s.getConsumerType("ex1", "cid0", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_CMESSAGING, s.getConsumerType("ex1", "cid1", "1.1.1.3")); assertEquals(CMessagingConfigService.CONSUME_FROM_CMESSAGING, s.getConsumerType("ex1", "unknown", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_CMESSAGING, s.getConsumerType("ex2", "unknown", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_CMESSAGING, s.getConsumerType("ex10", "unknown", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_BOTH, s.getConsumerType("ex1", "cid1", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_BOTH, s.getConsumerType("ex1", "cid1", "1.1.1.2")); assertEquals(CMessagingConfigService.CONSUME_FROM_BOTH, s.getConsumerType("ex1", "cid2", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_BOTH, s.getConsumerType("ex1", "cid2", "1.1.1.3")); assertEquals(CMessagingConfigService.CONSUME_FROM_BOTH, s.getConsumerType("ex3", "unknown", "1.1.1.3")); assertEquals(CMessagingConfigService.CONSUME_FROM_HERMES, s.getConsumerType("ex1", "cid3", "1.1.1.1")); assertEquals(CMessagingConfigService.CONSUME_FROM_HERMES, s.getConsumerType("ex1", "cid3", "1.1.1.3")); assertEquals(CMessagingConfigService.CONSUME_FROM_HERMES, s.getConsumerType("ex4", "cid1", "1.1.1.3")); }
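getConsumerType is a three-level gray-rollout decision: the exchange's consume state, then the consume group's state, then whether the caller's IP is listed as a gray node; anything not explicitly promoted keeps consuming from CMessaging, and a gray node consumes from both sides. A condensed standalone sketch of that decision ladder (the constants, state strings, and helper maps below are illustrative, not the project's model):

    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Set;

    // Sketch of a three-level gray rollout decision: exchange state -> group state -> node IP.
    public class GrayConsumeDecisionSketch {
        static final int FROM_CMESSAGING = 0, FROM_BOTH = 1, FROM_HERMES = 2; // illustrative values

        static int consumerType(String exchangeState, Map<String, String> groupStates,
                                Map<String, Set<String>> grayNodes, String group, String ip) {
            if (!"gray".equalsIgnoreCase(exchangeState)) {
                return "hermes".equalsIgnoreCase(exchangeState) ? FROM_HERMES : FROM_CMESSAGING;
            }
            String groupState = groupStates.get(group);
            if (groupState == null) {
                return FROM_CMESSAGING; // unknown group stays on the old path
            }
            if (!"gray".equalsIgnoreCase(groupState)) {
                return "hermes".equalsIgnoreCase(groupState) ? FROM_HERMES : FROM_CMESSAGING;
            }
            Set<String> nodes = grayNodes.get(group);
            return (nodes != null && nodes.contains(ip)) ? FROM_BOTH : FROM_CMESSAGING;
        }

        public static void main(String[] args) {
            Map<String, String> groupStates = new HashMap<>();
            groupStates.put("cid1", "gray");
            Map<String, Set<String>> grayNodes = new HashMap<>();
            grayNodes.put("cid1", new HashSet<>(Arrays.asList("1.1.1.1")));
            System.out.println(consumerType("gray", groupStates, grayNodes, "cid1", "1.1.1.1")); // 1 (both)
            System.out.println(consumerType("gray", groupStates, grayNodes, "cid1", "9.9.9.9")); // 0 (cmessaging)
        }
    }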
DefaultCMessagingConfigService implements CMessagingConfigService, Initializable { @Override public boolean isHermesProducerEnabled(String exchangeName, String identifier, String ip) { Exchange exchange = findExchange(exchangeName); if (exchange == null || exchange.getProduce() == null) { return false; } else { if (Produce.GRAY.equalsIgnoreCase(exchange.getProduce().getState())) { ProduceGroup group = exchange.getProduce().findProduceGroup(identifier); if (group == null) { return false; } else { if (Produce.GRAY.equalsIgnoreCase(group.getState())) { return group.findNode(ip) != null; } else { return nonGrayProduceStateToBoolean(group.getState()); } } } else { return nonGrayProduceStateToBoolean(exchange.getProduce().getState()); } } } @Override String getTopic(String exchangeName); @Override String getGroupId(String exchangeName, String idAndQueueName); @Override boolean isHermesProducerEnabled(String exchangeName, String identifier, String ip); @Override int getConsumerType(String exchangeName, String identifier, String ip); @Override void initialize(); }
@Test public void testProduce() throws Exception { DefaultCMessagingConfigService s = createServiceWithXml("consume_both.xml"); assertFalse(s.isHermesProducerEnabled("ex1", "pid0", "2.2.2.1")); assertFalse(s.isHermesProducerEnabled("ex1", "pid0", "2.2.2.3")); assertFalse(s.isHermesProducerEnabled("ex1", "pid3", "2.2.2.1")); assertFalse(s.isHermesProducerEnabled("ex2", "pid0", "2.2.2.1")); assertFalse(s.isHermesProducerEnabled("ex10", "pid0", "2.2.2.1")); assertTrue(s.isHermesProducerEnabled("ex1", "pid1", "2.2.2.1")); assertTrue(s.isHermesProducerEnabled("ex1", "pid1", "2.2.2.2")); assertFalse(s.isHermesProducerEnabled("ex1", "pid1", "2.2.2.3")); assertTrue(s.isHermesProducerEnabled("ex1", "pid2", "2.2.2.1")); assertTrue(s.isHermesProducerEnabled("ex1", "pid2", "2.2.2.3")); assertTrue(s.isHermesProducerEnabled("ex3", "pid10", "2.2.2.3")); }
DefaultCMessagingConfigService implements CMessagingConfigService, Initializable { @Override public String getGroupId(String exchangeName, String idAndQueueName) { Exchange exchange = findExchange(exchangeName); String result = null; if (exchange != null) { ConsumeGroup group = exchange.getConsume().findConsumeGroup(idAndQueueName); if (group != null) { result = group.getHermesConsumerGroup(); } } return result; } @Override String getTopic(String exchangeName); @Override String getGroupId(String exchangeName, String idAndQueueName); @Override boolean isHermesProducerEnabled(String exchangeName, String identifier, String ip); @Override int getConsumerType(String exchangeName, String identifier, String ip); @Override void initialize(); }
@Test public void testGetGroupId() throws Exception { DefaultCMessagingConfigService s = createServiceWithXml("consume_both.xml"); assertEquals("hermes-cid0", s.getGroupId("ex1", "cid0")); assertEquals("hermes-cid1", s.getGroupId("ex1", "cid1")); assertNull(s.getGroupId("ex1", "cid10")); }
DefaultCMessagingConfigService implements CMessagingConfigService, Initializable { @Override public String getTopic(String exchangeName) { Exchange exchange = findExchange(exchangeName); String result = null; if (exchange != null) { result = exchange.getHermesTopic(); } return result; } @Override String getTopic(String exchangeName); @Override String getGroupId(String exchangeName, String idAndQueueName); @Override boolean isHermesProducerEnabled(String exchangeName, String identifier, String ip); @Override int getConsumerType(String exchangeName, String identifier, String ip); @Override void initialize(); }
@Test public void testGetTopic() throws Exception { DefaultCMessagingConfigService s = createServiceWithXml("consume_both.xml"); assertEquals("hermes-ex1", s.getTopic("ex1")); assertEquals("hermes-ex2", s.getTopic("ex2")); assertNull(s.getTopic("ex10")); }
DefaultPullConsumerHolder implements PullConsumerHolder<T>, MessageListener<T> { @Override public PulledBatch<T> poll(int maxMessageCount, int timeout) { long startTime = System.currentTimeMillis(); Transaction t = Cat.newTransaction(CatConstants.TYPE_MESSAGE_CONSUME_POLL_TRIED, m_topic + ":" + m_group); try { PulledBatch<T> batch = retrive(maxMessageCount, timeout, RetrivePolicy.FAVOUR_FAST_RETURN); if (batch.getMessages() != null && !batch.getMessages().isEmpty()) { CatUtil.logElapse(CatConstants.TYPE_MESSAGE_CONSUME_POLL_ELAPSE, m_topic + ":" + m_group, startTime, batch .getMessages().size(), null, Transaction.SUCCESS); } return batch; } finally { t.setStatus(Transaction.SUCCESS); t.complete(); } } DefaultPullConsumerHolder(String topic, String groupId, int partitionCount, PullConsumerConfig config, AckManager ackManager, OffsetStorage offsetStorage, ConsumerConfig consumerConfig); @Override void onMessage(List<ConsumerMessage<T>> msgs); @Override PulledBatch<T> poll(int maxMessageCount, int timeout); @Override PulledBatch<T> collect(int maxMessageCount, int timeout); @Override void close(); void setConsumerHolder(ConsumerHolder consumerHolder); }
@Test public void testPollFail() { long start = System.currentTimeMillis(); int timeout = 10; int error = 100; List<ConsumerMessage<String>> msgs = holder.poll(1, timeout).getMessages(); assertTrue(System.currentTimeMillis() - start < config.getScanIntervalMax() + timeout + error); assertEquals(0, msgs.size()); }
DefaultPullConsumerHolder implements PullConsumerHolder<T>, MessageListener<T> { @Override public PulledBatch<T> collect(int maxMessageCount, int timeout) { long startTime = System.currentTimeMillis(); Transaction t = Cat.newTransaction(CatConstants.TYPE_MESSAGE_CONSUME_COLLECT_TRIED, m_topic + ":" + m_group); try { PulledBatch<T> batch = retrive(maxMessageCount, timeout, RetrivePolicy.FAVOUR_MORE_MESSAGE); if (batch.getMessages() != null && !batch.getMessages().isEmpty()) { CatUtil.logElapse(CatConstants.TYPE_MESSAGE_CONSUME_COLLECT_ELAPSE, m_topic + ":" + m_group, startTime, batch.getMessages().size(), null, Transaction.SUCCESS); } return batch; } finally { t.setStatus(Transaction.SUCCESS); t.complete(); } } DefaultPullConsumerHolder(String topic, String groupId, int partitionCount, PullConsumerConfig config, AckManager ackManager, OffsetStorage offsetStorage, ConsumerConfig consumerConfig); @Override void onMessage(List<ConsumerMessage<T>> msgs); @Override PulledBatch<T> poll(int maxMessageCount, int timeout); @Override PulledBatch<T> collect(int maxMessageCount, int timeout); @Override void close(); void setConsumerHolder(ConsumerHolder consumerHolder); }
@Test public void testCollectFail() { long start = System.currentTimeMillis(); int timeout = 10; int error = 100; List<ConsumerMessage<String>> msgs = holder.collect(1, timeout).getMessages(); assertTrue(System.currentTimeMillis() - start < config.getScanIntervalMax() + timeout + error); assertEquals(0, msgs.size()); }
KafkaService { public void resetConsumerOffset(String topic, String consumerGroup, RESET_POSITION position) { Properties prop = getProperties(topic, consumerGroup); KafkaConsumer<String, byte[]> consumer = new KafkaConsumer<String, byte[]>(prop); List<PartitionInfo> partitionInfos = consumer.partitionsFor(topic); List<TopicPartition> topicPartitions = new ArrayList<TopicPartition>(); for (PartitionInfo partitionInfo : partitionInfos) { TopicPartition tp = new TopicPartition(partitionInfo.topic(), partitionInfo.partition()); topicPartitions.add(tp); } consumer.assign(topicPartitions); Set<TopicPartition> assignment = consumer.assignment(); List<PartitionInfo> partitions = consumer.partitionsFor(topic); if (assignment.size() != partitions.size()) { m_logger.warn("ASSIGNMENTS: " + assignment); m_logger.warn("PARTITIONS: " + partitions); m_logger.warn("Could not match, reset failed"); consumer.close(); return; } for (TopicPartition tp : assignment) { long before = consumer.position(tp); if (position == RESET_POSITION.EARLIEST) { consumer.seekToBeginning(tp); } else if (position == RESET_POSITION.LATEST) { consumer.seekToEnd(tp); } long after = consumer.position(tp); m_logger.info("Reset partition: {} From: {} To: {}", tp.partition(), before, after); } consumer.commitSync(); consumer.close(); m_logger.info("Reset offset Done"); } void resetConsumerOffset(String topic, String consumerGroup, RESET_POSITION position); }
@Test public void resetToEarliestTest() throws IOException, InterruptedException, ExecutionException { String topic = "kafka.SimpleTextTopic1"; kafkaCluster.createTopic(topic, 3, 1); TopicMetadata topicMeta = kafkaCluster.waitTopicUntilReady(topic); System.out.println(topicMeta); String group = "SimpleTextTopic1Group"; List<String> expected = new ArrayList<String>(); expected.add("abc"); expected.add("DEF"); expected.add("#$%"); expected.add(" 23"); expected.add("+- "); expected.add(" # "); final List<String> actual = new ArrayList<String>(); Producer producer = Producer.getInstance(); ConsumerHolder consumer = Consumer.getInstance().start(topic, group, new BaseMessageListener<String>() { @Override protected void onMessage(ConsumerMessage<String> msg) { String body = msg.getBody(); actual.add(body); System.out.println("Receive1: " + body); } }); System.out.println("Starting consumer..."); Thread.sleep(CONSUMER_WAIT_BEFORE_READY); for (int i = 0; i < expected.size(); i++) { String proMsg = expected.get(i); MessageHolder holder = producer.message(topic, String.valueOf(i), proMsg); KafkaFuture future = (KafkaFuture) holder.send(); KafkaSendResult result = future.get(); System.out.println(String.format("Sent:%s, Partition:%s, Offset:%s", proMsg, result.getPartition(), result.getOffset())); } int sleepCount = 0; while (actual.size() < expected.size() && sleepCount++ < 50) { Thread.sleep(100); } consumer.close(); Assert.assertEquals(expected.size(), actual.size()); Assert.assertEquals(new HashSet<String>(expected), new HashSet<String>(actual)); KafkaService kafkaService = PlexusComponentLocator.lookup(KafkaService.class); kafkaService.resetConsumerOffset(topic, group, KafkaService.RESET_POSITION.EARLIEST); consumer = Consumer.getInstance().start(topic, group, new BaseMessageListener<String>() { @Override protected void onMessage(ConsumerMessage<String> msg) { String body = msg.getBody(); actual.add(body); System.out.println("Receive1: " + body); } }); System.out.println("Starting consumer..."); Thread.sleep(CONSUMER_WAIT_BEFORE_READY); sleepCount = 0; while (actual.size() < expected.size() * 2 && sleepCount++ < 50) { Thread.sleep(100); } consumer.close(); Assert.assertEquals(expected.size() * 2, actual.size()); Assert.assertEquals(new HashSet<String>(expected), new HashSet<String>(actual)); KafkaMessageSender kafkaSender = (KafkaMessageSender) PlexusComponentLocator.lookup(MessageSender.class, Endpoint.KAFKA); kafkaSender.close(); } @Test public void resetToLargestTest() throws IOException, InterruptedException, ExecutionException { String topic = "kafka.SimpleTextTopic2"; kafkaCluster.createTopic(topic, 3, 1); TopicMetadata topicMeta = kafkaCluster.waitTopicUntilReady(topic); System.out.println(topicMeta); String group = "SimpleTextTopic2Group"; List<String> expected = new ArrayList<String>(); expected.add("abc"); expected.add("DEF"); expected.add("#$%"); expected.add(" 23"); expected.add("+- "); expected.add(" # "); final List<String> actual = new ArrayList<String>(); Producer producer = Producer.getInstance(); ConsumerHolder consumer = Consumer.getInstance().start(topic, group, new BaseMessageListener<String>() { @Override protected void onMessage(ConsumerMessage<String> msg) { String body = msg.getBody(); actual.add(body); System.out.println("Receive1: " + body); } }); System.out.println("Starting consumer..."); Thread.sleep(CONSUMER_WAIT_BEFORE_READY); for (int i = 0; i < expected.size(); i++) { String proMsg = expected.get(i); MessageHolder holder = producer.message(topic, 
String.valueOf(i), proMsg); KafkaFuture future = (KafkaFuture) holder.send(); KafkaSendResult result = future.get(); System.out.println(String.format("Sent:%s, Partition:%s, Offset:%s", proMsg, result.getPartition(), result.getOffset())); } int sleepCount = 0; while (actual.size() < expected.size() && sleepCount++ < 50) { Thread.sleep(100); } consumer.close(); Assert.assertEquals(expected.size(), actual.size()); Assert.assertEquals(new HashSet<String>(expected), new HashSet<String>(actual)); for (int i = 0; i < expected.size(); i++) { String proMsg = expected.get(i); MessageHolder holder = producer.message(topic, String.valueOf(i), proMsg); KafkaFuture future = (KafkaFuture) holder.send(); KafkaSendResult result = future.get(); System.out.println(String.format("Sent:%s, Partition:%s, Offset:%s", proMsg, result.getPartition(), result.getOffset())); } KafkaService kafkaService = PlexusComponentLocator.lookup(KafkaService.class); kafkaService.resetConsumerOffset(topic, group, KafkaService.RESET_POSITION.LATEST); consumer = Consumer.getInstance().start(topic, group, new BaseMessageListener<String>() { @Override protected void onMessage(ConsumerMessage<String> msg) { String body = msg.getBody(); actual.add(body); System.out.println("Receive1: " + body); } }); System.out.println("Starting consumer..."); Thread.sleep(CONSUMER_WAIT_BEFORE_READY); for (int i = 0; i < expected.size(); i++) { String proMsg = expected.get(i); MessageHolder holder = producer.message(topic, String.valueOf(i), proMsg); KafkaFuture future = (KafkaFuture) holder.send(); KafkaSendResult result = future.get(); System.out.println(String.format("Sent:%s, Partition:%s, Offset:%s", proMsg, result.getPartition(), result.getOffset())); } sleepCount = 0; while (actual.size() < expected.size() * 2 && sleepCount++ < 50) { Thread.sleep(100); } consumer.close(); Assert.assertEquals(expected.size() * 2, actual.size()); Assert.assertEquals(new HashSet<String>(expected), new HashSet<String>(actual)); KafkaMessageSender kafkaSender = (KafkaMessageSender) PlexusComponentLocator.lookup(MessageSender.class, Endpoint.KAFKA); kafkaSender.close(); }
DefaultFilter implements Filter, Initializable { @Override public boolean isMatch(String topicName, String filter, Map<String, String> source) { try { Map<String, String> conditions = m_filterConditionCache.get(filter); for (Entry<String, String> entry : conditions.entrySet()) { String sourceValue = source.get(entry.getKey()); if (StringUtils.isBlank(sourceValue) || !matches(topicName, sourceValue, entry.getValue())) { return false; } } } catch (ExecutionException e) { log.error("Can not find matchedCache for topic: {}", topicName, e); return false; } return true; } @Override boolean isMatch(String topicName, String filter, Map<String, String> source); @Override void initialize(); }
@Test public void testFilter() { int topicCount = 5000; int loopCount = 1000000, loop = 500; int maxTagCount = 20000; List<String> topics = randomStrings(topicCount, 256); Filter filter = lookup(Filter.class); Map<String, List<String>> pattern2Tags = new HashMap<>(); for (String pattern : m_patterns) { pattern2Tags.put(pattern, generateTags(pattern, m_rand.nextBoolean() ? m_rand.nextBoolean() ? m_rand.nextInt(maxTagCount) : m_rand.nextInt(500) : 1)); } List<Pair<String, Map<String, String>>> list = new ArrayList<>(); for (int i = 0; i < loopCount; i++) { list.add(generateSource(m_rand.nextInt(10) < 7, pattern2Tags)); } for (int i = 0; i < loop; i++) { long begin = System.currentTimeMillis(); for (int j = 0; j < loopCount; j++) { Pair<String, Map<String, String>> seed = list.get(m_rand.nextInt(list.size())); filter.isMatch(topics.get(m_rand.nextInt(topicCount)), seed.getKey(), seed.getValue()); } System.out.println("cost: " + (System.currentTimeMillis() - begin)); } }
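isMatch requires every key of the parsed filter condition to be present in the message's source properties and to match; any blank or mismatching entry rejects the message. The real matcher delegates value comparison to a pattern-aware matches() helper; the sketch below uses plain equality for brevity:

    import java.util.HashMap;
    import java.util.Map;

    // Sketch: a message matches only if every filter condition is present and equal in its properties.
    public class AllConditionsMatchSketch {
        static boolean isMatch(Map<String, String> conditions, Map<String, String> source) {
            for (Map.Entry<String, String> condition : conditions.entrySet()) {
                String actual = source.get(condition.getKey());
                if (actual == null || actual.trim().isEmpty() || !actual.equals(condition.getValue())) {
                    return false; // any miss rejects the message
                }
            }
            return true;
        }

        public static void main(String[] args) {
            Map<String, String> conditions = new HashMap<>();
            conditions.put("region", "sha");
            Map<String, String> source = new HashMap<>();
            source.put("region", "sha");
            source.put("type", "order");
            System.out.println(isMatch(conditions, source)); // true
            source.put("region", "pek");
            System.out.println(isMatch(conditions, source)); // false
        }
    }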
MySQLMessageQueueStorage implements MessageQueueStorage, Initializable { @Override public synchronized Object findLastOffset(Tpp tpp, int groupId) throws DalException { String topic = tpp.getTopic(); int partition = tpp.getPartition(); int priority = tpp.getPriorityInt(); if (!hasStorageForPriority(tpp.getTopic(), tpp.getPriorityInt())) { return 0L; } return findLastOffset(topic, partition, priority, groupId).getOffset(); } @Override void appendMessages(String topicName, int partition, boolean priority, Collection<MessageBatchWithRawData> batches); @Override synchronized Object findLastOffset(Tpp tpp, int groupId); @Override FetchResult fetchMessages(Tpp tpp, List<Object> offsets); @Override FetchResult fetchMessages(Tpp tpp, Object startOffset, int batchSize, String filter); @Override void nack(Tpp tpp, String groupId, boolean resend, List<Pair<Long, MessageMeta>> msgId2Metas); @Override void ack(Tpp tpp, String groupId, boolean resend, long msgSeq); @Override synchronized Object findLastResendOffset(Tpg tpg); @SuppressWarnings("unchecked") @Override FetchResult fetchResendMessages(Tpg tpg, Object startOffset, int batchSize); @Override Object findMessageOffsetByTime(Tpp tpp, long time); @Override void initialize(); }
@Test public void testFindLastOffset() throws Exception { Tpp tpp = new Tpp("order_new", 0, true); s.findLastOffset(tpp, 100); }
MySQLMessageQueueStorage implements MessageQueueStorage, Initializable { @Override public synchronized Object findLastResendOffset(Tpg tpg) throws DalException { int intGroupId = m_metaService.translateToIntGroupId(tpg.getTopic(), tpg.getGroupId()); OffsetResend offset = findLastResendOffset(tpg.getTopic(), tpg.getPartition(), intGroupId); return new Pair<>(offset.getLastScheduleDate(), offset.getLastId()); } @Override void appendMessages(String topicName, int partition, boolean priority, Collection<MessageBatchWithRawData> batches); @Override synchronized Object findLastOffset(Tpp tpp, int groupId); @Override FetchResult fetchMessages(Tpp tpp, List<Object> offsets); @Override FetchResult fetchMessages(Tpp tpp, Object startOffset, int batchSize, String filter); @Override void nack(Tpp tpp, String groupId, boolean resend, List<Pair<Long, MessageMeta>> msgId2Metas); @Override void ack(Tpp tpp, String groupId, boolean resend, long msgSeq); @Override synchronized Object findLastResendOffset(Tpg tpg); @SuppressWarnings("unchecked") @Override FetchResult fetchResendMessages(Tpg tpg, Object startOffset, int batchSize); @Override Object findMessageOffsetByTime(Tpp tpp, long time); @Override void initialize(); }
@Test public void testFindLastResendOffset() throws Exception { Tpg tpg = new Tpg("order_new", 0, "group1"); s.findLastResendOffset(tpg); }
MySQLMessageQueueStorage implements MessageQueueStorage, Initializable { @Override public FetchResult fetchMessages(Tpp tpp, List<Object> offsets) { List<MessagePriority> msgs = new ArrayList<MessagePriority>(); if (!hasStorageForPriority(tpp.getTopic(), tpp.getPriorityInt())) { return buildFetchResult(tpp, msgs, null); } for (Long[] subOffsets : splitOffsets(offsets)) { try { msgs.addAll(m_msgDao.findWithOffsets(tpp.getTopic(), tpp.getPartition(), tpp.getPriorityInt(), subOffsets, MessagePriorityEntity.READSET_FULL)); } catch (Exception e) { log.error("Failed to fetch message({}).", tpp, e); continue; } } return buildFetchResult(tpp, msgs, null); } @Override void appendMessages(String topicName, int partition, boolean priority, Collection<MessageBatchWithRawData> batches); @Override synchronized Object findLastOffset(Tpp tpp, int groupId); @Override FetchResult fetchMessages(Tpp tpp, List<Object> offsets); @Override FetchResult fetchMessages(Tpp tpp, Object startOffset, int batchSize, String filter); @Override void nack(Tpp tpp, String groupId, boolean resend, List<Pair<Long, MessageMeta>> msgId2Metas); @Override void ack(Tpp tpp, String groupId, boolean resend, long msgSeq); @Override synchronized Object findLastResendOffset(Tpg tpg); @SuppressWarnings("unchecked") @Override FetchResult fetchResendMessages(Tpg tpg, Object startOffset, int batchSize); @Override Object findMessageOffsetByTime(Tpp tpp, long time); @Override void initialize(); }
@Test public void testFetchMessages() throws Exception { Tpp tpp = new Tpp("order_new", 0, true); FetchResult result = s.fetchMessages(tpp, 0L, 10, null); ByteBuf out = Unpooled.buffer(); result.getBatch().getTransferCallback().transfer(out); assertTrue(out.readableBytes() > 0); assertTrue(!result.getBatch().getMessageMetas().isEmpty()); }
MySQLMessageQueueStorage implements MessageQueueStorage, Initializable { @Override public void appendMessages(String topicName, int partition, boolean priority, Collection<MessageBatchWithRawData> batches) throws Exception { List<MessagePriority> msgs = new LinkedList<>(); Topic topic = m_metaService.findTopicByName(topicName); int count = 0; long bytes = 0; for (MessageBatchWithRawData batch : batches) { List<PartialDecodedMessage> pdmsgs = batch.getMessages(); BrokerStatusMonitor.INSTANCE.msgSaved(topicName, partition, pdmsgs.size()); for (PartialDecodedMessage pdmsg : pdmsgs) { count++; bytes += pdmsg.getBody().readableBytes() + pdmsg.getDurableProperties().readableBytes(); MessagePriority msg = new MessagePriority(); msg.setAttributes(pdmsg.readDurableProperties()); msg.setCreationDate(new Date(pdmsg.getBornTime())); msg.setPartition(partition); msg.setPayload(new ByteBufInputStream(pdmsg.getBody().duplicate())); if (topic.isPriorityMessageEnabled()) { msg.setPriority(priority ? 0 : 1); } else { msg.setPriority(1); } msg.setProducerId(0); msg.setProducerIp(""); msg.setRefKey(pdmsg.getKey()); msg.setTopic(topicName); msg.setCodecType(pdmsg.getBodyCodecType()); msgs.add(msg); } } if (!msgs.isEmpty()) { batchInsert(topic, partition, priority, msgs); } if (count > 0) { logSelecotrMetric(topicName, partition, priority, count); } if (bytes > 0) { try { CatUtil.logEventPeriodically( CatConstants.TYPE_MESSAGE_BROKER_PRODUCE_BYTES + topic.getPartitions().get(partition).getWriteDatasource(), topicName, bytes); } catch (Exception e) { log.warn("Exception occurred while loging bytes for {}-{}", topicName, partition, e); } } } @Override void appendMessages(String topicName, int partition, boolean priority, Collection<MessageBatchWithRawData> batches); @Override synchronized Object findLastOffset(Tpp tpp, int groupId); @Override FetchResult fetchMessages(Tpp tpp, List<Object> offsets); @Override FetchResult fetchMessages(Tpp tpp, Object startOffset, int batchSize, String filter); @Override void nack(Tpp tpp, String groupId, boolean resend, List<Pair<Long, MessageMeta>> msgId2Metas); @Override void ack(Tpp tpp, String groupId, boolean resend, long msgSeq); @Override synchronized Object findLastResendOffset(Tpg tpg); @SuppressWarnings("unchecked") @Override FetchResult fetchResendMessages(Tpg tpg, Object startOffset, int batchSize); @Override Object findMessageOffsetByTime(Tpp tpp, long time); @Override void initialize(); }
@Test public void testAppendMessages() throws Exception { String topic = "order_new"; Collection<MessageBatchWithRawData> batches = new ArrayList<>(); ByteBuf rawData = Unpooled.buffer(); MessageBatchWithRawData batch = new MessageBatchWithRawData(topic, Arrays.asList(1), rawData, null); batches.add(batch); s.appendMessages(topic, 0, true, batches); }
MySQLMessageQueueStorage implements MessageQueueStorage, Initializable { @Override public void ack(Tpp tpp, String groupId, boolean resend, long msgSeq) { try { String topic = tpp.getTopic(); int partition = tpp.getPartition(); int intGroupId = m_metaService.translateToIntGroupId(tpp.getTopic(), groupId); if (resend) { OffsetResend proto = getOffsetResend(topic, partition, intGroupId); proto.setTopic(topic); proto.setPartition(partition); proto.setLastScheduleDate(new Date()); proto.setLastId(msgSeq); m_ackFlusher.ackOffsetResend(proto); } else { OffsetMessage proto = getOffsetMessage(tpp, intGroupId); proto.setTopic(topic); proto.setPartition(partition); proto.setOffset(msgSeq); m_ackFlusher.ackOffsetMessage(proto); } } catch (DalException e) { log.error("Failed to ack messages(topic={}, partition={}, priority={}, groupId={}).", tpp.getTopic(), tpp.getPartition(), tpp.isPriority(), groupId, e); } } @Override void appendMessages(String topicName, int partition, boolean priority, Collection<MessageBatchWithRawData> batches); @Override synchronized Object findLastOffset(Tpp tpp, int groupId); @Override FetchResult fetchMessages(Tpp tpp, List<Object> offsets); @Override FetchResult fetchMessages(Tpp tpp, Object startOffset, int batchSize, String filter); @Override void nack(Tpp tpp, String groupId, boolean resend, List<Pair<Long, MessageMeta>> msgId2Metas); @Override void ack(Tpp tpp, String groupId, boolean resend, long msgSeq); @Override synchronized Object findLastResendOffset(Tpg tpg); @SuppressWarnings("unchecked") @Override FetchResult fetchResendMessages(Tpg tpg, Object startOffset, int batchSize); @Override Object findMessageOffsetByTime(Tpp tpp, long time); @Override void initialize(); }
@Test public void testAckMessage() throws Exception { Tpp tpp = new Tpp("order_new", 0, true); s.ack(tpp, "group1", false, 222); } @Test public void testAckResend() throws Exception { Tpp tpp = new Tpp("order_new", 0, true); s.ack(tpp, "group1", true, 1); }
TimeFormatter implements StringFormatter<Long> { @Override public String format(Long millis) { long diff = System.currentTimeMillis() - millis; if (diff < TimeUnit.HOURS.toMillis(1)) { return resolver.get(R.string.label_minutes, Math.max(1, TimeUnit.MILLISECONDS.toMinutes(diff))); } if (diff < TimeUnit.DAYS.toMillis(1)) { return resolver.get(R.string.label_hours, Math.max(1, TimeUnit.MILLISECONDS.toHours(diff))); } return format.format(new Date(millis)); } @Inject TimeFormatter(StringResolver resolver); private TimeFormatter(StringResolver resolver, SimpleDateFormat format); @Override String format(Long millis); }
@Test public void shouldShowAtLeastOneMinute() throws Exception { given(resolver.get(eq(R.string.label_minutes), anyString())).willAnswer(new StringAnswer(StringAnswer.MINUTES)); assertThat(formatter.format(System.currentTimeMillis())).isEqualTo("1m"); } @Test public void shouldShowUpToOneHour() throws Exception { given(resolver.get(eq(R.string.label_minutes), anyString())).willAnswer(new StringAnswer(StringAnswer.MINUTES)); assertThat(formatter.format((System.currentTimeMillis() - TimeUnit.HOURS.toMillis(1)) + 1)).isEqualTo("59m"); } @Test public void shouldShowOneHour() throws Exception { given(resolver.get(eq(R.string.label_hours), anyString())).willAnswer(new StringAnswer(StringAnswer.HOURS)); assertThat(formatter.format(System.currentTimeMillis() - TimeUnit.HOURS.toMillis(1))).isEqualTo("1h"); } @Test public void shouldShowUpToOneDay() throws Exception { given(resolver.get(eq(R.string.label_hours), anyString())).willAnswer(new StringAnswer(StringAnswer.HOURS)); assertThat(formatter.format((System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1)) + 1)).isEqualTo("23h"); } @Test @SuppressLint("SimpleDateFormat") public void shouldShowFormattedDate() throws Exception { long yesterday = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(1); String formatted = new SimpleDateFormat("MMM dd").format(new Date(yesterday)); assertThat(formatter.format(yesterday)).isEqualTo(formatted); }
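A quick sketch of the boundary arithmetic the tests above lean on; it uses only java.util.concurrent.TimeUnit, and the thresholds are the ones visible in format() itself (the "59m"/"1h" renderings come from the tests' StringAnswer stubs):

import java.util.concurrent.TimeUnit;

class TimeFormatterBoundaries {
    public static void main(String[] args) {
        long oneHour = TimeUnit.HOURS.toMillis(1); // 3,600,000 ms
        // One millisecond short of an hour still lands in the minutes branch: prints 59 ("59m" in the tests).
        System.out.println(TimeUnit.MILLISECONDS.toMinutes(oneHour - 1));
        // Exactly one hour fails the diff < 1h check and falls through to the hours branch: prints 1 ("1h").
        System.out.println(TimeUnit.MILLISECONDS.toHours(oneHour));
        // Anything a full day old or older skips both branches and is rendered with SimpleDateFormat("MMM dd"),
        // as shouldShowFormattedDate demonstrates.
    }
}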
StringResolver { public String get(@StringRes int resId, Object... args) { return resources.getString(resId, args); } @Inject StringResolver(Resources resources); String get(@StringRes int resId, Object... args); }
@Test public void shouldGetStringWithoutArguments() throws Exception { resolver.get(R.string.application); then(resources).should().getString(R.string.application, new Object[0]); } @Test @SuppressLint("StringFormatInvalid") public void shouldGetStringWithArguments() throws Exception { resolver.get(R.string.application, "arg0", "arg1"); then(resources).should().getString(R.string.application, "arg0", "arg1"); }
HurlStack implements HttpStack { @SuppressWarnings("deprecation") static void setConnectionParametersForRequest(HttpURLConnection connection, Request<?> request) throws IOException, AuthFailureError { switch (request.getMethod()) { case Method.DEPRECATED_GET_OR_POST: byte[] postBody = request.getPostBody(); if (postBody != null) { connection.setRequestMethod("POST"); addBody(connection, request, postBody); } break; case Method.GET: connection.setRequestMethod("GET"); break; case Method.DELETE: connection.setRequestMethod("DELETE"); break; case Method.POST: connection.setRequestMethod("POST"); addBodyIfExists(connection, request); break; case Method.PUT: connection.setRequestMethod("PUT"); addBodyIfExists(connection, request); break; case Method.HEAD: connection.setRequestMethod("HEAD"); break; case Method.OPTIONS: connection.setRequestMethod("OPTIONS"); break; case Method.TRACE: connection.setRequestMethod("TRACE"); break; case Method.PATCH: connection.setRequestMethod("PATCH"); addBodyIfExists(connection, request); break; default: throw new IllegalStateException("Unknown method type."); } } HurlStack(); HurlStack(UrlRewriter urlRewriter); HurlStack(UrlRewriter urlRewriter, SSLSocketFactory sslSocketFactory); @Override HttpResponse performRequest(Request<?> request, Map<String, String> additionalHeaders); }
@Test public void connectionForDeprecatedGetRequest() throws Exception { TestRequest.DeprecatedGet request = new TestRequest.DeprecatedGet(); assertEquals(request.getMethod(), Method.DEPRECATED_GET_OR_POST); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("GET", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForDeprecatedPostRequest() throws Exception { TestRequest.DeprecatedPost request = new TestRequest.DeprecatedPost(); assertEquals(request.getMethod(), Method.DEPRECATED_GET_OR_POST); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("POST", mMockConnection.getRequestMethod()); assertTrue(mMockConnection.getDoOutput()); } @Test public void connectionForGetRequest() throws Exception { TestRequest.Get request = new TestRequest.Get(); assertEquals(request.getMethod(), Method.GET); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("GET", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForPostRequest() throws Exception { TestRequest.Post request = new TestRequest.Post(); assertEquals(request.getMethod(), Method.POST); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("POST", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForPostWithBodyRequest() throws Exception { TestRequest.PostWithBody request = new TestRequest.PostWithBody(); assertEquals(request.getMethod(), Method.POST); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("POST", mMockConnection.getRequestMethod()); assertTrue(mMockConnection.getDoOutput()); } @Test public void connectionForPutRequest() throws Exception { TestRequest.Put request = new TestRequest.Put(); assertEquals(request.getMethod(), Method.PUT); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("PUT", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForPutWithBodyRequest() throws Exception { TestRequest.PutWithBody request = new TestRequest.PutWithBody(); assertEquals(request.getMethod(), Method.PUT); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("PUT", mMockConnection.getRequestMethod()); assertTrue(mMockConnection.getDoOutput()); } @Test public void connectionForDeleteRequest() throws Exception { TestRequest.Delete request = new TestRequest.Delete(); assertEquals(request.getMethod(), Method.DELETE); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("DELETE", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForHeadRequest() throws Exception { TestRequest.Head request = new TestRequest.Head(); assertEquals(request.getMethod(), Method.HEAD); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("HEAD", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForOptionsRequest() throws Exception { TestRequest.Options request = new TestRequest.Options(); assertEquals(request.getMethod(), Method.OPTIONS); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("OPTIONS", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void 
connectionForTraceRequest() throws Exception { TestRequest.Trace request = new TestRequest.Trace(); assertEquals(request.getMethod(), Method.TRACE); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("TRACE", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForPatchRequest() throws Exception { TestRequest.Patch request = new TestRequest.Patch(); assertEquals(request.getMethod(), Method.PATCH); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("PATCH", mMockConnection.getRequestMethod()); assertFalse(mMockConnection.getDoOutput()); } @Test public void connectionForPatchWithBodyRequest() throws Exception { TestRequest.PatchWithBody request = new TestRequest.PatchWithBody(); assertEquals(request.getMethod(), Method.PATCH); HurlStack.setConnectionParametersForRequest(mMockConnection, request); assertEquals("PATCH", mMockConnection.getRequestMethod()); assertTrue(mMockConnection.getDoOutput()); }
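For orientation, a hedged sketch of the kind of concrete Volley request that would drive the PATCH branch above; the URL is a placeholder and this assumes the stock StringRequest constructor that accepts a method constant:

import com.android.volley.Request;
import com.android.volley.Response;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.StringRequest;

class PatchRequestSketch {
    // setConnectionParametersForRequest(...) would call connection.setRequestMethod("PATCH") for this
    // request, and only switch the connection to output mode if the request carries a body (it does not
    // here, matching connectionForPatchRequest's assertFalse(getDoOutput())).
    static StringRequest buildPatch() {
        return new StringRequest(Request.Method.PATCH, "http://example.com/resource",
                new Response.Listener<String>() {
                    @Override public void onResponse(String response) { /* ignored in the sketch */ }
                },
                new Response.ErrorListener() {
                    @Override public void onErrorResponse(VolleyError error) { /* ignored in the sketch */ }
                });
    }
}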
SpinedBuffer extends AbstractSpinedBuffer implements Consumer<E>, Iterable<E> { public Spliterator<E> spliterator() { class Splitr implements Spliterator<E> { int splSpineIndex; final int lastSpineIndex; int splElementIndex; final int lastSpineElementFence; E[] splChunk; Splitr(int firstSpineIndex, int lastSpineIndex, int firstSpineElementIndex, int lastSpineElementFence) { this.splSpineIndex = firstSpineIndex; this.lastSpineIndex = lastSpineIndex; this.splElementIndex = firstSpineElementIndex; this.lastSpineElementFence = lastSpineElementFence; assert spine != null || firstSpineIndex == 0 && lastSpineIndex == 0; splChunk = (spine == null) ? curChunk : spine[firstSpineIndex]; } @Override public long estimateSize() { return (splSpineIndex == lastSpineIndex) ? (long) lastSpineElementFence - splElementIndex : priorElementCount[lastSpineIndex] + lastSpineElementFence - priorElementCount[splSpineIndex] - splElementIndex; } @Override public int characteristics() { return SPLITERATOR_CHARACTERISTICS; } @Override public boolean tryAdvance(Consumer<? super E> consumer) { Objects.requireNonNull(consumer); if (splSpineIndex < lastSpineIndex || (splSpineIndex == lastSpineIndex && splElementIndex < lastSpineElementFence)) { consumer.accept(splChunk[splElementIndex++]); if (splElementIndex == splChunk.length) { splElementIndex = 0; ++splSpineIndex; if (spine != null && splSpineIndex <= lastSpineIndex) splChunk = spine[splSpineIndex]; } return true; } return false; } @Override public void forEachRemaining(Consumer<? super E> consumer) { Objects.requireNonNull(consumer); if (splSpineIndex < lastSpineIndex || (splSpineIndex == lastSpineIndex && splElementIndex < lastSpineElementFence)) { int i = splElementIndex; for (int sp = splSpineIndex; sp < lastSpineIndex; sp++) { E[] chunk = spine[sp]; for (; i < chunk.length; i++) { consumer.accept(chunk[i]); } i = 0; } E[] chunk = (splSpineIndex == lastSpineIndex) ? splChunk : spine[lastSpineIndex]; int hElementIndex = lastSpineElementFence; for (; i < hElementIndex; i++) { consumer.accept(chunk[i]); } splSpineIndex = lastSpineIndex; splElementIndex = lastSpineElementFence; } } @Override public Spliterator<E> trySplit() { if (splSpineIndex < lastSpineIndex) { Spliterator<E> ret = new Splitr(splSpineIndex, lastSpineIndex - 1, splElementIndex, spine[lastSpineIndex-1].length); splSpineIndex = lastSpineIndex; splElementIndex = 0; splChunk = spine[splSpineIndex]; return ret; } else if (splSpineIndex == lastSpineIndex) { int t = (lastSpineElementFence - splElementIndex) / 2; if (t == 0) return null; else { Spliterator<E> ret = Arrays.spliterator(splChunk, splElementIndex, splElementIndex + t); splElementIndex += t; return ret; } } else { return null; } } } return new Splitr(0, spineIndex, 0, elementIndex); } @SuppressWarnings("unchecked") SpinedBuffer(int initialCapacity); @SuppressWarnings("unchecked") SpinedBuffer(); E get(long index); void copyInto(E[] array, int offset); E[] asArray(IntFunction<E[]> arrayFactory); @Override void clear(); @Override Iterator<E> iterator(); @Override void forEach(Consumer<? super E> consumer); @Override void accept(E e); @Override String toString(); Spliterator<E> spliterator(); }
@Test(dataProvider = "SpinedBuffer") public void testSpliterator(int[] array, SpinedBuffer<Integer> sb) { assertEquals(sb.count(), array.length); assertEquals(sb.count(), sb.spliterator().getExactSizeIfKnown()); SpliteratorTestHelper.testSpliterator(sb::spliterator); } @Test(dataProvider = "SpinedBuffer", groups = { "serialization-hostile" }) public void testLastSplit(int[] array, SpinedBuffer<Integer> sb) { Spliterator<Integer> spliterator = sb.spliterator(); Spliterator<Integer> split = spliterator.trySplit(); long splitSizes = (split == null) ? 0 : split.getExactSizeIfKnown(); long lastSplitSize = spliterator.getExactSizeIfKnown(); splitSizes += lastSplitSize; assertEquals(splitSizes, array.length); List<Integer> contentOfLastSplit = new ArrayList<>(); spliterator.forEachRemaining(contentOfLastSplit::add); assertEquals(contentOfLastSplit.size(), lastSplitSize); List<Integer> end = Arrays.stream(array) .boxed() .skip(array.length - lastSplitSize) .collect(Collectors.toList()); assertEquals(contentOfLastSplit, end); } @Test(dataProvider = "IntSpinedBuffer") public void testIntSpliterator(int[] array, SpinedBuffer.OfInt sb) { assertEquals(sb.count(), array.length); assertEquals(sb.count(), sb.spliterator().getExactSizeIfKnown()); SpliteratorTestHelper.testIntSpliterator(sb::spliterator); } @Test(dataProvider = "IntSpinedBuffer", groups = { "serialization-hostile" }) public void testIntLastSplit(int[] array, SpinedBuffer.OfInt sb) { Spliterator.OfInt spliterator = sb.spliterator(); Spliterator.OfInt split = spliterator.trySplit(); long splitSizes = (split == null) ? 0 : split.getExactSizeIfKnown(); long lastSplitSize = spliterator.getExactSizeIfKnown(); splitSizes += lastSplitSize; assertEquals(splitSizes, array.length); List<Integer> contentOfLastSplit = new ArrayList<>(); spliterator.forEachRemaining((IntConsumer) contentOfLastSplit::add); assertEquals(contentOfLastSplit.size(), lastSplitSize); List<Integer> end = Arrays.stream(array) .boxed() .skip(array.length - lastSplitSize) .collect(Collectors.toList()); assertEquals(contentOfLastSplit, end); } @Test(dataProvider = "LongSpinedBuffer") public void testLongSpliterator(long[] array, SpinedBuffer.OfLong sb) { assertEquals(sb.count(), array.length); assertEquals(sb.count(), sb.spliterator().getExactSizeIfKnown()); SpliteratorTestHelper.testLongSpliterator(sb::spliterator); } @Test(dataProvider = "LongSpinedBuffer", groups = { "serialization-hostile" }) public void testLongLastSplit(long[] array, SpinedBuffer.OfLong sb) { Spliterator.OfLong spliterator = sb.spliterator(); Spliterator.OfLong split = spliterator.trySplit(); long splitSizes = (split == null) ? 
0 : split.getExactSizeIfKnown(); long lastSplitSize = spliterator.getExactSizeIfKnown(); splitSizes += lastSplitSize; assertEquals(splitSizes, array.length); List<Long> contentOfLastSplit = new ArrayList<>(); spliterator.forEachRemaining((LongConsumer) contentOfLastSplit::add); assertEquals(contentOfLastSplit.size(), lastSplitSize); List<Long> end = Arrays.stream(array) .boxed() .skip(array.length - lastSplitSize) .collect(Collectors.toList()); assertEquals(contentOfLastSplit, end); } @Test(dataProvider = "DoubleSpinedBuffer") public void testDoubleSpliterator(double[] array, SpinedBuffer.OfDouble sb) { assertEquals(sb.count(), array.length); assertEquals(sb.count(), sb.spliterator().getExactSizeIfKnown()); SpliteratorTestHelper.testDoubleSpliterator(sb::spliterator); } @Test(dataProvider = "DoubleSpinedBuffer", groups = { "serialization-hostile" }) public void testLongLastSplit(double[] array, SpinedBuffer.OfDouble sb) { Spliterator.OfDouble spliterator = sb.spliterator(); Spliterator.OfDouble split = spliterator.trySplit(); long splitSizes = (split == null) ? 0 : split.getExactSizeIfKnown(); long lastSplitSize = spliterator.getExactSizeIfKnown(); splitSizes += lastSplitSize; assertEquals(splitSizes, array.length); List<Double> contentOfLastSplit = new ArrayList<>(); spliterator.forEachRemaining((DoubleConsumer) contentOfLastSplit::add); assertEquals(contentOfLastSplit.size(), lastSplitSize); List<Double> end = Arrays.stream(array) .boxed() .skip(array.length - lastSplitSize) .collect(Collectors.toList()); assertEquals(contentOfLastSplit, end); }
SpinedBuffer extends AbstractSpinedBuffer implements Consumer<E>, Iterable<E> { @SuppressWarnings("unchecked") public SpinedBuffer(int initialCapacity) { super(initialCapacity); curChunk = (E[]) new Object[1 << initialChunkPower]; } @SuppressWarnings("unchecked") SpinedBuffer(int initialCapacity); @SuppressWarnings("unchecked") SpinedBuffer(); E get(long index); void copyInto(E[] array, int offset); E[] asArray(IntFunction<E[]> arrayFactory); @Override void clear(); @Override Iterator<E> iterator(); @Override void forEach(Consumer<? super E> consumer); @Override void accept(E e); @Override String toString(); Spliterator<E> spliterator(); }
@Test(groups = { "serialization-hostile" }) public void testSpinedBuffer() { List<Integer> list1 = new ArrayList<>(); List<Integer> list2 = new ArrayList<>(); SpinedBuffer<Integer> sb = new SpinedBuffer<>(); for (int i = 0; i < TEST_SIZE; i++) { list1.add(i); sb.accept(i); } Iterator<Integer> it = sb.iterator(); for (int i = 0; i < TEST_SIZE; i++) list2.add(it.next()); assertFalse(it.hasNext()); assertEquals(list1, list2); for (int i = 0; i < TEST_SIZE; i++) assertEquals(sb.get(i), (Integer) i, Integer.toString(i)); list2.clear(); sb.forEach(list2::add); assertEquals(list1, list2); Integer[] array = sb.asArray(LambdaTestHelpers.integerArrayGenerator); list2.clear(); for (Integer i : array) list2.add(i); assertEquals(list1, list2); }
LayoutBinder implements FileScopeProvider { public IdentifierExpr addVariable(String name, String type, Location location, boolean declared) { Preconditions.check(!mUserDefinedVariables.containsKey(name), "%s has already been defined as %s", name, type); final IdentifierExpr id = mExprModel.identifier(name); id.setUserDefinedType(type); id.enableDirectInvalidation(); if (location != null) { id.addLocation(location); } mUserDefinedVariables.put(name, type); if (declared) { id.setDeclared(); } return id; } LayoutBinder(ResourceBundle.LayoutFileBundle layoutBundle); void resolveWhichExpressionsAreUsed(); IdentifierExpr addVariable(String name, String type, Location location, boolean declared); HashMap<String, String> getUserDefinedVariables(); BindingTarget createBindingTarget(ResourceBundle.BindingTargetBundle targetBundle); Expr parse(String input, boolean isTwoWay, @Nullable Location locationInFile); List<BindingTarget> getBindingTargets(); List<BindingTarget> getSortedTargets(); boolean isEmpty(); ExprModel getModel(); void sealModel(); String writeViewBinderBaseClass(boolean forLibrary); String writeViewBinder(int minSdk); String getPackage(); boolean isMerge(); String getModulePackage(); String getLayoutname(); String getImplementationName(); String getClassName(); String getTag(); boolean hasVariations(); @Override String provideScopeFilePath(); }
@Test public void testRegisterId() { int originalSize = mExprModel.size(); mLayoutBinder.addVariable("test", "java.lang.String", null); assertEquals(originalSize + 1, mExprModel.size()); final Map.Entry<String, Expr> entry = findIdentifier("test"); final Expr value = entry.getValue(); assertEquals(value.getClass(), IdentifierExpr.class); final IdentifierExpr id = (IdentifierExpr) value; assertEquals("test", id.getName()); assertEquals(new JavaClass(String.class), id.getResolvedType()); assertTrue(id.isDynamic()); }
LayoutBinder implements FileScopeProvider { public Expr parse(String input, boolean isTwoWay, @Nullable Location locationInFile) { final Expr parsed = mExpressionParser.parse(input, locationInFile); parsed.setBindingExpression(true); parsed.setTwoWay(isTwoWay); return parsed; } LayoutBinder(ResourceBundle.LayoutFileBundle layoutBundle); void resolveWhichExpressionsAreUsed(); IdentifierExpr addVariable(String name, String type, Location location, boolean declared); HashMap<String, String> getUserDefinedVariables(); BindingTarget createBindingTarget(ResourceBundle.BindingTargetBundle targetBundle); Expr parse(String input, boolean isTwoWay, @Nullable Location locationInFile); List<BindingTarget> getBindingTargets(); List<BindingTarget> getSortedTargets(); boolean isEmpty(); ExprModel getModel(); void sealModel(); String writeViewBinderBaseClass(boolean forLibrary); String writeViewBinder(int minSdk); String getPackage(); boolean isMerge(); String getModulePackage(); String getLayoutname(); String getImplementationName(); String getClassName(); String getTag(); boolean hasVariations(); @Override String provideScopeFilePath(); }
@Test public void testParse() { int originalSize = mExprModel.size(); mLayoutBinder.addVariable("user", "android.databinding.tool2.LayoutBinderTest.TestUser", null); mLayoutBinder.parse("user.name", false, null); mLayoutBinder.parse("user.lastName", false, null); assertEquals(originalSize + 3, mExprModel.size()); final List<Expr> bindingExprs = mExprModel.getBindingExpressions(); assertEquals(2, bindingExprs.size()); IdentifierExpr id = mExprModel.identifier("user"); assertTrue(bindingExprs.get(0) instanceof FieldAccessExpr); assertTrue(bindingExprs.get(1) instanceof FieldAccessExpr); assertEquals(2, id.getParents().size()); assertTrue(bindingExprs.get(0).getChildren().contains(id)); assertTrue(bindingExprs.get(1).getChildren().contains(id)); }
ExprModel { public <T extends Expr> T register(T expr) { Preconditions.check(!mSealed, "Cannot add expressions to a model after it is sealed"); Location location = null; if (mCurrentParserContext != null) { location = new Location(mCurrentParserContext); location.setParentLocation(mCurrentLocationInFile); } T existing = (T) mExprMap.get(expr.getUniqueKey()); if (existing != null) { Preconditions.check(expr.getParents().isEmpty(), "If an expression already exists, it should've never been added to a parent," + "if thats the case, somewhere we are creating an expression w/o" + "calling expression model"); expr.onSwappedWith(existing); if (location != null) { existing.addLocation(location); } return existing; } mExprMap.put(expr.getUniqueKey(), expr); expr.setModel(this); if (location != null) { expr.addLocation(location); } return expr; } T register(T expr); void setCurrentParserContext(ParserRuleContext currentParserContext); Map<String, Expr> getExprMap(); int size(); ComparisonExpr comparison(String op, Expr left, Expr right); InstanceOfExpr instanceOfOp(Expr expr, String type); FieldAccessExpr field(Expr parent, String name); FieldAccessExpr observableField(Expr parent, String name); SymbolExpr symbol(String text, Class type); TernaryExpr ternary(Expr pred, Expr ifTrue, Expr ifFalse); IdentifierExpr identifier(String name); StaticIdentifierExpr staticIdentifier(String name); BuiltInVariableExpr builtInVariable(String name, String type, String accessCode); ViewFieldExpr viewFieldExpr(BindingTarget bindingTarget); StaticIdentifierExpr staticIdentifierFor(final ModelClass modelClass); MethodCallExpr methodCall(Expr target, String name, List<Expr> args); MathExpr math(Expr left, String op, Expr right); TernaryExpr logical(Expr left, String op, Expr right); BitShiftExpr bitshift(Expr left, String op, Expr right); UnaryExpr unary(String op, Expr expr); Expr group(Expr grouped); Expr resourceExpr(String packageName, String resourceType, String resourceName, List<Expr> args); Expr bracketExpr(Expr variableExpr, Expr argExpr); Expr castExpr(String type, Expr expr); TwoWayListenerExpr twoWayListenerExpr(InverseBinding inverseBinding); List<Expr> getBindingExpressions(); StaticIdentifierExpr addImport(String alias, String type, Location location); Map<String, String> getImports(); Expr bindingExpr(Expr bindingExpr); void removeExpr(Expr expr); List<Expr> getObservables(); void seal(); int getFlagBucketCount(); int getTotalFlagCount(); int getInvalidateableFieldLimit(); String[] getFlagMapping(); String getFlag(int id); List<Expr> getPendingExpressions(); boolean markBitsRead(); static List<Expr> filterShouldRead(Iterable<Expr> exprs); Expr findFlagExpression(int flag); BitSet getInvalidateAnyBitSet(); int getInvalidateAnyFlagIndex(); Expr argListExpr(Iterable<Expr> expressions); void setCurrentLocationInFile(Location location); Expr listenerExpr(Expr expression, String name, ModelClass listenerType, ModelMethod listenerMethod); }
@Test public void testAddWithChildren() { DummyExpr a = new DummyExpr("a"); DummyExpr b = new DummyExpr("b"); DummyExpr c = new DummyExpr("c", a, b); mExprModel.register(c); DummyExpr a2 = new DummyExpr("a"); DummyExpr b2 = new DummyExpr("b"); DummyExpr c2 = new DummyExpr("c", a, b); assertEquals(c, mExprModel.register(c2)); }
ExprModel { public void seal() { L.d("sealing model"); List<Expr> notifiableExpressions = new ArrayList<Expr>(); final ModelAnalyzer modelAnalyzer = ModelAnalyzer.getInstance(); updateExpressions(modelAnalyzer); int counter = 0; final Iterable<Expr> observables = filterObservables(modelAnalyzer); List<String> flagMapping = new ArrayList<String>(); mObservables = new ArrayList<Expr>(); for (Expr expr : observables) { flagMapping.add(expr.getUniqueKey()); expr.setId(counter++); mObservables.add(expr); notifiableExpressions.add(expr); L.d("observable %s", expr.getUniqueKey()); } final Iterable<Expr> nonObservableIds = filterNonObservableIds(modelAnalyzer); for (Expr expr : nonObservableIds) { flagMapping.add(expr.getUniqueKey()); expr.setId(counter++); notifiableExpressions.add(expr); L.d("non-observable %s", expr.getUniqueKey()); } for (Expr expr : observables) { for (Expr parent : expr.getParents()) { if (parent.hasId()) { continue; } if (parent instanceof FieldAccessExpr) { FieldAccessExpr fae = (FieldAccessExpr) parent; L.d("checking field access expr %s. getter: %s", fae,fae.getGetter()); if (fae.isDynamic() && fae.getGetter().canBeInvalidated()) { flagMapping.add(parent.getUniqueKey()); parent.setId(counter++); notifiableExpressions.add(parent); L.d("notifiable field %s : %s for %s : %s", parent.getUniqueKey(), Integer.toHexString(System.identityHashCode(parent)), expr.getUniqueKey(), Integer.toHexString(System.identityHashCode(expr))); } } } } for (Expr expr : mExprMap.values()) { if (expr instanceof FieldAccessExpr) { FieldAccessExpr fieldAccessExpr = (FieldAccessExpr) expr; if (fieldAccessExpr.getChild() instanceof ViewFieldExpr) { flagMapping.add(fieldAccessExpr.getUniqueKey()); fieldAccessExpr.setId(counter++); } } } L.d("list of binding expressions"); for (int i = 0; i < mBindingExpressions.size(); i++) { L.d("[%d] %s", i, mBindingExpressions.get(i)); } for (Expr expr : notifiableExpressions) { expr.enableDirectInvalidation(); } for (Expr expr : mExprMap.values()) { expr.getDependencies(); } mInvalidateAnyFlagIndex = counter ++; flagMapping.add("INVALIDATE ANY"); mInvalidateableFieldLimit = counter; mInvalidateableFlags = new BitSet(); for (int i = 0; i < mInvalidateableFieldLimit; i++) { mInvalidateableFlags.set(i, true); } for (Expr expr : mExprMap.values()) { if (expr.isConditional()) { L.d("requirement id for %s is %d", expr, counter); expr.setRequirementId(counter); flagMapping.add(expr.getUniqueKey() + FALSE_KEY_SUFFIX); flagMapping.add(expr.getUniqueKey() + TRUE_KEY_SUFFIX); counter += 2; } } mConditionalFlags = new BitSet(); for (int i = mInvalidateableFieldLimit; i < counter; i++) { mConditionalFlags.set(i, true); } mRequirementIdCount = (counter - mInvalidateableFieldLimit) / 2; for (Map.Entry<String, Expr> entry : mExprMap.entrySet()) { final Expr value = entry.getValue(); if (!value.hasId()) { value.setId(counter++); } } mFlagMapping = new String[flagMapping.size()]; flagMapping.toArray(mFlagMapping); mFlagBucketCount = 1 + (getTotalFlagCount() / FlagSet.sBucketSize); mInvalidateAnyFlags = new BitSet(); mInvalidateAnyFlags.set(mInvalidateAnyFlagIndex, true); for (Expr expr : mExprMap.values()) { expr.getShouldReadFlagsWithConditionals(); } for (Expr expr : mExprMap.values()) { expr.getResolvedType(); } mSealed = true; } T register(T expr); void setCurrentParserContext(ParserRuleContext currentParserContext); Map<String, Expr> getExprMap(); int size(); ComparisonExpr comparison(String op, Expr left, Expr right); InstanceOfExpr instanceOfOp(Expr expr, String type); 
FieldAccessExpr field(Expr parent, String name); FieldAccessExpr observableField(Expr parent, String name); SymbolExpr symbol(String text, Class type); TernaryExpr ternary(Expr pred, Expr ifTrue, Expr ifFalse); IdentifierExpr identifier(String name); StaticIdentifierExpr staticIdentifier(String name); BuiltInVariableExpr builtInVariable(String name, String type, String accessCode); ViewFieldExpr viewFieldExpr(BindingTarget bindingTarget); StaticIdentifierExpr staticIdentifierFor(final ModelClass modelClass); MethodCallExpr methodCall(Expr target, String name, List<Expr> args); MathExpr math(Expr left, String op, Expr right); TernaryExpr logical(Expr left, String op, Expr right); BitShiftExpr bitshift(Expr left, String op, Expr right); UnaryExpr unary(String op, Expr expr); Expr group(Expr grouped); Expr resourceExpr(String packageName, String resourceType, String resourceName, List<Expr> args); Expr bracketExpr(Expr variableExpr, Expr argExpr); Expr castExpr(String type, Expr expr); TwoWayListenerExpr twoWayListenerExpr(InverseBinding inverseBinding); List<Expr> getBindingExpressions(); StaticIdentifierExpr addImport(String alias, String type, Location location); Map<String, String> getImports(); Expr bindingExpr(Expr bindingExpr); void removeExpr(Expr expr); List<Expr> getObservables(); void seal(); int getFlagBucketCount(); int getTotalFlagCount(); int getInvalidateableFieldLimit(); String[] getFlagMapping(); String getFlag(int id); List<Expr> getPendingExpressions(); boolean markBitsRead(); static List<Expr> filterShouldRead(Iterable<Expr> exprs); Expr findFlagExpression(int flag); BitSet getInvalidateAnyBitSet(); int getInvalidateAnyFlagIndex(); Expr argListExpr(Iterable<Expr> expressions); void setCurrentLocationInFile(Location location); Expr listenerExpr(Expr expression, String name, ModelClass listenerType, ModelMethod listenerMethod); }
@Test public void testStaticMethodOfInstance() { MockLayoutBinder lb = new MockLayoutBinder(); mExprModel = lb.getModel(); lb.addVariable("user", User.class.getCanonicalName(), null); MethodCallExpr methodCall = parse(lb, "user.ourStaticMethod()", MethodCallExpr.class); assertTrue(methodCall.isDynamic()); mExprModel.seal(); final Expr child = methodCall.getTarget(); assertTrue(child instanceof StaticIdentifierExpr); StaticIdentifierExpr id = (StaticIdentifierExpr) child; assertEquals(id.getResolvedType().getCanonicalName(), User.class.getCanonicalName()); }
Location { public boolean contains(Location other) { if (startLine > other.startLine) { return false; } if (startLine == other.startLine && startOffset > other.startOffset) { return false; } if (endLine < other.endLine) { return false; } return !(endLine == other.endLine && endOffset < other.endOffset); } Location(); Location(Location other); Location(Token start, Token end); Location(ParserRuleContext context); Location(int startLine, int startOffset, int endLine, int endOffset); @Override String toString(); void setParentLocation(Location parentLocation); @Override boolean equals(Object o); @Override int hashCode(); boolean isValid(); boolean contains(Location other); Location toAbsoluteLocation(); String toUserReadableString(); static Location fromUserReadableString(String str); LocationScopeProvider createScope(); static final int NaN; @XmlAttribute(name = "startLine") public int startLine; @XmlAttribute(name = "startOffset") public int startOffset; @XmlAttribute(name = "endLine") public int endLine; @XmlAttribute(name = "endOffset") public int endOffset; @XmlElement(name = "parentLocation") public Location parentLocation; }
@Test public void testContains() { Location location1 = new Location(0, 0, 10, 1); Location location2 = new Location(0, 0, 9, 1); assertTrue(location1.contains(location2)); location2.endLine = 10; assertTrue(location1.contains(location2)); location2.endOffset = 2; assertFalse(location1.contains(location2)); }
DiffUtil { public static DiffResult calculateDiff(Callback cb) { return calculateDiff(cb, true); } private DiffUtil(); static DiffResult calculateDiff(Callback cb); static DiffResult calculateDiff(Callback cb, boolean detectMoves); }
@Test public void testDisableMoveDetection() { initWithSize(5); move(0, 4); List<Item> applied = applyUpdates(mBefore, DiffUtil.calculateDiff(mCallback, false)); assertThat(applied.size(), is(5)); assertThat(applied.get(4).newItem, is(true)); assertThat(applied.contains(mBefore.get(0)), is(false)); }
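For context, a minimal sketch of the Callback that calculateDiff() consumes, roughly what the mCallback fixture in the test above would wrap; it assumes the standard RecyclerView DiffUtil.Callback API, and the Item type with its id field is a hypothetical stand-in:

import android.support.v7.util.DiffUtil; // androidx.recyclerview.widget.DiffUtil in current releases
import java.util.List;

class ItemDiffCallback extends DiffUtil.Callback {
    // Hypothetical item type used only for this sketch.
    static class Item { final long id; Item(long id) { this.id = id; } }

    private final List<Item> oldList;
    private final List<Item> newList;

    ItemDiffCallback(List<Item> oldList, List<Item> newList) {
        this.oldList = oldList;
        this.newList = newList;
    }

    @Override public int getOldListSize() { return oldList.size(); }
    @Override public int getNewListSize() { return newList.size(); }

    // Identity check: same logical item (stable id), even if its contents changed.
    @Override public boolean areItemsTheSame(int oldPos, int newPos) {
        return oldList.get(oldPos).id == newList.get(newPos).id;
    }

    // Content check: decides whether a kept item still needs a change/update notification.
    @Override public boolean areContentsTheSame(int oldPos, int newPos) {
        return oldList.get(oldPos).id == newList.get(newPos).id;
    }
}

// Usage mirroring the test: DiffUtil.calculateDiff(new ItemDiffCallback(before, after), false)
// skips the second pass that detects moved items, so a moved element is reported as a removal plus
// an insertion — which is what testDisableMoveDetection asserts about positions 0 and 4.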
ViewInfoStore { void addToPreLayout(ViewHolder holder, ItemHolderInfo info) { InfoRecord record = mLayoutHolderMap.get(holder); if (record == null) { record = InfoRecord.obtain(); mLayoutHolderMap.put(holder, record); } record.preInfo = info; record.flags |= FLAG_PRE; } void onViewDetached(ViewHolder viewHolder); }
@Test public void addOverridePre() { RecyclerView.ViewHolder vh = new MockViewHolder(); MockInfo info = new MockInfo(); mStore.addToPreLayout(vh, info); MockInfo info2 = new MockInfo(); mStore.addToPreLayout(vh, info2); assertSame(info2, find(vh, FLAG_PRE)); } @Test public void addToPreLayout() { RecyclerView.ViewHolder vh = new MockViewHolder(); MockInfo info = new MockInfo(); mStore.addToPreLayout(vh, info); assertSame(info, find(vh, FLAG_PRE)); assertTrue(mStore.isInPreLayout(vh)); mStore.removeViewHolder(vh); assertFalse(mStore.isInPreLayout(vh)); }
ViewInfoStore { void addToPostLayout(ViewHolder holder, ItemHolderInfo info) { InfoRecord record = mLayoutHolderMap.get(holder); if (record == null) { record = InfoRecord.obtain(); mLayoutHolderMap.put(holder, record); } record.postInfo = info; record.flags |= FLAG_POST; } void onViewDetached(ViewHolder viewHolder); }
@Test public void addOverridePost() { RecyclerView.ViewHolder vh = new MockViewHolder(); MockInfo info = new MockInfo(); mStore.addToPostLayout(vh, info); MockInfo info2 = new MockInfo(); mStore.addToPostLayout(vh, info2); assertSame(info2, find(vh, FLAG_POST)); } @Test public void addToPostLayout() { RecyclerView.ViewHolder vh = new MockViewHolder(); MockInfo info = new MockInfo(); mStore.addToPostLayout(vh, info); assertSame(info, find(vh, FLAG_POST)); mStore.removeViewHolder(vh); assertNull(find(vh, FLAG_POST)); }