src_fm_fc_ms_ff
stringlengths
43
86.8k
target
stringlengths
20
276k
ProjectParser extends GeneratedProjectParser { public ProjectColumnList getExpList () { return m_expList; } private ProjectParser(); ProjectColumnList getExpList(); static ProjectColumnList getExpList(String str); }
@Test public void test1 () throws Exception { Assert.assertNotNull (ProjectParser.getExpList ("a")); Assert.assertNotNull (ProjectParser.getExpList ("a, b")); Assert.assertNotNull (ProjectParser.getExpList ("a as b, c")); Assert.assertNotNull (ProjectParser.getExpList ("a as b, c as d")); Assert.assertNotNull (ProjectParser.getExpList ("\"a\" as \" b \", c as d")); } @Test (expected = IOException.class) public void testError1 () throws IOException { ProjectParser.getExpList (""); } @Test (expected = IOException.class) public void testError2 () throws IOException { ProjectParser.getExpList ("a b"); }
AzureUtils { public static CloudBlobClient getBlobClient(JaqyInterpreter interpreter, String account) throws IOException { VariableManager vm = interpreter.getVariableManager (); Object o = vm.get (WASB_CLIENT_VAR); if (o instanceof CloudBlobClient) { CloudBlobClient client = (CloudBlobClient)o; if (account == null) { return client; } StorageCredentials credential = client.getCredentials (); if (credential != null && account.equals (credential.getAccountName ())) { return client; } } try { CloudStorageAccount storageAccount = CloudStorageAccount.parse(getAccountString (interpreter, account)); CloudBlobClient client = storageAccount.createCloudBlobClient(); vm.setVariable (WASB_CLIENT_VAR, client); return client; } catch (Exception ex) { throw new IOException (ex.getMessage (), ex); } } static void setAccount(String account, JaqyInterpreter interpreter); static void setContainer(String container, JaqyInterpreter interpreter); static void setKey(String key, JaqyInterpreter interpreter); static void setEndPoint(String endPoint, JaqyInterpreter interpreter); static CloudBlobClient getBlobClient(JaqyInterpreter interpreter, String account); static AzurePathInfo getPathInfo(String path); static CloudBlobContainer getContainer(String account, String container, JaqyInterpreter interpreter); static void createContainer(String container, JaqyInterpreter interpreter); static void deleteContainer(String container, JaqyInterpreter interpreter); final static String WASB_CLIENT_VAR; final static String WASB_ACCOUNT_VAR; final static String WASB_CONTAINER_VAR; final static String WASB_KEY_VAR; final static String WASB_ENDPOINT_VAR; }
@Test public void testGetBlobClient () throws Exception { Globals globals = new Globals (); JaqyInterpreter interpreter = new JaqyInterpreter (globals, null, null); AzureUtils.setAccount ("devstoreaccount1", interpreter); AzureUtils.setKey ("Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==", interpreter); AzureUtils.setEndPoint ("http: CloudBlobClient client = AzureUtils.getBlobClient (interpreter, "devstoreaccount1"); CloudBlobContainer container = client.getContainerReference ("testcontainer"); if (!container.exists ()) { container.create (); } CloudBlobClient client2 = AzureUtils.getBlobClient (interpreter, "devstoreaccount1"); Assert.assertSame (client2, client); client2 = AzureUtils.getBlobClient (interpreter, null); Assert.assertSame (client2, client); client2 = AzureUtils.getBlobClient (interpreter, "abcdefg"); Assert.assertNotSame (client2, client); container.delete (); }
JsonExpFactory { public static JsonEventVisitor createVisitor (String rowExp, String[] colExps, JsonValueVisitor[] valueVisitors, JsonRowEndListener listener, boolean rootAsArray) { String[] exps; if (rowExp == null || rowExp.length () == 0) exps = new String[0]; else exps = rowExp.split ("[.]"); JsonEventVisitor rootVisitor; JsonRowVisitor rv = new JsonRowVisitor (listener, rootAsArray); if (exps.length == 0) { rootVisitor = rv; } else { rootVisitor = createVisitors (null, exps, rv); } TreeMap<String, JsonValueVisitor> expMap = new TreeMap<String, JsonValueVisitor> (); JsonEventVisitor colRootVisitor = null; for (int i = 0; i < colExps.length; ++i) { String colExp = colExps[i]; JsonValueVisitor prevVV = expMap.get (colExp); if (prevVV != null) { valueVisitors[i] = prevVV; continue; } JsonValueVisitor vv = new JsonValueVisitor (); valueVisitors[i] = vv; expMap.put (colExp, vv); exps = colExp.split ("[.]"); colRootVisitor = createVisitors (colRootVisitor, exps, vv); } rv.setColVisitor (colRootVisitor); return rootVisitor; } static JsonEventVisitor createVisitor(String rowExp, String[] colExps, JsonValueVisitor[] valueVisitors, JsonRowEndListener listener, boolean rootAsArray); }
@Test public void test1 () { JsonValueVisitor[] vvs; String[] colExps; colExps = new String[] { "a", "b" }; vvs = new JsonValueVisitor[colExps.length]; TestRowEndListener listener = new TestRowEndListener (); JsonEventVisitor ev = JsonExpFactory.createVisitor ("", colExps, vvs, listener, false); for (int i = 0; i < vvs.length; ++i) { Assert.assertNotNull(vvs[i]); } TextJsonParser p = new TextJsonParser (new StringReader ("[{\"a\":123,\"b\":234}]")); int depth = 0; while (p.hasNext ()) { JsonParser.Event e = p.next (); switch (e) { case START_OBJECT: case START_ARRAY: ev.visit (e, p, depth); ++depth; break; case END_OBJECT: case END_ARRAY: --depth; ev.visit (e, p, depth); break; default: ev.visit (e, p, depth); break; } } Assert.assertTrue (listener.isRowEnd ()); Assert.assertEquals ("123", vvs[0].getValue ().toString ()); Assert.assertEquals ("234", vvs[1].getValue ().toString ()); } @Test public void test2 () { JsonValueVisitor[] vvs; String[] colExps; colExps = new String[] { "a", "b" }; vvs = new JsonValueVisitor[colExps.length]; TestRowEndListener listener = new TestRowEndListener (); JsonEventVisitor ev = JsonExpFactory.createVisitor ("", colExps, vvs, listener, false); for (int i = 0; i < vvs.length; ++i) { Assert.assertNotNull(vvs[i]); } TextJsonParser p = new TextJsonParser (new StringReader ("[{\"a\":123,\"b\":234},{\"a\":123,\"b\":234},{\"a\":1234,\"b\":2345}]")); int depth = 0; int rowCount = 0; while (p.hasNext ()) { JsonParser.Event e = p.next (); switch (e) { case START_OBJECT: case START_ARRAY: ev.visit (e, p, depth); ++depth; break; case END_OBJECT: case END_ARRAY: --depth; ev.visit (e, p, depth); break; default: ev.visit (e, p, depth); break; } if (listener.isRowEnd ()) { ++rowCount; listener.setRowEnd (false); } } Assert.assertEquals (3, rowCount); Assert.assertEquals ("1234", vvs[0].getValue ().toString ()); Assert.assertEquals ("2345", vvs[1].getValue ().toString ()); } @Test public void test3 () { JsonValueVisitor[] vvs; String[] colExps; 
colExps = new String[] { "a", "b" }; vvs = new JsonValueVisitor[colExps.length]; TestRowEndListener listener = new TestRowEndListener (); JsonEventVisitor ev = JsonExpFactory.createVisitor ("items", colExps, vvs, listener, false); for (int i = 0; i < vvs.length; ++i) { Assert.assertNotNull(vvs[i]); } TextJsonParser p = new TextJsonParser (new StringReader ("{\"items\":[{\"a\":123,\"b\":234},{\"a\":123,\"b\":234},{\"a\":1234,\"b\":2345}]}")); int depth = 0; int rowCount = 0; while (p.hasNext ()) { JsonParser.Event e = p.next (); switch (e) { case START_OBJECT: case START_ARRAY: ev.visit (e, p, depth); ++depth; break; case END_OBJECT: case END_ARRAY: --depth; ev.visit (e, p, depth); break; default: ev.visit (e, p, depth); break; } if (listener.isRowEnd ()) { ++rowCount; listener.setRowEnd (false); } } Assert.assertEquals (3, rowCount); Assert.assertEquals ("1234", vvs[0].getValue ().toString ()); Assert.assertEquals ("2345", vvs[1].getValue ().toString ()); } @Test public void test4 () { JsonValueVisitor[] vvs; String[] colExps; colExps = new String[] { "a", "b" }; vvs = new JsonValueVisitor[colExps.length]; TestRowEndListener listener = new TestRowEndListener (); JsonEventVisitor ev = JsonExpFactory.createVisitor ("items", colExps, vvs, listener, false); for (int i = 0; i < vvs.length; ++i) { Assert.assertNotNull(vvs[i]); } TextJsonParser p = new TextJsonParser (new StringReader ("{\"items\":{\"a\":123,\"b\":234}}")); int depth = 0; int rowCount = 0; while (p.hasNext ()) { JsonParser.Event e = p.next (); switch (e) { case START_OBJECT: case START_ARRAY: ev.visit (e, p, depth); ++depth; break; case END_OBJECT: case END_ARRAY: --depth; ev.visit (e, p, depth); break; default: ev.visit (e, p, depth); break; } if (listener.isRowEnd ()) { ++rowCount; listener.setRowEnd (false); } } Assert.assertEquals (0, rowCount); } @Test public void test5 () { JsonValueVisitor[] vvs; String[] colExps; colExps = new String[] { "a", "b" }; vvs = new JsonValueVisitor[colExps.length]; 
TestRowEndListener listener = new TestRowEndListener (); JsonEventVisitor ev = JsonExpFactory.createVisitor ("", colExps, vvs, listener, true); for (int i = 0; i < vvs.length; ++i) { Assert.assertNotNull(vvs[i]); } TextJsonParser p = new TextJsonParser (new StringReader ("{\"1\":{\"a\":1,\"b\":2},\"2\":{\"a\":3,\"b\":4},\"3\":{\"a\":5,\"b\":6}}")); int depth = 0; int rowCount = 0; while (p.hasNext ()) { JsonParser.Event e = p.next (); switch (e) { case START_OBJECT: case START_ARRAY: ev.visit (e, p, depth); ++depth; break; case END_OBJECT: case END_ARRAY: --depth; ev.visit (e, p, depth); break; default: ev.visit (e, p, depth); break; } if (listener.isRowEnd ()) { ++rowCount; listener.setRowEnd (false); } } Assert.assertEquals (3, rowCount); Assert.assertEquals ("5", vvs[0].getValue ().toString ()); Assert.assertEquals ("6", vvs[1].getValue ().toString ()); }
System extends Thread implements PlugIn { public System(EngineManager engine) { setName("System"); this.engine = engine; } System(EngineManager engine); @Override void reciveMsg(SCSMsg msg); @Override void run(); static final int MUST_WHO; }
@Test public void testSystem() { System sys = new System(null); assertNotNull("System must be create", sys); }
System extends Thread implements PlugIn { @Override public void reciveMsg(SCSMsg msg) { LOGGER.debug("System recived msg: {}", msg); Value value = null; SCSMsg msgResonse = null; if (msg.getWho().getMain() == MUST_WHO && msg.isStatus()) { switch (msg.getProperty().getMain()) { case PM_TIME: if (msg.isStatusProperty()) { msgResonse = this.setTime(msg); } else { value = this.getTime(); } break; case PM_DATE: value = this.getDate(); break; case PM_IP: value = this.getIP(); break; case PM_NETMASK: value = this.getNetMask(); break; case PM_MAC_ADDRESS: value = this.getMac(); break; case PM_SERVER_MODEL: value = this.getModel(); break; case PM_FIRMWARE_VERSION: value = this.getFirmware(); break; case PM_STARTUP_TIME: value = this.getStartUpTime(); break; case PM_TIME_DATE: value = this.getTimeDate(); break; case PM_KERNEL_VERSION: value = this.getKernel(); break; case PM_DISTRIBUTION_VERSION: value = this.getVersion(); break; default: LOGGER.warn("Function not implemented: {}", msg.getProperty().getMain()); } if (value != null) { final Who who = new Who(Integer.toString(MUST_WHO)); msgResonse = new SCSMsg(who, true, msg.getWhere(), null, msg.getProperty(), value); } if (msgResonse != null) { if (this.engine == null) { LOGGER.debug("msg: {}", msgResonse); } else { this.engine.sendCommand(msgResonse, this); } } } else { } } System(EngineManager engine); @Override void reciveMsg(SCSMsg msg); @Override void run(); static final int MUST_WHO; }
@Test public void testReciveMsg() { TestEngine engine = new TestEngine(); System sys = new System(engine); try { SCSMsg msg = new SCSMsg("*#13**0##"); Calendar now = Config.getInstance().getCurentTime(); sys.reciveMsg(msg); String time = getTime(now); assertEquals("wrong Time",new SCSMsg("*#13**0*" + time + "##"), engine.msgs.poll()); msg = new SCSMsg("*#13**1##"); sys.reciveMsg(msg); String date = getDate(now); assertEquals("wrong Date",new SCSMsg("*#13**1*" + date + "##"), engine.msgs.poll()); } catch (MessageFormatException e) { e.printStackTrace(); } }
GeoHashSizeTable { protected static final double dLat(int bits) { return 180d / Math.pow(2, bits / 2); } static final int numberOfBitsForOverlappingGeoHash(BoundingBox boundingBox); }
@Test public void testDLat() { assertDLatIs(180d, 0); assertDLatIs(180d, 1); assertDLatIs(90d, 2); assertDLatIs(0.3515625, 18); assertDLatIs(0.3515625, 19); }
BoundingBox implements Serializable { @Override public int hashCode() { int result = 17; result = 37 * result + hashCode(minLat); result = 37 * result + hashCode(maxLat); result = 37 * result + hashCode(minLon); result = 37 * result + hashCode(maxLon); return result; } BoundingBox(WGS84Point p1, WGS84Point p2); BoundingBox(double y1, double y2, double x1, double x2); BoundingBox(BoundingBox that); WGS84Point getUpperLeft(); WGS84Point getLowerRight(); double getLatitudeSize(); double getLongitudeSize(); @Override boolean equals(Object obj); @Override int hashCode(); boolean contains(WGS84Point point); boolean intersects(BoundingBox other); @Override String toString(); WGS84Point getCenterPoint(); void expandToInclude(BoundingBox other); double getMinLon(); double getMinLat(); double getMaxLat(); double getMaxLon(); }
@Test public void testHashCode() { assertEquals(a.hashCode(), b.hashCode()); assertFalse(a.hashCode() == c.hashCode()); }
BoundingBox implements Serializable { @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (obj instanceof BoundingBox) { BoundingBox that = (BoundingBox) obj; return minLat == that.minLat && minLon == that.minLon && maxLat == that.maxLat && maxLon == that.maxLon; } else { return false; } } BoundingBox(WGS84Point p1, WGS84Point p2); BoundingBox(double y1, double y2, double x1, double x2); BoundingBox(BoundingBox that); WGS84Point getUpperLeft(); WGS84Point getLowerRight(); double getLatitudeSize(); double getLongitudeSize(); @Override boolean equals(Object obj); @Override int hashCode(); boolean contains(WGS84Point point); boolean intersects(BoundingBox other); @Override String toString(); WGS84Point getCenterPoint(); void expandToInclude(BoundingBox other); double getMinLon(); double getMinLat(); double getMaxLat(); double getMaxLon(); }
@Test public void testEqualsObject() { assertEquals(a, b); assertEquals(b, a); assertFalse(a.equals(c)); }
BoundingBox implements Serializable { public boolean contains(WGS84Point point) { return (point.getLatitude() >= minLat) && (point.getLongitude() >= minLon) && (point.getLatitude() <= maxLat) && (point.getLongitude() <= maxLon); } BoundingBox(WGS84Point p1, WGS84Point p2); BoundingBox(double y1, double y2, double x1, double x2); BoundingBox(BoundingBox that); WGS84Point getUpperLeft(); WGS84Point getLowerRight(); double getLatitudeSize(); double getLongitudeSize(); @Override boolean equals(Object obj); @Override int hashCode(); boolean contains(WGS84Point point); boolean intersects(BoundingBox other); @Override String toString(); WGS84Point getCenterPoint(); void expandToInclude(BoundingBox other); double getMinLon(); double getMinLat(); double getMaxLat(); double getMaxLon(); }
@Test public void testContains() { BoundingBox bbox = new BoundingBox(45, 46, 121, 120); assertContains(bbox, new WGS84Point(45.5, 120.5)); assertNotContains(bbox, new WGS84Point(90, 90)); }
BoundingBox implements Serializable { public boolean intersects(BoundingBox other) { return !(other.minLon > maxLon || other.maxLon < minLon || other.minLat > maxLat || other.maxLat < minLat); } BoundingBox(WGS84Point p1, WGS84Point p2); BoundingBox(double y1, double y2, double x1, double x2); BoundingBox(BoundingBox that); WGS84Point getUpperLeft(); WGS84Point getLowerRight(); double getLatitudeSize(); double getLongitudeSize(); @Override boolean equals(Object obj); @Override int hashCode(); boolean contains(WGS84Point point); boolean intersects(BoundingBox other); @Override String toString(); WGS84Point getCenterPoint(); void expandToInclude(BoundingBox other); double getMinLon(); double getMinLat(); double getMaxLat(); double getMaxLon(); }
@Test public void testIntersects() { BoundingBox bbox = new BoundingBox(10, -10, 41, 40); assertIntersects(bbox, new BoundingBox(5, -15, 40.5, 43)); assertDoesNotIntersect(bbox, new BoundingBox(5, -15, 42, 43)); }
GeoHashSizeTable { protected static final double dLon(int bits) { return 360d / Math.pow(2, (bits + 1) / 2); } static final int numberOfBitsForOverlappingGeoHash(BoundingBox boundingBox); }
@Test public void testDLon() { assertDLonIs(360, 0); assertDLonIs(180, 1); assertDLonIs(0.0439453125, 25); assertDLonIs(0.0439453125, 26); }
LongUtil { public static final int commonPrefixLength(long a, long b) { int result = 0; while (result < 64 && (a & FIRST_BIT) == (b & FIRST_BIT)) { result++; a <<= 1; b <<= 1; } return result; } static final int commonPrefixLength(long a, long b); static final long FIRST_BIT; }
@Test public void testSameNumbersHave64BitsPrefix() { for (long a = 0; a < 120000000; a += 101) { long b = a; assertEquals(64, LongUtil.commonPrefixLength(a, b)); } }
GeoHash implements Comparable<GeoHash>, Serializable { public String toBase32() { if (significantBits % 5 != 0) { return ""; } StringBuilder buf = new StringBuilder(); long firstFiveBitsMask = 0xf800000000000000l; long bitsCopy = bits; int partialChunks = (int) Math.ceil(((double) significantBits / 5)); for (int i = 0; i < partialChunks; i++) { int pointer = (int) ((bitsCopy & firstFiveBitsMask) >>> 59); buf.append(base32[pointer]); bitsCopy <<= 5; } return buf.toString(); } protected GeoHash(); private GeoHash(double latitude, double longitude, int desiredPrecision); static void main(String[] args); static int getBitsToShift(int b1, int b2); static GeoHash withUnprecisionOfDistanceInMeter(double centerLat, double centerLong, double distanceInMeter); static GeoHash withCharacterPrecision(double latitude, double longitude, int numberOfCharacters); static GeoHash withBitPrecision(double latitude, double longitude, int numberOfBits); static GeoHash fromBinaryString(String binaryString); static GeoHash fromGeohashString(String geohash); static GeoHash fromIntValue(int hashVal); static GeoHash fromLongValue(long hashVal, int significantBits); GeoHash next(int step); GeoHash next(); GeoHash prev(); long ord(); static GeoHash fromOrd(long ord, int significantBits); static long stepsBetween(GeoHash one, GeoHash two); GeoHash[] getAdjacent(); int significantBits(); long longValue(); int intValue(); String toBase32(); boolean within(GeoHash boundingBox); boolean contains(WGS84Point point); WGS84Point getPoint(); WGS84Point getBoundingBoxCenterPoint(); BoundingBox getBoundingBox(); boolean enclosesCircleAroundPoint(WGS84Point point, double radius); GeoHash getNorthernNeighbour(); GeoHash getSouthernNeighbour(); GeoHash getEasternNeighbour(); GeoHash getWesternNeighbour(); @Override String toString(); String toBinaryString(); @Override boolean equals(Object obj); @Override int hashCode(); @Override int compareTo(GeoHash o); static double distFromInMeter(double lat1, double 
lng1, double lat2, double lng2); static final long FIRST_BIT_FLAGGED; }
@Test public void testToBase32() { hash.bits = 0x6ff0414000000000l; hash.significantBits = 25; String base32 = hash.toBase32(); assertEquals("ezs42", base32); }
GeoHash implements Comparable<GeoHash>, Serializable { public boolean within(GeoHash boundingBox) { return (bits & boundingBox.mask()) == boundingBox.bits; } protected GeoHash(); private GeoHash(double latitude, double longitude, int desiredPrecision); static void main(String[] args); static int getBitsToShift(int b1, int b2); static GeoHash withUnprecisionOfDistanceInMeter(double centerLat, double centerLong, double distanceInMeter); static GeoHash withCharacterPrecision(double latitude, double longitude, int numberOfCharacters); static GeoHash withBitPrecision(double latitude, double longitude, int numberOfBits); static GeoHash fromBinaryString(String binaryString); static GeoHash fromGeohashString(String geohash); static GeoHash fromIntValue(int hashVal); static GeoHash fromLongValue(long hashVal, int significantBits); GeoHash next(int step); GeoHash next(); GeoHash prev(); long ord(); static GeoHash fromOrd(long ord, int significantBits); static long stepsBetween(GeoHash one, GeoHash two); GeoHash[] getAdjacent(); int significantBits(); long longValue(); int intValue(); String toBase32(); boolean within(GeoHash boundingBox); boolean contains(WGS84Point point); WGS84Point getPoint(); WGS84Point getBoundingBoxCenterPoint(); BoundingBox getBoundingBox(); boolean enclosesCircleAroundPoint(WGS84Point point, double radius); GeoHash getNorthernNeighbour(); GeoHash getSouthernNeighbour(); GeoHash getEasternNeighbour(); GeoHash getWesternNeighbour(); @Override String toString(); String toBinaryString(); @Override boolean equals(Object obj); @Override int hashCode(); @Override int compareTo(GeoHash o); static double distFromInMeter(double lat1, double lng1, double lat2, double lng2); static final long FIRST_BIT_FLAGGED; }
@Test public void testWithin() { hash.bits = 0x6ff0414000000000l; hash.significantBits = 25; System.out.println(hash.toBase32()); assertEquals("ezs42", hash.toBase32()); GeoHash bbox = new GeoHash(); bbox.bits = 0x6ff0000000000000l; bbox.significantBits = 12; assertWithin(hash, bbox); }
GeoHash implements Comparable<GeoHash>, Serializable { public static GeoHash withBitPrecision(double latitude, double longitude, int numberOfBits) { if (Math.abs(latitude) > 90.0 || Math.abs(longitude) > 180.0) { throw new IllegalArgumentException("Can't have lat/lon values out of (-90,90)/(-180/180)"); } return new GeoHash(latitude, longitude, numberOfBits); } protected GeoHash(); private GeoHash(double latitude, double longitude, int desiredPrecision); static void main(String[] args); static int getBitsToShift(int b1, int b2); static GeoHash withUnprecisionOfDistanceInMeter(double centerLat, double centerLong, double distanceInMeter); static GeoHash withCharacterPrecision(double latitude, double longitude, int numberOfCharacters); static GeoHash withBitPrecision(double latitude, double longitude, int numberOfBits); static GeoHash fromBinaryString(String binaryString); static GeoHash fromGeohashString(String geohash); static GeoHash fromIntValue(int hashVal); static GeoHash fromLongValue(long hashVal, int significantBits); GeoHash next(int step); GeoHash next(); GeoHash prev(); long ord(); static GeoHash fromOrd(long ord, int significantBits); static long stepsBetween(GeoHash one, GeoHash two); GeoHash[] getAdjacent(); int significantBits(); long longValue(); int intValue(); String toBase32(); boolean within(GeoHash boundingBox); boolean contains(WGS84Point point); WGS84Point getPoint(); WGS84Point getBoundingBoxCenterPoint(); BoundingBox getBoundingBox(); boolean enclosesCircleAroundPoint(WGS84Point point, double radius); GeoHash getNorthernNeighbour(); GeoHash getSouthernNeighbour(); GeoHash getEasternNeighbour(); GeoHash getWesternNeighbour(); @Override String toString(); String toBinaryString(); @Override boolean equals(Object obj); @Override int hashCode(); @Override int compareTo(GeoHash o); static double distFromInMeter(double lat1, double lng1, double lat2, double lng2); static final long FIRST_BIT_FLAGGED; }
@Test public void testSimpleWithin() { GeoHash hash = GeoHash.withBitPrecision(70, -120, 8); GeoHash inside = GeoHash.withBitPrecision(74, -130, 64); assertWithin(inside, hash); }
GeoHash implements Comparable<GeoHash>, Serializable { public GeoHash next(int step) { return fromOrd(ord() + step, significantBits); } protected GeoHash(); private GeoHash(double latitude, double longitude, int desiredPrecision); static void main(String[] args); static int getBitsToShift(int b1, int b2); static GeoHash withUnprecisionOfDistanceInMeter(double centerLat, double centerLong, double distanceInMeter); static GeoHash withCharacterPrecision(double latitude, double longitude, int numberOfCharacters); static GeoHash withBitPrecision(double latitude, double longitude, int numberOfBits); static GeoHash fromBinaryString(String binaryString); static GeoHash fromGeohashString(String geohash); static GeoHash fromIntValue(int hashVal); static GeoHash fromLongValue(long hashVal, int significantBits); GeoHash next(int step); GeoHash next(); GeoHash prev(); long ord(); static GeoHash fromOrd(long ord, int significantBits); static long stepsBetween(GeoHash one, GeoHash two); GeoHash[] getAdjacent(); int significantBits(); long longValue(); int intValue(); String toBase32(); boolean within(GeoHash boundingBox); boolean contains(WGS84Point point); WGS84Point getPoint(); WGS84Point getBoundingBoxCenterPoint(); BoundingBox getBoundingBox(); boolean enclosesCircleAroundPoint(WGS84Point point, double radius); GeoHash getNorthernNeighbour(); GeoHash getSouthernNeighbour(); GeoHash getEasternNeighbour(); GeoHash getWesternNeighbour(); @Override String toString(); String toBinaryString(); @Override boolean equals(Object obj); @Override int hashCode(); @Override int compareTo(GeoHash o); static double distFromInMeter(double lat1, double lng1, double lat2, double lng2); static final long FIRST_BIT_FLAGGED; }
@Test public void testNext() { double lat = 37.7; double lon = -122.52; GeoHash hash = GeoHash.withBitPrecision(lat, lon, 10); GeoHash next = hash.next(); assertTrue(hash.compareTo(next) < 0); }
GeoHash implements Comparable<GeoHash>, Serializable { public static long stepsBetween(GeoHash one, GeoHash two) { if (one.significantBits() != two.significantBits()) { throw new IllegalArgumentException( "It is only valid to compare the number of steps between two hashes if they have the same number of significant bits"); } return two.ord() - one.ord(); } protected GeoHash(); private GeoHash(double latitude, double longitude, int desiredPrecision); static void main(String[] args); static int getBitsToShift(int b1, int b2); static GeoHash withUnprecisionOfDistanceInMeter(double centerLat, double centerLong, double distanceInMeter); static GeoHash withCharacterPrecision(double latitude, double longitude, int numberOfCharacters); static GeoHash withBitPrecision(double latitude, double longitude, int numberOfBits); static GeoHash fromBinaryString(String binaryString); static GeoHash fromGeohashString(String geohash); static GeoHash fromIntValue(int hashVal); static GeoHash fromLongValue(long hashVal, int significantBits); GeoHash next(int step); GeoHash next(); GeoHash prev(); long ord(); static GeoHash fromOrd(long ord, int significantBits); static long stepsBetween(GeoHash one, GeoHash two); GeoHash[] getAdjacent(); int significantBits(); long longValue(); int intValue(); String toBase32(); boolean within(GeoHash boundingBox); boolean contains(WGS84Point point); WGS84Point getPoint(); WGS84Point getBoundingBoxCenterPoint(); BoundingBox getBoundingBox(); boolean enclosesCircleAroundPoint(WGS84Point point, double radius); GeoHash getNorthernNeighbour(); GeoHash getSouthernNeighbour(); GeoHash getEasternNeighbour(); GeoHash getWesternNeighbour(); @Override String toString(); String toBinaryString(); @Override boolean equals(Object obj); @Override int hashCode(); @Override int compareTo(GeoHash o); static double distFromInMeter(double lat1, double lng1, double lat2, double lng2); static final long FIRST_BIT_FLAGGED; }
@Test public void testStepsBetween() { GeoHash bl = GeoHash.withBitPrecision(37.7, -122.52, 35); GeoHash ur = GeoHash.withBitPrecision(37.84, -122.35, 35); long steps = GeoHash.stepsBetween(bl, bl); assertEquals(steps, 0); steps = GeoHash.stepsBetween(bl, bl.next(4)); assertEquals(steps, 4); BoundingBoxGeoHashIterator iter = new BoundingBoxGeoHashIterator(new TwoGeoHashBoundingBox(bl, ur)); int count = 0; while (iter.hasNext()) { iter.next(); count++; } assertEquals(12875, count); int allHashes = 0; int inBbox = 1; int latMore = 0; int lonMore = 0; int bothMore = 0; int latLess = 0; int lonLess = 0; int bothLess = 0; int latLessLonMore = 0; int latMoreLonLess = 0; GeoHash idx = bl; BoundingBox iterBbox = iter.getBoundingBox().getBoundingBox(); while (idx.compareTo(ur) < 0) { idx = idx.next(); allHashes++; if (iterBbox.contains(idx.getPoint())) { inBbox++; } boolean latIsMore = false; boolean latIsLess = false; if (idx.getPoint().getLatitude() > iterBbox.getMaxLat()) { latIsMore = true; latMore++; } else if (idx.getPoint().getLatitude() < iterBbox.getMinLat()) { latIsLess = true; latLess++; } if (idx.getPoint().getLongitude() > iterBbox.getMaxLon()) { lonMore++; if (latIsMore) { bothMore++; } if (latIsLess) { latLessLonMore++; } } else if (idx.getPoint().getLongitude() < iterBbox.getMinLon()) { lonLess++; if (latIsLess) { bothLess++; } if (latIsMore) { latMoreLonLess++; } } } steps = GeoHash.stepsBetween(bl, ur); assertEquals(48472, steps); assertEquals(steps, allHashes); assertEquals(count, inBbox); assertEquals(14938, latMore); assertEquals(640, lonMore); assertEquals(0, bothMore); assertEquals(7680, latLess); assertEquals(24391, lonLess); assertEquals(0, bothLess); assertEquals(240, latLessLonMore); assertEquals(11811, latMoreLonLess); assertEquals(steps, lonLess + latLess + latMore + lonMore + inBbox - latLessLonMore - latMoreLonLess - 1); }
WGS84Point implements Serializable { @Override public boolean equals(Object obj) { if (obj instanceof WGS84Point) { WGS84Point other = (WGS84Point) obj; return latitude == other.latitude && longitude == other.longitude; } return false; } WGS84Point(double latitude, double longitude); WGS84Point(WGS84Point other); double getLatitude(); double getLongitude(); @Override String toString(); @Override boolean equals(Object obj); @Override int hashCode(); }
@Test public void testEquals() { assertEquals(a, a); assertEquals(a, b); assertEquals(b, a); assertNotSame(a, b); assertFalse(a.equals(c)); assertFalse(c.equals(a)); assertFalse(d.equals(c)); assertFalse(d.equals(a)); assertFalse(d.equals(new Integer(10))); }
WGS84Point implements Serializable { @Override public int hashCode() { int result = 42; long latBits = Double.doubleToLongBits(latitude); long lonBits = Double.doubleToLongBits(longitude); result = 31 * result + (int) (latBits ^ (latBits >>> 32)); result = 31 * result + (int) (lonBits ^ (lonBits >>> 32)); return result; } WGS84Point(double latitude, double longitude); WGS84Point(WGS84Point other); double getLatitude(); double getLongitude(); @Override String toString(); @Override boolean equals(Object obj); @Override int hashCode(); }
@Test public void testHashCode() { assertEquals(a.hashCode(), a.hashCode()); assertEquals(a.hashCode(), b.hashCode()); assertFalse(a.hashCode() == c.hashCode()); assertFalse(d.hashCode() == c.hashCode()); assertFalse(d.hashCode() == new Integer(10).hashCode()); }
HiveMetaStoreServiceImpl implements HiveMetaStoreService { @Override @Cacheable(unless = "#result==null") public Iterable<String> getAllDatabases() { Iterable<String> results = null; try { if (client == null) { LOGGER.warn("Hive client is null. " + "Please check your hive config."); return new ArrayList<>(); } results = client.getAllDatabases(); } catch (Exception e) { reconnect(); LOGGER.error("Can not get databases : {}", e); } return results; } HiveMetaStoreServiceImpl(); void setClient(IMetaStoreClient client); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") List<Table> getAllTable(String db); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") Map<String, List<String>> getAllTableNames(); @Override @Cacheable(unless = "#result==null") Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "hive", allEntries = true, beforeInvocation = true) void evictHiveCache(); }
// Happy path: a stubbed client with one database yields a non-empty result.
@Test
public void testGetAllDatabasesForNormalRun() throws TException {
    given(client.getAllDatabases()).willReturn(Arrays.asList("default"));
    // JUnit's assertEquals takes the expected value FIRST; the arguments
    // were previously reversed.
    assertEquals(true, service.getAllDatabases().iterator().hasNext());
}
MeasureController {

    /**
     * PUT /measures — applies the submitted changes and returns the stored
     * measure with HTTP 200.
     */
    @RequestMapping(value = "/measures", method = RequestMethod.PUT)
    @ResponseStatus(HttpStatus.OK)
    public Measure updateMeasure(@RequestBody Measure measure) {
        final Measure updated = measureService.updateMeasure(measure);
        return updated;
    }

    @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type);
    @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id);
    @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id);
    @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures();
    @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure);
    @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner);
    @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure);
}
// PUT /measures contract tests for MeasureController.updateMeasure with the
// service layer mocked: unknown measure id -> 404, measure-type mismatch ->
// 400, and the success path -> 200 echoing the measure name in the JSON body.
@Test public void testUpdateMeasureForNotFound() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); String measureJson = JsonUtil.toJson(measure); doThrow(new GriffinException.NotFoundException(GriffinExceptionMessage .MEASURE_ID_DOES_NOT_EXIST)) .when(service).updateMeasure(measure); mvc.perform(put(URLHelper.API_VERSION_PATH + "/measures") .contentType(MediaType.APPLICATION_JSON).content(measureJson)) .andExpect(status().isNotFound()); } @Test public void testUpdateMeasureForTypeMismatch() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); String measureJson = JsonUtil.toJson(measure); doThrow(new GriffinException.BadRequestException(GriffinExceptionMessage .MEASURE_TYPE_DOES_NOT_MATCH)) .when(service).updateMeasure(measure); mvc.perform(put(URLHelper.API_VERSION_PATH + "/measures") .contentType(MediaType.APPLICATION_JSON).content(measureJson)) .andExpect(status().isBadRequest()); } @Test public void testUpdateMeasureForSuccess() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); String measureJson = JsonUtil.toJson(measure); doReturn(measure).when(service).updateMeasure(measure); mvc.perform(put(URLHelper.API_VERSION_PATH + "/measures") .contentType(MediaType.APPLICATION_JSON).content(measureJson)) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", is("view_item_hourly"))); }
MeasureController {

    /**
     * GET /measures/owner/{owner} — lists non-deleted measures owned by the
     * given user.
     */
    @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET)
    public List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner) {
        final List<Measure> owned = measureService.getAliveMeasuresByOwner(owner);
        return owned;
    }

    @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type);
    @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id);
    @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id);
    @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures();
    @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure);
    @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner);
    @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure);
}
// GET /measures/owner/{owner}: one stubbed measure is returned and surfaces
// as the first JSON array element.
// NOTE(review): LinkedList has no benefit here — an ArrayList (or
// Collections.singletonList) would be the idiomatic choice; confirm the
// test class's imports before switching.
@Test public void testGetAllMeasuresByOwner() throws Exception { String owner = "test"; List<Measure> measureList = new LinkedList<>(); Measure measure = createGriffinMeasure("view_item_hourly"); measureList.add(measure); given(service.getAliveMeasuresByOwner(owner)).willReturn(measureList); mvc.perform(get(URLHelper.API_VERSION_PATH + "/measures/owner/" + owner) .contentType(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()) .andExpect(jsonPath("$.[0].name", is("view_item_hourly"))) ; }
MeasureController {

    /**
     * POST /measures — persists a new measure and returns it with HTTP 201.
     */
    @RequestMapping(value = "/measures", method = RequestMethod.POST)
    @ResponseStatus(HttpStatus.CREATED)
    public Measure createMeasure(@RequestBody Measure measure) {
        final Measure created = measureService.createMeasure(measure);
        return created;
    }

    @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type);
    @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id);
    @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id);
    @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures();
    @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure);
    @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner);
    @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure);
}
// POST /measures contract tests with the service mocked: success -> 201 with
// the measure name in the body, duplicate name -> 409, and a service-side
// validation failure (missing metric name) -> 400.
@Test public void testCreateNewMeasureForSuccess() throws Exception { GriffinMeasure measure = createGriffinMeasure("view_item_hourly"); String measureJson = JsonUtil.toJson(measure); given(service.createMeasure(measure)).willReturn(measure); mvc.perform(post(URLHelper.API_VERSION_PATH + "/measures") .contentType(MediaType.APPLICATION_JSON).content(measureJson)) .andExpect(status().isCreated()) .andExpect(jsonPath("$.name", is("view_item_hourly"))); } @Test public void testCreateNewMeasureForFailWithDuplicate() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); String measureJson = JsonUtil.toJson(measure); doThrow(new GriffinException.ConflictException(GriffinExceptionMessage .MEASURE_NAME_ALREADY_EXIST)) .when(service).createMeasure(measure); mvc.perform(post(URLHelper.API_VERSION_PATH + "/measures") .contentType(MediaType.APPLICATION_JSON).content(measureJson)) .andExpect(status().isConflict()); } @Test public void testCreateNewMeasureForFailWithInvalidParams() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); String measureJson = JsonUtil.toJson(measure); doThrow(new GriffinException.BadRequestException(GriffinExceptionMessage .MISSING_METRIC_NAME)) .when(service).createMeasure(measure); mvc.perform(post(URLHelper.API_VERSION_PATH + "/measures") .contentType(MediaType.APPLICATION_JSON).content(measureJson)) .andExpect(status().isBadRequest()); }
GriffinMeasureOperatorImpl implements MeasureOperator {

    /**
     * Validates and persists a new Griffin measure; returns the saved entity.
     */
    @Override
    public Measure create(Measure measure) {
        // Validation throws before anything touches the repository.
        validateMeasure(measure);
        final Measure saved = measureRepo.save(measure);
        return saved;
    }

    @Autowired GriffinMeasureOperatorImpl(MeasureRepo<Measure> measureRepo, JobServiceImpl jobService);
    @Override Measure create(Measure measure);
    @Override Measure update(Measure measure);
    @Override void delete(Measure measure);
}
// GriffinMeasureOperatorImpl.create: success path returns the saved measure,
// and a measure whose connectors carry null names is rejected with
// BadRequestException during validation.
@Test public void testCreateForSuccess() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); given(measureRepo.save(measure)).willReturn(measure); Measure m = operator.create(measure); assertEquals(m.getName(), measure.getName()); } @Test(expected = GriffinException.BadRequestException.class) public void testCreateForFailureWithConnectorNull() throws Exception { String measureName = "view_item_hourly"; DataConnector dcSource = createDataConnector(null, "default", "test_data_src", "dt=#YYYYMMdd# AND hour =#HH#"); DataConnector dcTarget = createDataConnector(null, "default", "test_data_tgt", "dt=#YYYYMMdd# AND hour =#HH#"); GriffinMeasure measure = createGriffinMeasure(measureName, dcSource, dcTarget); operator.create(measure); }
GriffinMeasureOperatorImpl implements MeasureOperator {

    /**
     * Validates the measure, clears any soft-delete flag, and persists the
     * update; returns the saved entity.
     */
    @Override
    public Measure update(Measure measure) {
        validateMeasure(measure);
        // Updating always revives the record; deletion goes through delete().
        measure.setDeleted(false);
        return measureRepo.save(measure);
    }

    @Autowired GriffinMeasureOperatorImpl(MeasureRepo<Measure> measureRepo, JobServiceImpl jobService);
    @Override Measure create(Measure measure);
    @Override Measure update(Measure measure);
    @Override void delete(Measure measure);
}
// GriffinMeasureOperatorImpl: update persists exactly once; delete removes
// related jobs and then soft-deletes the measure via save().
@Test
public void testUpdateForSuccess() throws Exception {
    Measure measure = createGriffinMeasure("view_item_hourly");
    given(measureRepo.save(measure)).willReturn(measure);
    operator.update(measure);
    verify(measureRepo, times(1)).save(measure);
}

@Test
public void testDeleteForSuccess() throws Exception {
    Measure measure = createGriffinMeasure("view_item_hourly");
    measure.setId(1L);
    doNothing().when(jobService).deleteJobsRelateToMeasure(1L);
    given(measureRepo.save(measure)).willReturn(measure);
    // Fixed: this test previously invoked operator.update(measure), so the
    // delete() path (job cleanup + soft delete) was never exercised.
    operator.delete(measure);
    verify(measureRepo, times(1)).save(measure);
}
HiveMetaStoreController {

    /** GET /tables?db=… — lists every Hive table in the given database. */
    @RequestMapping(value = "/tables", method = RequestMethod.GET)
    public List<Table> getAllTables(@RequestParam("db") String dbName) {
        final List<Table> tables = hiveMetaStoreService.getAllTable(dbName);
        return tables;
    }

    @RequestMapping(value = "/dbs", method = RequestMethod.GET) Iterable<String> getAllDatabases();
    @RequestMapping(value = "/tables/names", method = RequestMethod.GET) Iterable<String> getAllTableNames(@RequestParam("db") String dbName);
    @RequestMapping(value = "/tables", method = RequestMethod.GET) List<Table> getAllTables(@RequestParam("db") String dbName);
    @RequestMapping(value = "/dbs/tables", method = RequestMethod.GET) Map<String, List<Table>> getAllTables();
    @RequestMapping(value = "/dbs/tables/names", method = RequestMethod.GET) Map<String, List<String>> getAllTableNames();
    @RequestMapping(value = "/table", method = RequestMethod.GET) Table getTable(@RequestParam("db") String dbName, @RequestParam("table") String tableName);
}
// GET /metadata/hive/dbs/tables: the service's db->tables map is serialized
// as a JSON object; a db keyed "table" with no tables yields an empty array.
@Test public void testGetAllTables() throws Exception { Map<String, List<Table>> results = new HashMap<>(); results.put("table", new ArrayList<>()); given(hiveMetaStoreService.getAllTable()).willReturn(results); mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/dbs/tables")) .andExpect(status().isOk()) .andExpect(jsonPath("$.table", hasSize(0))); }
GriffinMeasureOperatorImpl implements MeasureOperator {

    /**
     * Soft-deletes a measure: first removes the jobs that reference it, then
     * flags the measure itself as deleted.
     * @throws SchedulerException if job removal fails
     */
    @Override
    public void delete(Measure measure) throws SchedulerException {
        final Long measureId = measure.getId();
        // Dependent jobs must go before the measure is flagged, so no job
        // survives pointing at a deleted measure.
        jobService.deleteJobsRelateToMeasure(measureId);
        measure.setDeleted(true);
        measureRepo.save(measure);
    }

    @Autowired GriffinMeasureOperatorImpl(MeasureRepo<Measure> measureRepo, JobServiceImpl jobService);
    @Override Measure create(Measure measure);
    @Override Measure update(Measure measure);
    @Override void delete(Measure measure);
}
// delete() must propagate a ServiceException raised while removing the
// measure's related jobs (e.g. when pausing a scheduled job fails).
@Test(expected = GriffinException.ServiceException.class) public void testDeleteForFailureWithPauseJob() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); measure.setId(1L); doThrow(new GriffinException.ServiceException("Service exception", new RuntimeException())) .when(jobService).deleteJobsRelateToMeasure(1L); operator.delete(measure); }
MeasureOrgServiceImpl implements MeasureOrgService {

    /** Lists the organizations owning at least one non-deleted measure. */
    @Override
    public List<String> getOrgs() {
        final List<String> organizations = measureRepo.findOrganizations(false);
        return organizations;
    }

    @Override List<String> getOrgs();
    @Override List<String> getMetricNameListByOrg(String org);
    @Override Map<String, List<String>> getMeasureNamesGroupByOrg();
    @Override Map<String, Map<String, List<Map<String, Object>>>> getMeasureWithJobDetailsGroupByOrg(Map<String, List<Map<String, Object>>> jobDetails);
}
// getOrgs() should surface exactly the organizations the repository reports.
@Test
public void testGetOrgs() {
    String expectedOrg = "orgName";
    given(measureRepo.findOrganizations(false))
            .willReturn(Arrays.asList(expectedOrg));

    List<String> actual = service.getOrgs();

    assertThat(actual.size()).isEqualTo(1);
    assertThat(actual.get(0)).isEqualTo(expectedOrg);
}
MeasureOrgServiceImpl implements MeasureOrgService {

    /**
     * Lists the names of non-deleted measures belonging to an organization.
     * @throws GriffinException.NotFoundException when the organization has
     *         no (alive) measures
     */
    @Override
    public List<String> getMetricNameListByOrg(String org) {
        final List<String> names = measureRepo.findNameByOrganization(org, false);
        if (CollectionUtils.isEmpty(names)) {
            // An unknown (or empty) organization is reported as 404.
            throw new GriffinException.NotFoundException(ORGANIZATION_NAME_DOES_NOT_EXIST);
        }
        return names;
    }

    @Override List<String> getOrgs();
    @Override List<String> getMetricNameListByOrg(String org);
    @Override Map<String, List<String>> getMeasureNamesGroupByOrg();
    @Override Map<String, Map<String, List<Map<String, Object>>>> getMeasureWithJobDetailsGroupByOrg(Map<String, List<Map<String, Object>>> jobDetails);
}
// Happy path: measure names found for the organization are returned verbatim.
@Test public void testGetMetricNameListByOrg() { String orgName = "orgName"; String measureName = "measureName"; given(measureRepo.findNameByOrganization(orgName, false)) .willReturn(Arrays.asList(measureName)); List<String> measureNames = service.getMetricNameListByOrg(orgName); assertThat(measureNames.size()).isEqualTo(1); assertThat(measureNames.get(0)).isEqualTo(measureName); }
MeasureOrgServiceImpl implements MeasureOrgService {

    /**
     * Groups the names of all non-deleted measures by organization. Measures
     * without an organization are grouped under the literal key "null"
     * (behavior preserved from the original implementation).
     */
    @Override
    public Map<String, List<String>> getMeasureNamesGroupByOrg() {
        Map<String, List<String>> orgWithMetricsMap = new HashMap<>();
        List<GriffinMeasure> measures = measureRepo.findByDeleted(false);
        for (Measure measure : measures) {
            String orgName = measure.getOrganization();
            orgName = orgName == null ? "null" : orgName;
            // computeIfAbsent replaces the getOrDefault/put round trip with a
            // single idiomatic map operation.
            orgWithMetricsMap.computeIfAbsent(orgName, k -> new ArrayList<>())
                    .add(measure.getName());
        }
        return orgWithMetricsMap;
    }

    @Override List<String> getOrgs();
    @Override List<String> getMetricNameListByOrg(String org);
    @Override Map<String, List<String>> getMeasureNamesGroupByOrg();
    @Override Map<String, Map<String, List<Map<String, Object>>>> getMeasureWithJobDetailsGroupByOrg(Map<String, List<Map<String, Object>>> jobDetails);
}
// Grouping by organization: one measure produces one group; an empty
// repository result produces an empty map (never null).
@Test public void testGetMeasureNamesGroupByOrg() throws Exception { GriffinMeasure measure = createGriffinMeasure("measure"); when(measureRepo.findByDeleted(false)).thenReturn(Arrays .asList(measure)); Map<String, List<String>> map = service.getMeasureNamesGroupByOrg(); assertThat(map.size()).isEqualTo(1); } @Test public void testGetMeasureNamesGroupByOrgWithNull() { when(measureRepo.findByDeleted(false)).thenReturn(new ArrayList<>()); Map<String, List<String>> map = service.getMeasureNamesGroupByOrg(); assert map.size() == 0; }
MeasureOrgServiceImpl implements MeasureOrgService {

    /**
     * Builds org -> (measure name -> job-detail list), joining each
     * non-deleted measure with the job details keyed by its id. Measures
     * without matching job details map to an empty list.
     * Returns null when the repository returns null (kept — existing callers
     * and tests rely on the null contract).
     */
    @Override
    public Map<String, Map<String, List<Map<String, Object>>>>
            getMeasureWithJobDetailsGroupByOrg(Map<String, List<Map<String, Object>>> jobDetails) {
        Map<String, Map<String, List<Map<String, Object>>>> result = new HashMap<>();
        List<GriffinMeasure> measures = measureRepo.findByDeleted(false);
        if (measures == null) {
            return null;
        }
        for (Measure measure : measures) {
            String orgName = measure.getOrganization();
            String measureId = measure.getId().toString();
            // jobDetails is keyed by measure id (as a string).
            List<Map<String, Object>> jobList = jobDetails
                    .getOrDefault(measureId, new ArrayList<>());
            // computeIfAbsent replaces the getOrDefault/put round trip.
            result.computeIfAbsent(orgName, k -> new HashMap<>())
                    .put(measure.getName(), jobList);
        }
        return result;
    }

    @Override List<String> getOrgs();
    @Override List<String> getMetricNameListByOrg(String org);
    @Override Map<String, List<String>> getMeasureNamesGroupByOrg();
    @Override Map<String, Map<String, List<Map<String, Object>>>> getMeasureWithJobDetailsGroupByOrg(Map<String, List<Map<String, Object>>> jobDetails);
}
// Joining measures with job details: a measure in "org" with id "1" picks up
// the job list keyed "1"; a null repository result yields a null map (the
// method's documented null contract).
@Test public void testGetMeasureWithJobDetailsGroupByOrgForSuccess() throws Exception { String measureName = "measureName"; String measureId = "1"; GriffinMeasure measure = createGriffinMeasure(measureName); measure.setOrganization("org"); measure.setId(Long.valueOf(measureId)); given(measureRepo.findByDeleted(false)).willReturn(Arrays .asList(measure)); Map<String, Object> jobDetail = createJobDetailMap(); List<Map<String, Object>> jobList = Arrays.asList(jobDetail); Map<String, List<Map<String, Object>>> measuresById = new HashMap<>(); measuresById.put(measureId, jobList); Map<String, Map<String, List<Map<String, Object>>>> map = service .getMeasureWithJobDetailsGroupByOrg(measuresById); assertThat(map.size()).isEqualTo(1); assertThat(map).containsKey("org"); assertThat(map.get("org").get(measureName)).isEqualTo(jobList); } @Test public void testGetMeasureWithJobDetailsGroupByOrgForFailure() throws Exception { Map detail = new HashMap(); given(measureRepo.findByDeleted(false)).willReturn(null); Map map = service.getMeasureWithJobDetailsGroupByOrg(detail); assert map == null; }
ExternalMeasureOperatorImpl implements MeasureOperator {

    /**
     * Validates and persists an external measure together with its virtual
     * job; the whole operation is one transaction.
     */
    @Override
    @Transactional
    public Measure create(Measure measure) {
        final ExternalMeasure external = (ExternalMeasure) measure;
        validateMeasure(external);
        // Save the measure with a fresh placeholder job first, then attach
        // the generated virtual job that references the saved measure.
        external.setVirtualJob(new VirtualJob());
        final ExternalMeasure saved = measureRepo.save(external);
        jobRepo.save(genVirtualJob(saved, saved.getVirtualJob()));
        return saved;
    }

    @Override @Transactional Measure create(Measure measure);
    @Override Measure update(Measure measure);
    @Override void delete(Measure measure);
}
// ExternalMeasureOperatorImpl: create saves the measure plus a virtual job;
// a blank metric name is rejected during validation.
// NOTE(review): testUpdateForSuccess stubs findOne(1L) but then calls
// operator.create(measure), so the update() path is never exercised —
// confirm whether it should invoke operator.update instead. Also,
// org.mockito.Matchers is deprecated in favor of ArgumentMatchers.
@Test public void testCreateForSuccess() { ExternalMeasure measure = createExternalMeasure("view_item_hourly"); given(measureRepo.save(measure)).willReturn(measure); given(jobRepo.save(Matchers.any(VirtualJob.class))).willReturn( new VirtualJob()); operator.create(measure); verify(jobRepo, times(1)).save(new VirtualJob()); } @Test(expected = GriffinException.BadRequestException.class) public void testCreateForFailureWithBlankMetricName() { String measureName = "view_item_hourly"; ExternalMeasure measure = createExternalMeasure(measureName); measure.setMetricName(" "); operator.create(measure); } @Test public void testUpdateForSuccess() { ExternalMeasure measure = createExternalMeasure("view_item_hourly"); measure.setId(1L); given(measureRepo.findOne(1L)).willReturn(measure); given(measureRepo.save(Matchers.any(ExternalMeasure.class))) .willReturn(measure); operator.create(measure); verify(measureRepo, times(1)).save( Matchers.any(ExternalMeasure.class)); }
HiveMetaStoreController {

    /** GET /table?db=…&table=… — fetches a single Hive table's metadata. */
    @RequestMapping(value = "/table", method = RequestMethod.GET)
    public Table getTable(@RequestParam("db") String dbName,
                          @RequestParam("table") String tableName) {
        final Table table = hiveMetaStoreService.getTable(dbName, tableName);
        return table;
    }

    @RequestMapping(value = "/dbs", method = RequestMethod.GET) Iterable<String> getAllDatabases();
    @RequestMapping(value = "/tables/names", method = RequestMethod.GET) Iterable<String> getAllTableNames(@RequestParam("db") String dbName);
    @RequestMapping(value = "/tables", method = RequestMethod.GET) List<Table> getAllTables(@RequestParam("db") String dbName);
    @RequestMapping(value = "/dbs/tables", method = RequestMethod.GET) Map<String, List<Table>> getAllTables();
    @RequestMapping(value = "/dbs/tables/names", method = RequestMethod.GET) Map<String, List<String>> getAllTableNames();
    @RequestMapping(value = "/table", method = RequestMethod.GET) Table getTable(@RequestParam("db") String dbName, @RequestParam("table") String tableName);
}
// GET /metadata/hive/table: the stubbed Table is serialized with its
// tableName echoed in the JSON response.
@Test public void testGetTable() throws Exception { String dbName = "default"; String tableName = "table"; given(hiveMetaStoreService.getTable(dbName, tableName)).willReturn( new Table(tableName, null, null, 0, 0, 0, null, null, null, null, null, null)); mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/table") .param("db", dbName).param("table", tableName)) .andExpect(status().isOk()) .andExpect(jsonPath("$.tableName", is(tableName))); }
ExternalMeasureOperatorImpl implements MeasureOperator {

    /**
     * Validates the incoming external measure, regenerates its virtual job
     * from the persisted one, and saves the result.
     */
    @Override
    public Measure update(Measure measure) {
        final ExternalMeasure incoming = (ExternalMeasure) measure;
        validateMeasure(incoming);
        // Re-use the stored measure's virtual job so its identity survives
        // the update.
        final ExternalMeasure stored = measureRepo.findOne(incoming.getId());
        incoming.setVirtualJob(genVirtualJob(incoming, stored.getVirtualJob()));
        return measureRepo.save(incoming);
    }

    @Override @Transactional Measure create(Measure measure);
    @Override Measure update(Measure measure);
    @Override void delete(Measure measure);
}
// A blank metric name must fail validation before anything is persisted.
@Test(expected = GriffinException.BadRequestException.class)
public void testUpdateForFailureWithBlankMetricName() {
    ExternalMeasure blankMetricMeasure = createExternalMeasure("view_item_hourly");
    blankMetricMeasure.setMetricName(" ");
    operator.update(blankMetricMeasure);
}
ExternalMeasureOperatorImpl implements MeasureOperator {

    /**
     * Soft-deletes an external measure and its attached virtual job in a
     * single save.
     */
    @Override
    public void delete(Measure measure) {
        final ExternalMeasure external = (ExternalMeasure) measure;
        // Both records are only flagged, never physically removed.
        external.setDeleted(true);
        external.getVirtualJob().setDeleted(true);
        measureRepo.save(external);
    }

    @Override @Transactional Measure create(Measure measure);
    @Override Measure update(Measure measure);
    @Override void delete(Measure measure);
}
// delete() on an external measure flags it deleted and persists via save().
@Test public void testDeleteForSuccess() { ExternalMeasure measure = createExternalMeasure("view_item_hourly"); given(measureRepo.save(measure)).willReturn(measure); operator.delete(measure); verify(measureRepo, times(1)).save(measure); }
PredicatorFactory {

    /**
     * Creates the Predicator implementation matching the predicate's type:
     * "file.exist" -> FileExistPredicator, "custom" -> a bean looked up from
     * the predicate's own config.
     * @throws GriffinException.NotFoundException for any other type
     */
    public static Predicator newPredicateInstance(SegmentPredicate segPredicate) {
        switch (segPredicate.getType()) {
            case "file.exist":
                return new FileExistPredicator(segPredicate);
            case "custom":
                return getPredicateBean(segPredicate);
            default:
                throw new GriffinException.NotFoundException(PREDICATE_TYPE_NOT_FOUND);
        }
    }

    static Predicator newPredicateInstance(SegmentPredicate segPredicate);
}
// Factory dispatch: "file.exist" builds a FileExistPredicator, an unknown
// type raises NotFoundException, and "custom" instantiates the class named
// in the predicate's config map.
@Test public void testFileExistPredicatorCreation() throws IOException { Predicator predicator = PredicatorFactory.newPredicateInstance(createFileExistPredicate()); assertNotNull(predicator); assertTrue(predicator instanceof FileExistPredicator); } @Test(expected = GriffinException.NotFoundException.class) public void testUnknownPredicator() throws JsonProcessingException { PredicatorFactory.newPredicateInstance( new SegmentPredicate("unknown", null)); } @Test public void testPluggablePredicator() throws JsonProcessingException { String predicatorClass = "org.apache.griffin.core.util.PredicatorMock"; HashMap<String, Object> map = new HashMap<>(); map.put("class", predicatorClass); SegmentPredicate segmentPredicate = new SegmentPredicate("custom", null); segmentPredicate.setConfigMap(map); Predicator predicator = PredicatorFactory.newPredicateInstance(segmentPredicate); assertNotNull(predicator); assertTrue(predicator instanceof PredicatorMock); }
// JobController.getJobs — GET /jobs: delegates to the job service, filtering
// by the optional "type" query parameter (empty string = all alive jobs).
// NOTE(review): the triggerKeys mapping value lacks a leading "/"
// ("jobs/triggerKeys/…"); Spring tolerates this, but it is inconsistent with
// the other mappings — confirm intent.
JobController { @RequestMapping(value = "/jobs", method = RequestMethod.GET) public List<AbstractJob> getJobs(@RequestParam(value = "type", defaultValue = "") String type) { return jobService.getAliveJobs(type); } @RequestMapping(value = "/jobs", method = RequestMethod.GET) List<AbstractJob> getJobs(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/jobs", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) AbstractJob addJob(@RequestBody AbstractJob job); @RequestMapping(value = "/jobs/config") AbstractJob getJobConfig(@RequestParam("jobId") Long jobId); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) AbstractJob onActions( @PathVariable("id") Long jobId, @RequestParam String action); @RequestMapping(value = "/jobs", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@RequestParam("jobName") String jobName); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@PathVariable("id") Long id); @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET) List<JobInstanceBean> findInstancesOfJob( @RequestParam("jobId") Long id, @RequestParam("page") int page, @RequestParam("size") int size); @RequestMapping(value = "/jobs/instances/{instanceId}", method = RequestMethod.GET) JobInstanceBean findInstanceByInstanceId(@PathVariable("instanceId") Long id); @RequestMapping(value = "/jobs/health", method = RequestMethod.GET) JobHealth getHealthInfo(); @RequestMapping(path = "/jobs/download", method = RequestMethod.GET) ResponseEntity<Resource> download( @RequestParam("jobName") String jobName, @RequestParam("ts") long timestamp); @RequestMapping(value = "/jobs/trigger/{id}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.OK) Map<String, Object> triggerJob(@PathVariable("id") Long id, @RequestBody(required = false) String request); @RequestMapping(value = 
"jobs/triggerKeys/{triggerKey:.+}", method = RequestMethod.GET) List<JobInstanceBean> findInstanceByTriggerKey(@PathVariable("triggerKey") String triggerKey); }
// GET /jobs with no type filter returns the stubbed job; the serialized
// property name is "job.name".
@Test public void testGetJobs() throws Exception { AbstractJob jobBean = createGriffinJob(); jobBean.setJobName("job_name"); given(service.getAliveJobs("")) .willReturn(Collections.singletonList(jobBean)); mvc.perform( get(URLHelper.API_VERSION_PATH + "/jobs") .contentType(MediaType.APPLICATION_JSON)) .andExpect(status().isOk()) .andExpect(jsonPath("$[0]['job.name']", is("job_name"))); }
// JobController.deleteJob(String) — DELETE /jobs?jobName=…: delegates to the
// job service and answers 204 on success; SchedulerException propagates to
// the framework's error handling.
JobController { @RequestMapping(value = "/jobs", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) public void deleteJob(@RequestParam("jobName") String jobName) throws SchedulerException { jobService.deleteJob(jobName); } @RequestMapping(value = "/jobs", method = RequestMethod.GET) List<AbstractJob> getJobs(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/jobs", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) AbstractJob addJob(@RequestBody AbstractJob job); @RequestMapping(value = "/jobs/config") AbstractJob getJobConfig(@RequestParam("jobId") Long jobId); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) AbstractJob onActions( @PathVariable("id") Long jobId, @RequestParam String action); @RequestMapping(value = "/jobs", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@RequestParam("jobName") String jobName); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@PathVariable("id") Long id); @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET) List<JobInstanceBean> findInstancesOfJob( @RequestParam("jobId") Long id, @RequestParam("page") int page, @RequestParam("size") int size); @RequestMapping(value = "/jobs/instances/{instanceId}", method = RequestMethod.GET) JobInstanceBean findInstanceByInstanceId(@PathVariable("instanceId") Long id); @RequestMapping(value = "/jobs/health", method = RequestMethod.GET) JobHealth getHealthInfo(); @RequestMapping(path = "/jobs/download", method = RequestMethod.GET) ResponseEntity<Resource> download( @RequestParam("jobName") String jobName, @RequestParam("ts") long timestamp); @RequestMapping(value = "/jobs/trigger/{id}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.OK) Map<String, Object> triggerJob(@PathVariable("id") Long id, @RequestBody(required = false) String 
request); @RequestMapping(value = "jobs/triggerKeys/{triggerKey:.+}", method = RequestMethod.GET) List<JobInstanceBean> findInstanceByTriggerKey(@PathVariable("triggerKey") String triggerKey); }
// DELETE /jobs/{id} and DELETE /jobs?jobName=…: success -> 204, unknown
// id/name -> 404, service failure -> 500, for both overloads of deleteJob.
@Test public void testDeleteJobByIdForSuccess() throws Exception { doNothing().when(service).deleteJob(1L); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs/1")) .andExpect(status().isNoContent()); } @Test public void testDeleteJobByIdForFailureWithNotFound() throws Exception { doThrow(new GriffinException.NotFoundException(JOB_ID_DOES_NOT_EXIST)) .when(service).deleteJob(1L); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs/1")) .andExpect(status().isNotFound()); } @Test public void testDeleteJobByIdForFailureWithException() throws Exception { doThrow(new GriffinException.ServiceException("Failed to delete job", new Exception())) .when(service).deleteJob(1L); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs/1")) .andExpect(status().isInternalServerError()); } @Test public void testDeleteJobByNameForSuccess() throws Exception { String jobName = "jobName"; doNothing().when(service).deleteJob(jobName); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs").param("jobName" , jobName)) .andExpect(status().isNoContent()); } @Test public void testDeleteJobByNameForFailureWithNotFound() throws Exception { String jobName = "jobName"; doThrow(new GriffinException.NotFoundException(JOB_NAME_DOES_NOT_EXIST)) .when(service).deleteJob(jobName); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs").param("jobName" , jobName)) .andExpect(status().isNotFound()); } @Test public void testDeleteJobByNameForFailureWithException() throws Exception { String jobName = "jobName"; doThrow(new GriffinException.ServiceException("Failed to delete job", new Exception())) .when(service).deleteJob(jobName); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/jobs").param("jobName" , jobName)) .andExpect(status().isInternalServerError()); }
KafkaSchemaServiceImpl implements KafkaSchemaService {

    /**
     * Fetches the schema registered under the given id from the schema
     * registry's /schemas/ids/{id} endpoint.
     */
    @Override
    public SchemaString getSchemaString(Integer id) {
        final String regUrl = registryUrl("/schemas/ids/" + id);
        final ResponseEntity<SchemaString> response =
                restTemplate.getForEntity(regUrl, SchemaString.class);
        return response.getBody();
    }

    @Override SchemaString getSchemaString(Integer id);
    @Override Iterable<String> getSubjects();
    @Override Iterable<Integer> getSubjectVersions(String subject);
    @Override Schema getSubjectSchema(String subject, String version);
    @Override Config getTopLevelConfig();
    @Override Config getSubjectLevelConfig(String subject);
}
// Smoke test: getSchemaString(1) completes without throwing when the REST
// call is stubbed.
@Test
public void testGetSchemaString() {
    try {
        SchemaString ss = new SchemaString();
        ResponseEntity entity = mock(ResponseEntity.class);
        // NOTE(review): this stub URL contains an unresolved
        // "${kafka.schema.registry.url}" placeholder; it only matches if
        // registryUrl() leaves the property unexpanded in this test context
        // — verify, otherwise the stub is dead and getForEntity returns null.
        when(service.restTemplate.getForEntity(
                "${kafka.schema.registry.url}/schemas/ids/1",
                SchemaString.class)).thenReturn(entity);
        when(entity.getBody()).thenReturn(ss);
        service.getSchemaString(1);
        assertTrue(true);
    } catch (Throwable t) {
        // Fixed: the failure message was copy-pasted from an unrelated
        // tables/dbs test and misreported what failed.
        fail("Cannot get schema string by id");
    }
}
// JobController.findInstancesOfJob — GET /jobs/instances: pages through the
// execution instances of one job; delegates paging (jobId, page, size)
// straight to the job service.
JobController { @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET) public List<JobInstanceBean> findInstancesOfJob( @RequestParam("jobId") Long id, @RequestParam("page") int page, @RequestParam("size") int size) { return jobService.findInstancesOfJob(id, page, size); } @RequestMapping(value = "/jobs", method = RequestMethod.GET) List<AbstractJob> getJobs(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/jobs", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) AbstractJob addJob(@RequestBody AbstractJob job); @RequestMapping(value = "/jobs/config") AbstractJob getJobConfig(@RequestParam("jobId") Long jobId); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) AbstractJob onActions( @PathVariable("id") Long jobId, @RequestParam String action); @RequestMapping(value = "/jobs", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@RequestParam("jobName") String jobName); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@PathVariable("id") Long id); @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET) List<JobInstanceBean> findInstancesOfJob( @RequestParam("jobId") Long id, @RequestParam("page") int page, @RequestParam("size") int size); @RequestMapping(value = "/jobs/instances/{instanceId}", method = RequestMethod.GET) JobInstanceBean findInstanceByInstanceId(@PathVariable("instanceId") Long id); @RequestMapping(value = "/jobs/health", method = RequestMethod.GET) JobHealth getHealthInfo(); @RequestMapping(path = "/jobs/download", method = RequestMethod.GET) ResponseEntity<Resource> download( @RequestParam("jobName") String jobName, @RequestParam("ts") long timestamp); @RequestMapping(value = "/jobs/trigger/{id}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.OK) Map<String, Object> triggerJob(@PathVariable("id") Long id, 
@RequestBody(required = false) String request); @RequestMapping(value = "jobs/triggerKeys/{triggerKey:.+}", method = RequestMethod.GET) List<JobInstanceBean> findInstanceByTriggerKey(@PathVariable("triggerKey") String triggerKey); }
@Test public void testFindInstancesOfJob() throws Exception { int page = 0; int size = 2; JobInstanceBean jobInstance = new JobInstanceBean(1L, LivySessionStates .State.RUNNING, "", "", null, null); given(service.findInstancesOfJob(1L, page, size)).willReturn(Arrays .asList(jobInstance)); mvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/instances").param ("jobId", String.valueOf(1L)) .param("page", String.valueOf(page)).param("size", String.valueOf(size))) .andExpect(status().isOk()) .andExpect(jsonPath("$.[0].state", is("RUNNING"))); }
JobController { @RequestMapping(value = "/jobs/health", method = RequestMethod.GET) public JobHealth getHealthInfo() { return jobService.getHealthInfo(); } @RequestMapping(value = "/jobs", method = RequestMethod.GET) List<AbstractJob> getJobs(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/jobs", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) AbstractJob addJob(@RequestBody AbstractJob job); @RequestMapping(value = "/jobs/config") AbstractJob getJobConfig(@RequestParam("jobId") Long jobId); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) AbstractJob onActions( @PathVariable("id") Long jobId, @RequestParam String action); @RequestMapping(value = "/jobs", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@RequestParam("jobName") String jobName); @RequestMapping(value = "/jobs/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteJob(@PathVariable("id") Long id); @RequestMapping(value = "/jobs/instances", method = RequestMethod.GET) List<JobInstanceBean> findInstancesOfJob( @RequestParam("jobId") Long id, @RequestParam("page") int page, @RequestParam("size") int size); @RequestMapping(value = "/jobs/instances/{instanceId}", method = RequestMethod.GET) JobInstanceBean findInstanceByInstanceId(@PathVariable("instanceId") Long id); @RequestMapping(value = "/jobs/health", method = RequestMethod.GET) JobHealth getHealthInfo(); @RequestMapping(path = "/jobs/download", method = RequestMethod.GET) ResponseEntity<Resource> download( @RequestParam("jobName") String jobName, @RequestParam("ts") long timestamp); @RequestMapping(value = "/jobs/trigger/{id}", method = RequestMethod.POST) @ResponseStatus(HttpStatus.OK) Map<String, Object> triggerJob(@PathVariable("id") Long id, @RequestBody(required = false) String request); @RequestMapping(value = "jobs/triggerKeys/{triggerKey:.+}", method = 
RequestMethod.GET) List<JobInstanceBean> findInstanceByTriggerKey(@PathVariable("triggerKey") String triggerKey); }
@Test public void testGetHealthInfo() throws Exception { JobHealth jobHealth = new JobHealth(1, 3); given(service.getHealthInfo()).willReturn(jobHealth); mvc.perform(get(URLHelper.API_VERSION_PATH + "/jobs/health")) .andExpect(status().isOk()) .andExpect(jsonPath("$.healthyJobCount", is(1))); }
JobServiceImpl implements JobService { @Override public String triggerJobById(Long id) throws SchedulerException { AbstractJob job = jobRepo.findByIdAndDeleted(id, false); validateJobExist(job); Scheduler scheduler = factory.getScheduler(); JobKey jobKey = jobKey(job.getName(), job.getGroup()); if (scheduler.checkExists(jobKey)) { Trigger trigger = TriggerBuilder.newTrigger() .forJob(jobKey) .startNow() .build(); scheduler.scheduleJob(trigger); return trigger.getKey().toString(); } else { throw new GriffinException.NotFoundException(JOB_ID_DOES_NOT_EXIST); } } JobServiceImpl(); @Override List<AbstractJob> getAliveJobs(String type); @Override AbstractJob addJob(AbstractJob job); @Override AbstractJob getJobConfig(Long jobId); @Override AbstractJob onAction(Long jobId, String action); @Override void deleteJob(Long jobId); @Override void deleteJob(String name); @Override List<JobInstanceBean> findInstancesOfJob( Long jobId, int page, int size); @Override JobInstanceBean findInstance(Long id); @Override List<JobInstanceBean> findInstancesByTriggerKey(String triggerKey); @Override JobHealth getHealthInfo(); @Scheduled(fixedDelayString = "${jobInstance.expired.milliseconds}") void deleteExpiredJobInstance(); void deleteJobsRelateToMeasure(Long measureId); @Scheduled(fixedDelayString = "${jobInstance.fixedDelay.in.milliseconds}") void syncInstancesOfAllJobs(); Boolean isJobHealthy(Long jobId); @Override String getJobHdfsSinksPath(String jobName, long timestamp); @Override String triggerJobById(Long id); static final String GRIFFIN_JOB_ID; }
@Test public void testTriggerJobById() throws SchedulerException { Long jobId = 1L; AbstractJob job = createGriffinJob(); given(jobRepo.findByIdAndDeleted(jobId,false)).willReturn(job); Scheduler scheduler = mock(Scheduler.class); given(scheduler.checkExists(any(JobKey.class))).willReturn(true); ListenerManager listenerManager = mock(ListenerManager.class); given(scheduler.getListenerManager()).willReturn(listenerManager); given(factory.getScheduler()).willReturn(scheduler); JobInstanceBean jobInstanceBean = createJobInstance(); given(instanceRepo.findByTriggerKey(anyString())).willReturn(Collections.singletonList(jobInstanceBean)); String result = jobService.triggerJobById(jobId); assertTrue(result.matches("DEFAULT\\.[0-9a-f\\-]{49}")); verify(scheduler, times(1)).scheduleJob(any()); } @Test(expected = GriffinException.NotFoundException.class) public void testTriggerJobByIdFail() throws SchedulerException { Long jobId = 1L; given(jobRepo.findByIdAndDeleted(jobId,false)).willReturn(null); jobService.triggerJobById(jobId); }
FileExistPredicator implements Predicator { @Override public boolean predicate() throws IOException { Map<String, Object> config = predicate.getConfigMap(); String[] paths = null; String rootPath = null; if (config != null && !StringUtils.isEmpty((String) config.get(PREDICT_PATH))) { paths = ((String) config.get(PREDICT_PATH)) .split(PATH_CONNECTOR_CHARACTER); rootPath = (String) config.get(PREDICT_ROOT_PATH); } if (ArrayUtils.isEmpty(paths) || StringUtils.isEmpty(rootPath)) { LOGGER.error("Predicate path is null.Please check predicates " + "config root.path and path."); throw new NullPointerException(); } for (String path : paths) { String hdfsPath = rootPath + path; LOGGER.info("Predicate path: {}", hdfsPath); if (!FSUtil.isFileExist(hdfsPath)) { LOGGER.info("Predicate path: " + hdfsPath + " doesn't exist."); return false; } LOGGER.info("Predicate path: " + hdfsPath + " exists."); } return true; } FileExistPredicator(SegmentPredicate predicate); @Override boolean predicate(); }
@Test(expected = NullPointerException.class) public void test_predicate_null() throws IOException { SegmentPredicate predicate = new SegmentPredicate(); predicate.setConfig("test config"); Map<String, Object> configMap = new HashMap<>(); predicate.setConfigMap(configMap); FileExistPredicator predicator = new FileExistPredicator(predicate); assertTrue(predicator.predicate()); } @Test public void test_predicate() throws IOException { SegmentPredicate predicate = new SegmentPredicate(); predicate.setConfig("test config"); Map<String, Object> configMap = new HashMap<>(); configMap.put("path", fileName); configMap.put("root.path", rootPath); predicate.setConfigMap(configMap); FileExistPredicator predicator = new FileExistPredicator(predicate); assertTrue(predicator.predicate()); configMap.put("path", "fileName"); predicate.setConfigMap(configMap); assertFalse(predicator.predicate()); }
SparkSubmitJob implements Job { @Override public void execute(JobExecutionContext context) { JobDetail jd = context.getJobDetail(); try { if (isNeedLivyQueue) { livyTaskSubmitHelper.addTaskToWaitingQueue(jd); } else { saveJobInstance(jd); } } catch (Exception e) { LOGGER.error("Post spark task ERROR.", e); } } @Override void execute(JobExecutionContext context); }
@Test public void testExecuteWithPredicateTriggerGreaterThanRepeat() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); JobInstanceBean instance = createJobInstance(); GriffinMeasure measure = createGriffinMeasure("measureName"); SegmentPredicate predicate = createFileExistPredicate(); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson (Collections.singletonList(predicate))); given(context.getJobDetail()).willReturn(jd); given(context.getTrigger()).willReturn(createSimpleTrigger(4, 5)); given(jobInstanceRepo.findByPredicateName(Matchers.anyString())) .willReturn(instance); sparkSubmitJob.execute(context); verify(context, times(1)).getJobDetail(); verify(jobInstanceRepo, times(1)).findByPredicateName( Matchers.anyString()); } @Test public void testExecuteWithPredicateTriggerLessThanRepeat() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); JobInstanceBean instance = createJobInstance(); GriffinMeasure measure = createGriffinMeasure("measureName"); SegmentPredicate predicate = createFileExistPredicate(); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson (Collections.singletonList(predicate))); given(context.getJobDetail()).willReturn(jd); given(context.getTrigger()).willReturn(createSimpleTrigger(4, 4)); given(jobInstanceRepo.findByPredicateName(Matchers.anyString())) .willReturn(instance); sparkSubmitJob.execute(context); verify(context, times(1)).getJobDetail(); verify(jobInstanceRepo, times(1)).findByPredicateName( Matchers.anyString()); } @Test public void testExecuteWithNoPredicateSuccess() throws Exception { String result = "{\"id\":1,\"state\":\"starting\",\"appId\":null," + "\"appInfo\":{\"driverLogUrl\":null," + "\"sparkUiUrl\":null},\"log\":[]}"; JobExecutionContext context = mock(JobExecutionContext.class); JobInstanceBean instance = createJobInstance(); GriffinMeasure measure = createGriffinMeasure("measureName"); JobDetail jd = 
createJobDetail(JsonUtil.toJson(measure), ""); given(context.getJobDetail()).willReturn(jd); given(jobInstanceRepo.findByPredicateName(Matchers.anyString())) .willReturn(instance); sparkSubmitJob.execute(context); verify(context, times(1)).getJobDetail(); verify(jobInstanceRepo, times(1)).findByPredicateName( Matchers.anyString()); } @Test public void testExecuteWithPost2LivyException() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); JobInstanceBean instance = createJobInstance(); GriffinMeasure measure = createGriffinMeasure("measureName"); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), ""); given(context.getJobDetail()).willReturn(jd); given(jobInstanceRepo.findByPredicateName(Matchers.anyString())) .willReturn(instance); sparkSubmitJob.execute(context); verify(context, times(1)).getJobDetail(); verify(jobInstanceRepo, times(1)).findByPredicateName( Matchers.anyString()); } @Test public void testExecuteWithNullException() { JobExecutionContext context = mock(JobExecutionContext.class); sparkSubmitJob.execute(context); } @Test public void testMultiplePredicatesWhichReturnsTrue() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); JobInstanceBean instance = createJobInstance(); GriffinMeasure measure = createGriffinMeasure("measureName"); SegmentPredicate predicate = createMockPredicate(); SegmentPredicate secondPredicate = createMockPredicate(); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), JsonUtil.toJson (Arrays.asList(predicate, secondPredicate))); given(context.getJobDetail()).willReturn(jd); given(context.getTrigger()).willReturn(createSimpleTrigger(4, 5)); given(jobInstanceRepo.findByPredicateName(Matchers.anyString())) .willReturn(instance); sparkSubmitJob.execute(context); verify(context, times(1)).getJobDetail(); verify(jobInstanceRepo, times(1)).findByPredicateName( Matchers.anyString()); verify(jobInstanceRepo, times(1)).save(instance); }
KafkaSchemaServiceImpl implements KafkaSchemaService { @Override public Iterable<String> getSubjects() { String path = "/subjects"; String regUrl = registryUrl(path); ResponseEntity<String[]> res = restTemplate.getForEntity(regUrl, String[].class); Iterable<String> result = Arrays.asList(res.getBody()); return result; } @Override SchemaString getSchemaString(Integer id); @Override Iterable<String> getSubjects(); @Override Iterable<Integer> getSubjectVersions(String subject); @Override Schema getSubjectSchema(String subject, String version); @Override Config getTopLevelConfig(); @Override Config getSubjectLevelConfig(String subject); }
@Test public void testGetSubjects() { try { ResponseEntity entity = mock(ResponseEntity.class); when(service.restTemplate.getForEntity( "${kafka.schema.registry.url}/subjects", String[].class)).thenReturn(entity); when(entity.getBody()).thenReturn(new String[]{"aaa", "bbb"}); service.getSubjects(); assertTrue(true); } catch (Throwable t) { fail("Cannot get all tables from all dbs"); } }
JobInstance implements Job { @Override @Transactional public void execute(JobExecutionContext context) { try { initParam(context); setSourcesPartitionsAndPredicates(measure.getDataSources()); createJobInstance(job.getConfigMap()); } catch (Exception e) { LOGGER.error("Create predicate job failure.", e); } } @Override @Transactional void execute(JobExecutionContext context); static final String MEASURE_KEY; static final String PREDICATES_KEY; static final String PREDICATE_JOB_NAME; static final String INTERVAL; static final String REPEAT; static final String CHECK_DONEFILE_SCHEDULE; }
@Test @SuppressWarnings("unchecked") public void testExecute() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); Scheduler scheduler = mock(Scheduler.class); GriffinMeasure measure = createGriffinMeasure("measureName"); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), ""); BatchJob job = new BatchJob(1L, "jobName", "qName", "qGroup", false); job.setConfigMap(new HashMap<>()); List<Trigger> triggers = Arrays.asList(createSimpleTrigger(2, 0)); given(context.getJobDetail()).willReturn(jd); given(measureRepo.findOne(Matchers.anyLong())).willReturn(measure); given(repo.findOne(Matchers.anyLong())).willReturn(job); given(factory.getScheduler()).willReturn(scheduler); given((List<Trigger>) scheduler.getTriggersOfJob(Matchers.any( JobKey.class))).willReturn(triggers); given(scheduler.checkExists(Matchers.any(TriggerKey.class))) .willReturn(false); given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(job); given(scheduler.checkExists(Matchers.any(JobKey.class))) .willReturn(false); Trigger trigger = mock(Trigger.class); given(context.getTrigger()).willReturn(trigger); given(trigger.getKey()).willReturn(new TriggerKey("test")); jobInstance.execute(context); verify(measureRepo, times(1)).findOne(Matchers.anyLong()); verify(factory, times(4)).getScheduler(); verify(scheduler, times(1)).getTriggersOfJob(Matchers.any( JobKey.class)); } @SuppressWarnings("unchecked") @Test public void testExecuteWithRangeLessThanZero() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); Scheduler scheduler = mock(Scheduler.class); GriffinMeasure measure = createGriffinMeasure("measureName"); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), ""); BatchJob job = new BatchJob(1L, "jobName", "qName", "qGroup", false); List<Trigger> triggers = Arrays.asList(createSimpleTrigger(2, 0)); given(context.getJobDetail()).willReturn(jd); given(measureRepo.findOne(Matchers.anyLong())).willReturn(measure); 
given(jobRepo.findOne(Matchers.anyLong())).willReturn(job); given(factory.getScheduler()).willReturn(scheduler); given((List<Trigger>) scheduler.getTriggersOfJob(Matchers.any( JobKey.class))).willReturn(triggers); given(scheduler.checkExists(Matchers.any(TriggerKey.class))) .willReturn(false); given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(job); given(scheduler.checkExists(Matchers.any(JobKey.class))) .willReturn(false); jobInstance.execute(context); } @SuppressWarnings("unchecked") @Test public void testExecuteWithRangeGreaterThanDataUnit() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); Scheduler scheduler = mock(Scheduler.class); GriffinMeasure measure = createGriffinMeasure("measureName"); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), ""); BatchJob job = new BatchJob(1L, "jobName", "qName", "qGroup", false); List<Trigger> triggers = Arrays.asList(createSimpleTrigger(2, 0)); given(context.getJobDetail()).willReturn(jd); given(measureRepo.findOne(Matchers.anyLong())).willReturn(measure); given(jobRepo.findOne(Matchers.anyLong())).willReturn(job); given(factory.getScheduler()).willReturn(scheduler); given((List<Trigger>) scheduler.getTriggersOfJob(Matchers.any( JobKey.class))).willReturn(triggers); given(scheduler.checkExists(Matchers.any(TriggerKey.class))) .willReturn(false); given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(job); given(scheduler.checkExists(Matchers.any(JobKey.class))) .willReturn(false); jobInstance.execute(context); verify(context, times(1)).getJobDetail(); } @SuppressWarnings("unchecked") @Test public void testExecuteWithPredicate() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); Scheduler scheduler = mock(Scheduler.class); GriffinMeasure measure = createGriffinMeasure("measureName", createFileExistPredicate(), createFileExistPredicate()); JobDetail jd = createJobDetail(JsonUtil.toJson(measure), ""); BatchJob job = new BatchJob(1L, 
"jobName", "qName", "qGroup", false); List<Trigger> triggers = Arrays.asList(createSimpleTrigger(2, 0)); given(context.getJobDetail()).willReturn(jd); given(measureRepo.findOne(Matchers.anyLong())).willReturn(measure); given(jobRepo.findOne(Matchers.anyLong())).willReturn(job); given(factory.getScheduler()).willReturn(scheduler); given((List<Trigger>) scheduler.getTriggersOfJob(Matchers.any( JobKey.class))).willReturn(triggers); given(scheduler.checkExists(Matchers.any(TriggerKey.class))) .willReturn(false); given(jobRepo.save(Matchers.any(BatchJob.class))).willReturn(job); given(scheduler.checkExists(Matchers.any(JobKey.class))) .willReturn(false); jobInstance.execute(context); verify(context, times(1)).getJobDetail(); } @Test public void testExecuteWithNullException() throws Exception { JobExecutionContext context = mock(JobExecutionContext.class); jobInstance.execute(context); assertTrue(true); }
KafkaSchemaServiceImpl implements KafkaSchemaService { @Override public Iterable<Integer> getSubjectVersions(String subject) { String path = "/subjects/" + subject + "/versions"; String regUrl = registryUrl(path); ResponseEntity<Integer[]> res = restTemplate.getForEntity(regUrl, Integer[].class); Iterable<Integer> result = Arrays.asList(res.getBody()); return result; } @Override SchemaString getSchemaString(Integer id); @Override Iterable<String> getSubjects(); @Override Iterable<Integer> getSubjectVersions(String subject); @Override Schema getSubjectSchema(String subject, String version); @Override Config getTopLevelConfig(); @Override Config getSubjectLevelConfig(String subject); }
@Test public void testGetSubjectVersions() { try { ResponseEntity entity = mock(ResponseEntity.class); when(service.restTemplate.getForEntity( "${kafka.schema.registry.url}/subjects/sub/versions", Integer[].class)).thenReturn(entity); when(entity.getBody()).thenReturn(new Integer[]{1, 2}); service.getSubjectVersions("sub"); assertTrue(true); } catch (Throwable t) { fail("Cannot get all tables from all dbs"); } }
KafkaSchemaServiceImpl implements KafkaSchemaService { @Override public Schema getSubjectSchema(String subject, String version) { String path = "/subjects/" + subject + "/versions/" + version; String regUrl = registryUrl(path); ResponseEntity<Schema> res = restTemplate.getForEntity(regUrl, Schema.class); Schema result = res.getBody(); return result; } @Override SchemaString getSchemaString(Integer id); @Override Iterable<String> getSubjects(); @Override Iterable<Integer> getSubjectVersions(String subject); @Override Schema getSubjectSchema(String subject, String version); @Override Config getTopLevelConfig(); @Override Config getSubjectLevelConfig(String subject); }
@Test public void testGetSubjectSchema() { try { Schema schema = mock(Schema.class); ResponseEntity entity = mock(ResponseEntity.class); when(service.restTemplate.getForEntity( "${kafka.schema.registry.url}/subjects/sub/versions/ver", Schema.class)).thenReturn(entity); when(entity.getBody()).thenReturn(schema); service.getSubjectSchema("sub", "ver"); assertTrue(true); } catch (Throwable t) { fail("Cannot get all tables from all dbs"); } }
KafkaSchemaServiceImpl implements KafkaSchemaService { @Override public Config getTopLevelConfig() { String path = "/config"; String regUrl = registryUrl(path); ResponseEntity<Config> res = restTemplate.getForEntity(regUrl, Config.class); Config result = res.getBody(); return result; } @Override SchemaString getSchemaString(Integer id); @Override Iterable<String> getSubjects(); @Override Iterable<Integer> getSubjectVersions(String subject); @Override Schema getSubjectSchema(String subject, String version); @Override Config getTopLevelConfig(); @Override Config getSubjectLevelConfig(String subject); }
@Test public void testGetTopLevelConfig() { try { Config config = mock(Config.class); ResponseEntity entity = mock(ResponseEntity.class); when(service.restTemplate.getForEntity( "${kafka.schema.registry.url}/config", Config.class)).thenReturn(entity); when(entity.getBody()).thenReturn(config); service.getTopLevelConfig(); assertTrue(true); } catch (Throwable t) { fail("Cannot get all tables from all dbs"); } }
KafkaSchemaServiceImpl implements KafkaSchemaService { @Override public Config getSubjectLevelConfig(String subject) { String path = "/config/" + subject; String regUrl = registryUrl(path); ResponseEntity<Config> res = restTemplate.getForEntity(regUrl, Config.class); Config result = res.getBody(); return result; } @Override SchemaString getSchemaString(Integer id); @Override Iterable<String> getSubjects(); @Override Iterable<Integer> getSubjectVersions(String subject); @Override Schema getSubjectSchema(String subject, String version); @Override Config getTopLevelConfig(); @Override Config getSubjectLevelConfig(String subject); }
@Test public void testGetSubjectLevelConfig() { try { Config config = mock(Config.class); ResponseEntity entity = mock(ResponseEntity.class); when(service.restTemplate.getForEntity( "${kafka.schema.registry.url}/config/subject", Config.class)).thenReturn(entity); when(entity.getBody()).thenReturn(config); service.getSubjectLevelConfig("subject"); assertTrue(true); } catch (Throwable t) { fail("Cannot get all tables from all dbs"); } }
KafkaSchemaController { @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) public SchemaString getSchemaString(@PathVariable("id") Integer id) { return kafkaSchemaService.getSchemaString(id); } @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) SchemaString getSchemaString(@PathVariable("id") Integer id); @RequestMapping(value = "/subject", method = RequestMethod.GET) Iterable<String> getSubjects(); @RequestMapping(value = "/versions", method = RequestMethod.GET) Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject); @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version); @RequestMapping(value = "/config", method = RequestMethod.GET) Config getTopLevelConfig(); @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) Config getSubjectLevelConfig(@PathVariable("subject") String subject); }
@Test public void test_getSubjects() throws Exception { int id = 1; SchemaString ss = new SchemaString(); when(kafkaSchemaService.getSchemaString(id)).thenReturn(ss); mockMvc.perform(get(API_PATH + "/schema/{id}", id)) .andExpect(status().isOk()); verify(kafkaSchemaService).getSchemaString(id); }
KafkaSchemaController { @RequestMapping(value = "/subject", method = RequestMethod.GET) public Iterable<String> getSubjects() { return kafkaSchemaService.getSubjects(); } @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) SchemaString getSchemaString(@PathVariable("id") Integer id); @RequestMapping(value = "/subject", method = RequestMethod.GET) Iterable<String> getSubjects(); @RequestMapping(value = "/versions", method = RequestMethod.GET) Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject); @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version); @RequestMapping(value = "/config", method = RequestMethod.GET) Config getTopLevelConfig(); @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) Config getSubjectLevelConfig(@PathVariable("subject") String subject); }
@Test public void test_getSchemaString() throws Exception { when(kafkaSchemaService.getSubjects()).thenReturn(null); mockMvc.perform(get(API_PATH + "/subject")) .andExpect(status().isOk()); verify(kafkaSchemaService).getSubjects(); }
HiveMetaStoreServiceImpl implements HiveMetaStoreService { @Override @Cacheable(unless = "#result==null") public Iterable<String> getAllTableNames(String dbName) { Iterable<String> results = null; try { if (client == null) { LOGGER.warn("Hive client is null. " + "Please check your hive config."); return new ArrayList<>(); } results = client.getAllTables(getUseDbName(dbName)); } catch (Exception e) { reconnect(); LOGGER.error("Exception fetching tables info: {}", e); return null; } return results; } HiveMetaStoreServiceImpl(); void setClient(IMetaStoreClient client); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") List<Table> getAllTable(String db); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") Map<String, List<String>> getAllTableNames(); @Override @Cacheable(unless = "#result==null") Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "hive", allEntries = true, beforeInvocation = true) void evictHiveCache(); }
@Test public void testGetAllTableNamesForNormalRun() throws MetaException { String dbName = "default"; given(client.getAllTables(dbName)).willReturn(Arrays.asList(dbName)); assertEquals(service.getAllTableNames(dbName).iterator().hasNext(), true); }
KafkaSchemaController { @RequestMapping(value = "/versions", method = RequestMethod.GET) public Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject) { return kafkaSchemaService.getSubjectVersions(subject); } @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) SchemaString getSchemaString(@PathVariable("id") Integer id); @RequestMapping(value = "/subject", method = RequestMethod.GET) Iterable<String> getSubjects(); @RequestMapping(value = "/versions", method = RequestMethod.GET) Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject); @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version); @RequestMapping(value = "/config", method = RequestMethod.GET) Config getTopLevelConfig(); @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) Config getSubjectLevelConfig(@PathVariable("subject") String subject); }
@Test public void test_getSubjectVersions() throws Exception { String subject = "s-1"; when(kafkaSchemaService.getSubjectVersions(subject)).thenReturn(Arrays .asList(1, 2, 3)); mockMvc.perform(get(API_PATH + "/versions") .param("subject", subject)) .andExpect(status().isOk()); verify(kafkaSchemaService).getSubjectVersions(subject); }
KafkaSchemaController { @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) public Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version) { return kafkaSchemaService.getSubjectSchema(subject, version); } @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) SchemaString getSchemaString(@PathVariable("id") Integer id); @RequestMapping(value = "/subject", method = RequestMethod.GET) Iterable<String> getSubjects(); @RequestMapping(value = "/versions", method = RequestMethod.GET) Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject); @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version); @RequestMapping(value = "/config", method = RequestMethod.GET) Config getTopLevelConfig(); @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) Config getSubjectLevelConfig(@PathVariable("subject") String subject); }
@Test public void test_getSubjectSchema() throws Exception { String subject = "ss.s"; String version = "ss"; when(kafkaSchemaService.getSubjectSchema(subject, version)) .thenReturn(null); mockMvc.perform(get(API_PATH + "/subjectSchema", subject, version) .param("subject", subject) .param("version", version)) .andExpect(status().isOk()); verify(kafkaSchemaService).getSubjectSchema(subject, version); }
KafkaSchemaController { @RequestMapping(value = "/config", method = RequestMethod.GET) public Config getTopLevelConfig() { return kafkaSchemaService.getTopLevelConfig(); } @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) SchemaString getSchemaString(@PathVariable("id") Integer id); @RequestMapping(value = "/subject", method = RequestMethod.GET) Iterable<String> getSubjects(); @RequestMapping(value = "/versions", method = RequestMethod.GET) Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject); @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version); @RequestMapping(value = "/config", method = RequestMethod.GET) Config getTopLevelConfig(); @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) Config getSubjectLevelConfig(@PathVariable("subject") String subject); }
@Test public void test_getTopLevelConfig() throws Exception { when(kafkaSchemaService.getTopLevelConfig()).thenReturn(null); mockMvc.perform(get(API_PATH + "/config")) .andExpect(status().isOk()); verify(kafkaSchemaService).getTopLevelConfig(); }
KafkaSchemaController { @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) public Config getSubjectLevelConfig(@PathVariable("subject") String subject) { return kafkaSchemaService.getSubjectLevelConfig(subject); } @RequestMapping(value = "/schema/{id}", method = RequestMethod.GET) SchemaString getSchemaString(@PathVariable("id") Integer id); @RequestMapping(value = "/subject", method = RequestMethod.GET) Iterable<String> getSubjects(); @RequestMapping(value = "/versions", method = RequestMethod.GET) Iterable<Integer> getSubjectVersions( @RequestParam("subject") String subject); @RequestMapping(value = "/subjectSchema", method = RequestMethod.GET) Schema getSubjectSchema(@RequestParam("subject") String subject, @RequestParam("version") String version); @RequestMapping(value = "/config", method = RequestMethod.GET) Config getTopLevelConfig(); @RequestMapping(value = "/config/{subject}", method = RequestMethod.GET) Config getSubjectLevelConfig(@PathVariable("subject") String subject); }
@Test public void test_getSubjectLevelConfig() throws Exception { String subject = "sss"; when(kafkaSchemaService.getSubjectLevelConfig(subject)) .thenReturn(null); mockMvc.perform(get(API_PATH + "/config/{subject}", subject)) .andExpect(status().isOk()); verify(kafkaSchemaService).getSubjectLevelConfig(subject); }
PropertiesUtil { public static Properties getProperties(String path, Resource resource) { PropertiesFactoryBean propFactoryBean = new PropertiesFactoryBean(); Properties properties = null; try { propFactoryBean.setLocation(resource); propFactoryBean.afterPropertiesSet(); properties = propFactoryBean.getObject(); LOGGER.info("Read properties successfully from {}.", path); } catch (IOException e) { LOGGER.error("Get properties from {} failed. {}", path, e); } return properties; } static Properties getProperties(String path, Resource resource); static Properties getConf(String name, String defaultPath, String location); static String getConfPath(String name, String location); }
@Test public void testGetPropertiesForSuccess() { String path = "/quartz.properties"; Properties properties = PropertiesUtil.getProperties(path, new ClassPathResource(path)); assertEquals(properties .get("org.quartz.jobStore.isClustered"), "true"); } @Test public void testGetPropertiesForFailureWithWrongPath() { String path = ".././quartz.properties"; Properties properties = PropertiesUtil.getProperties(path, new ClassPathResource(path)); assertEquals(properties, null); }
PropertiesUtil { public static Properties getConf(String name, String defaultPath, String location) throws FileNotFoundException { String path = getConfPath(name, location); Resource resource; if (path == null) { resource = new ClassPathResource(defaultPath); path = defaultPath; } else { resource = new InputStreamResource(new FileInputStream(path)); } return getProperties(path, resource); } static Properties getProperties(String path, Resource resource); static Properties getConf(String name, String defaultPath, String location); static String getConfPath(String name, String location); }
@Test public void testGetConfWithLocation() throws FileNotFoundException { String name = "sparkJob.properties"; String defaultPath = "/" + name; String location = "src/test/resources"; Properties properties = getConf(name, defaultPath, location); assertNotNull(properties); } @Test public void testGetConfWithLocationEmpty() throws FileNotFoundException { String name = "sparkJob.properties"; String defaultPath = "/" + name; String location = "src/main"; Properties properties = getConf(name, defaultPath, location); assertNotNull(properties); } @Test public void testGetConfWithNoLocation() throws FileNotFoundException { String name = "sparkJob.properties"; String defaultPath = "/" + name; Properties properties = getConf(name, defaultPath, null); assertNotNull(properties); }
JsonUtil { public static String toJson(Object obj) throws JsonProcessingException { if (obj == null) { LOGGER.warn("Object cannot be empty!"); return null; } ObjectMapper mapper = new ObjectMapper(); return mapper.writeValueAsString(obj); } static String toJson(Object obj); static String toJsonWithFormat(Object obj); static T toEntity(String jsonStr, Class<T> type); static T toEntity(File file, TypeReference type); static T toEntity(InputStream in, TypeReference type); static T toEntity(String jsonStr, TypeReference type); }
@Test public void testToJson() throws JsonProcessingException { JobHealth jobHealth = new JobHealth(5, 10); String jobHealthStr = JsonUtil.toJson(jobHealth); assertEquals(jobHealthStr, JOB_HEALTH_JSON); }
HiveMetaStoreServiceImpl implements HiveMetaStoreService { @Override @Cacheable(unless = "#result==null") public Table getTable(String dbName, String tableName) { Table result = null; try { if (client == null) { LOGGER.warn("Hive client is null. " + "Please check your hive config."); return null; } result = client.getTable(getUseDbName(dbName), tableName); } catch (Exception e) { reconnect(); LOGGER.error("Exception fetching table info : {}. {}", tableName, e); } return result; } HiveMetaStoreServiceImpl(); void setClient(IMetaStoreClient client); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") List<Table> getAllTable(String db); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") Map<String, List<String>> getAllTableNames(); @Override @Cacheable(unless = "#result==null") Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "hive", allEntries = true, beforeInvocation = true) void evictHiveCache(); }
@Test public void testGetTableForNormalRun() throws Exception { String dbName = "default"; String tableName = "tableName"; given(client.getTable(dbName, tableName)).willReturn(new Table()); assertTrue(service.getTable(dbName, tableName) != null); }
JsonUtil { public static String toJsonWithFormat(Object obj) throws JsonProcessingException { if (obj == null) { LOGGER.warn("Object to be formatted cannot be empty!"); return null; } ObjectWriter mapper = new ObjectMapper().writer() .withDefaultPrettyPrinter(); return mapper.writeValueAsString(obj); } static String toJson(Object obj); static String toJsonWithFormat(Object obj); static T toEntity(String jsonStr, Class<T> type); static T toEntity(File file, TypeReference type); static T toEntity(InputStream in, TypeReference type); static T toEntity(String jsonStr, TypeReference type); }
@Test public void testToJsonWithFormat() throws JsonProcessingException { JobHealth jobHealth = new JobHealth(5, 10); String jobHealthStr = JsonUtil.toJsonWithFormat(jobHealth); assertNotEquals(jobHealthStr, JOB_HEALTH_JSON); }
JsonUtil { public static <T> T toEntity(String jsonStr, Class<T> type) throws IOException { if (StringUtils.isEmpty(jsonStr)) { LOGGER.warn("Json string {} is empty!", type); return null; } ObjectMapper mapper = new ObjectMapper(); mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); return mapper.readValue(jsonStr, type); } static String toJson(Object obj); static String toJsonWithFormat(Object obj); static T toEntity(String jsonStr, Class<T> type); static T toEntity(File file, TypeReference type); static T toEntity(InputStream in, TypeReference type); static T toEntity(String jsonStr, TypeReference type); }
@Test public void testToEntityWithParamClass() throws IOException { JobHealth jobHealth = JsonUtil.toEntity(JOB_HEALTH_JSON, JobHealth.class); assertEquals(jobHealth.getJobCount(), 10); assertEquals(jobHealth.getHealthyJobCount(), 5); } @Test public void testToEntityWithNullParamClass() throws IOException { String str = null; JobHealth jobHealth = JsonUtil.toEntity(str, JobHealth.class); assertNull(jobHealth); } @Test public void testToEntityWithParamTypeReference() throws IOException { TypeReference<HashMap<String, Integer>> type = new TypeReference<HashMap<String, Integer>>() { }; Map map = JsonUtil.toEntity(JOB_HEALTH_JSON, type); assertEquals(map.get("jobCount"), 10); } @Test public void testToEntityWithNullParamTypeReference() throws IOException { String str = null; TypeReference<HashMap<String, Integer>> type = new TypeReference<HashMap<String, Integer>>() { }; Map map = JsonUtil.toEntity(str, type); assertNull(map); }
TimeUtil { public static Long str2Long(String timeStr) { if (timeStr == null) { LOGGER.warn("Time string can not be empty."); return 0L; } String trimTimeStr = timeStr.trim(); boolean positive = true; if (trimTimeStr.startsWith("-")) { trimTimeStr = trimTimeStr.substring(1); positive = false; } List<TimeUnitPair> list = getTimeUnitPairs(trimTimeStr); return str2Long(positive, list); } static Long str2Long(String timeStr); static String format(String timeFormat, long time, TimeZone timeZone); static TimeZone getTimeZone(String timezone); }
@Test public void testStr2LongWithPositive() { String time = "2hr3m4s"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "7384000"); } @Test public void testStr2LongWithNegative() { String time = "-2hr3min4s"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "-7384000"); } @Test public void testStr2LongWithNull() { String time = null; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "0"); } @Test public void testStr2LongWithDay() { String time = "1DAY"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "86400000"); } @Test public void testStr2LongWithHour() { String time = "1h"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "3600000"); } @Test public void testStr2LongWithMinute() { String time = "1m"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "60000"); } @Test public void testStr2LongWithSecond() { String time = "1s"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "1000"); } @Test public void testStr2LongWithMillisecond() { String time = "1ms"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "1"); } @Test public void testStr2LongWithIllegalFormat() { String time = "1y2m3s"; assertEquals(String.valueOf(TimeUtil.str2Long(time)), "123000"); }
HiveMetaStoreServiceImpl implements HiveMetaStoreService { @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "hive", allEntries = true, beforeInvocation = true) public void evictHiveCache() { LOGGER.info("Evict hive cache"); } HiveMetaStoreServiceImpl(); void setClient(IMetaStoreClient client); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") List<Table> getAllTable(String db); @Override @Cacheable(unless = "#result==null || #result.isEmpty()") Map<String, List<String>> getAllTableNames(); @Override @Cacheable(unless = "#result==null") Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "hive", allEntries = true, beforeInvocation = true) void evictHiveCache(); }
@Test public void testEvictHiveCache() throws Exception { String useDbName = "default"; String tableName = "tableName"; List<String> databases = Arrays.asList(useDbName); given(client.getAllDatabases()).willReturn(databases); given(client.getAllTables(databases.get(0))).willReturn(Arrays .asList(tableName)); given(client.getTable(useDbName, tableName)).willReturn(new Table()); assertEquals(service.getAllTable().size(), 1); verify(client).getAllDatabases(); verify(client).getAllTables(useDbName); verify(client).getTable(useDbName, tableName); service.getAllTable(); verifyNoMoreInteractions(client); service.evictHiveCache(); service.getAllTable().size(); service.getAllTable().size(); verify(client, times(2)).getAllDatabases(); verify(client, times(2)).getAllTables(useDbName); verify(client, times(2)).getTable(useDbName, tableName); }
TimeUtil { public static String format(String timeFormat, long time, TimeZone timeZone) { String timePattern = "#(?:\\\\#|[^#])*#"; Date t = new Date(time); Pattern ptn = Pattern.compile(timePattern); Matcher matcher = ptn.matcher(timeFormat); StringBuffer sb = new StringBuffer(); while (matcher.find()) { String group = matcher.group(); String content = group.substring(1, group.length() - 1); String pattern = refreshEscapeHashTag(content); SimpleDateFormat sdf = new SimpleDateFormat(pattern); sdf.setTimeZone(timeZone); matcher.appendReplacement(sb, sdf.format(t)); } matcher.appendTail(sb); return refreshEscapeHashTag(sb.toString()); } static Long str2Long(String timeStr); static String format(String timeFormat, long time, TimeZone timeZone); static TimeZone getTimeZone(String timezone); }
@Test public void testFormat() { String format = "dt=#YYYYMMdd#"; Long time = 1516186620155L; String timeZone = "GMT+8:00"; assertEquals(TimeUtil.format(format, time, TimeZone .getTimeZone(timeZone)), "dt=20180117"); }
TimeUtil { public static TimeZone getTimeZone(String timezone) { if (StringUtils.isEmpty(timezone)) { return TimeZone.getDefault(); } return TimeZone.getTimeZone(timezone); } static Long str2Long(String timeStr); static String format(String timeFormat, long time, TimeZone timeZone); static TimeZone getTimeZone(String timezone); }
@Test public void testGetTimeZone() { Map<String, String> tests = new HashMap<>(); tests.put("", TimeZone.getDefault().getID()); tests.put("GMT", "GMT"); tests.put("GMT+1", "GMT+01:00"); tests.put("GMT+1:00", "GMT+01:00"); tests.put("GMT+01:00", "GMT+01:00"); tests.put("GMT-1", "GMT-01:00"); tests.put("GMT-1:00", "GMT-01:00"); tests.put("GMT-01:00", "GMT-01:00"); tests.put("GMT1", "GMT"); tests.put("GMT1:00", "GMT"); tests.put("GMT01:00", "GMT"); tests.put("UTC1", "GMT"); tests.put("UTC1:00", "GMT"); tests.put("UTC01:00", "GMT"); tests.put("UTC-1", "GMT"); tests.put("UTC-1:00", "GMT"); tests.put("UTC-01:00", "GMT"); tests.put("CST", "CST"); tests.put("CDT", "GMT"); tests.put("America/Los_Angeles", "America/Los_Angeles"); tests.forEach((input, expected) -> { String actual = TimeUtil.getTimeZone(input).getID(); assertEquals(String.format("For input: %s", input), expected, actual); }); }
GriffinInfoController { @RequestMapping(value = "/version", method = RequestMethod.GET) public String greeting() { return "0.5.0"; } @RequestMapping(value = "/version", method = RequestMethod.GET) String greeting(); }
@Test public void testGreeting() throws Exception { mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/version")) .andExpect(status().isOk()) .andExpect(jsonPath("$", is("0.5.0"))); }
PropertiesConfig { @Bean(name = "quartzConf") public Properties quartzConf() throws FileNotFoundException { String name = "quartz.properties"; String defaultPath = "/" + name; return getConf(name, defaultPath, configLocation); } PropertiesConfig( @Value("${external.config.location}") String configLocation, @Value("${external.env.location}") String envLocation); @PostConstruct void init(); @Bean(name = "quartzConf") Properties quartzConf(); static Map<String, Object> livyConfMap; }
@Test public void quartzConfWithLocationNotNull() throws Exception { Properties conf = quartzConf.quartzConf(); assertEquals(conf.get("org.quartz.scheduler.instanceName"), "spring-boot-quartz-test"); } @Test public void quartzConfWithLocationNull() throws Exception { Properties conf = noQuartzConf.quartzConf(); assertEquals(conf.get("org.quartz.scheduler.instanceName"), "spring-boot-quartz-test"); }
MetricController { @RequestMapping(value = "/metrics", method = RequestMethod.GET) public Map<String, List<Metric>> getAllMetrics() { return metricService.getAllMetrics(); } @RequestMapping(value = "/metrics", method = RequestMethod.GET) Map<String, List<Metric>> getAllMetrics(); @RequestMapping(value = "/metrics/values", method = RequestMethod.GET) List<MetricValue> getMetricValues(@RequestParam("metricName") String metricName, @RequestParam("size") int size, @RequestParam(value = "offset", defaultValue = "0") int offset, @RequestParam(value = "tmst", defaultValue = "0") long tmst); @RequestMapping(value = "/metrics/values", method = RequestMethod.POST) ResponseEntity<?> addMetricValues(@RequestBody List<MetricValue> values); @RequestMapping(value = "/metrics/values", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) ResponseEntity<?> deleteMetricValues(@RequestParam("metricName") String metricName); @RequestMapping(value = "/metrics/values/{instanceId}", method = RequestMethod.GET) MetricValue getMetric(@PathVariable("instanceId") Long id); }
@Test public void testGetAllMetricsSuccess() throws Exception { Metric metric = new Metric("metricName", ACCURACY, "owner", Collections .emptyList()); given(service.getAllMetrics()).willReturn( Collections.singletonMap("measureName", Collections .singletonList(metric))); mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics")) .andExpect(status().isOk()) .andExpect(jsonPath("$.measureName", hasSize(1))); } @Test public void testGetAllMetricsFailureWithException() throws Exception { given(service.getAllMetrics()) .willThrow(new GriffinException.ServiceException( "Failed to get metrics", new RuntimeException())); mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics")) .andExpect(status().isInternalServerError()); }
HiveMetaStoreServiceJdbcImpl implements HiveMetaStoreService { public String getComment(String colStr) { String pattern = "'([^\"|^\']|\"|\')*'"; Matcher m = Pattern.compile(pattern).matcher(colStr.toLowerCase()); if (m.find()) { String text = m.group(); String result = text.substring(1, text.length() - 1); if (!result.isEmpty()) { LOGGER.info("Found value: " + result); } return result; } else { LOGGER.info("NO MATCH"); return ""; } } void setConn(Connection conn); void setHiveClassName(String hiveClassName); void setNeedKerberos(String needKerberos); void setKeytabUser(String keytabUser); void setKeytabPath(String keytabPath); @PostConstruct void init(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null") Map<String, List<String>> getAllTableNames(); @Override List<Table> getAllTable(String db); @Override Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "jdbcHive", allEntries = true, beforeInvocation = true) void evictHiveCache(); String getLocation(String tableMetadata); List<FieldSchema> getColums(String tableMetadata); String getComment(String colStr); }
@Test public void testGetComment() { String colStr = "`session_date` string COMMENT 'this is session date'"; String comment = serviceJdbc.getComment(colStr); assert (comment.equals("this is session date")); colStr = "`session_date` string COMMENT ''"; comment = serviceJdbc.getComment(colStr); Assert.assertTrue(comment.isEmpty()); }
MetricController { @RequestMapping(value = "/metrics/values", method = RequestMethod.GET) public List<MetricValue> getMetricValues(@RequestParam("metricName") String metricName, @RequestParam("size") int size, @RequestParam(value = "offset", defaultValue = "0") int offset, @RequestParam(value = "tmst", defaultValue = "0") long tmst) { return metricService.getMetricValues(metricName, offset, size, tmst); } @RequestMapping(value = "/metrics", method = RequestMethod.GET) Map<String, List<Metric>> getAllMetrics(); @RequestMapping(value = "/metrics/values", method = RequestMethod.GET) List<MetricValue> getMetricValues(@RequestParam("metricName") String metricName, @RequestParam("size") int size, @RequestParam(value = "offset", defaultValue = "0") int offset, @RequestParam(value = "tmst", defaultValue = "0") long tmst); @RequestMapping(value = "/metrics/values", method = RequestMethod.POST) ResponseEntity<?> addMetricValues(@RequestBody List<MetricValue> values); @RequestMapping(value = "/metrics/values", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) ResponseEntity<?> deleteMetricValues(@RequestParam("metricName") String metricName); @RequestMapping(value = "/metrics/values/{instanceId}", method = RequestMethod.GET) MetricValue getMetric(@PathVariable("instanceId") Long id); }
@Test public void testGetMetricValuesSuccess() throws Exception { MetricValue value = new MetricValue("jobName", 1L, new HashMap<>()); given(service.getMetricValues(Matchers.anyString(), Matchers.anyInt(), Matchers.anyInt(), Matchers.anyLong())) .willReturn(Collections.singletonList(value)); mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics/values") .param("metricName", "jobName") .param("size", "5")) .andExpect(jsonPath("$.[0].name", is("jobName"))); } @Test public void testGetMetricValuesFailureWithException() throws Exception { given(service.getMetricValues(Matchers.anyString(), Matchers.anyInt(), Matchers.anyInt(), Matchers.anyLong())) .willThrow(new GriffinException.ServiceException( "Failed to get metric values", new IOException())); mvc.perform(get(URLHelper.API_VERSION_PATH + "/metrics/values") .param("metricName", "jobName") .param("size", "5")) .andExpect(status().isInternalServerError()); }
MetricController { @RequestMapping(value = "/metrics/values", method = RequestMethod.POST) public ResponseEntity<?> addMetricValues(@RequestBody List<MetricValue> values) { return metricService.addMetricValues(values); } @RequestMapping(value = "/metrics", method = RequestMethod.GET) Map<String, List<Metric>> getAllMetrics(); @RequestMapping(value = "/metrics/values", method = RequestMethod.GET) List<MetricValue> getMetricValues(@RequestParam("metricName") String metricName, @RequestParam("size") int size, @RequestParam(value = "offset", defaultValue = "0") int offset, @RequestParam(value = "tmst", defaultValue = "0") long tmst); @RequestMapping(value = "/metrics/values", method = RequestMethod.POST) ResponseEntity<?> addMetricValues(@RequestBody List<MetricValue> values); @RequestMapping(value = "/metrics/values", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) ResponseEntity<?> deleteMetricValues(@RequestParam("metricName") String metricName); @RequestMapping(value = "/metrics/values/{instanceId}", method = RequestMethod.GET) MetricValue getMetric(@PathVariable("instanceId") Long id); }
@Test public void testAddMetricValuesSuccess() throws Exception { List<MetricValue> values = Collections.singletonList(new MetricValue()); given(service.addMetricValues(Matchers.any())) .willReturn( new ResponseEntity<>( "{\"errors\": false, \"items\": []}", HttpStatus.OK)); mvc.perform( post(URLHelper.API_VERSION_PATH + "/metrics/values") .contentType(MediaType.APPLICATION_JSON) .content(JsonUtil.toJson(values))) .andExpect(status().isOk()) .andExpect(jsonPath("$.errors", is(false))); } @Test public void testAddMetricValuesFailureWithException() throws Exception { List<MetricValue> values = Collections.singletonList(new MetricValue()); given(service.addMetricValues(Matchers.any())) .willThrow(new GriffinException.ServiceException( "Failed to add metric values", new IOException())); mvc.perform(post(URLHelper.API_VERSION_PATH + "/metrics/values") .contentType(MediaType.APPLICATION_JSON) .content(JsonUtil.toJson(values))) .andExpect(status().isInternalServerError()); } @Test public void testAddMetricValuesFailureWithInvalidFormat() throws Exception { List<MetricValue> values = Collections.singletonList(new MetricValue()); given(service.addMetricValues(Matchers.any())) .willThrow(new GriffinException.BadRequestException (INVALID_METRIC_VALUE_FORMAT)); mvc.perform(post(URLHelper.API_VERSION_PATH + "/metrics/values") .contentType(MediaType.APPLICATION_JSON) .content(JsonUtil.toJson(values))) .andExpect(status().isBadRequest()); }
MetricController { @RequestMapping(value = "/metrics/values", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) public ResponseEntity<?> deleteMetricValues(@RequestParam("metricName") String metricName) { return metricService.deleteMetricValues(metricName); } @RequestMapping(value = "/metrics", method = RequestMethod.GET) Map<String, List<Metric>> getAllMetrics(); @RequestMapping(value = "/metrics/values", method = RequestMethod.GET) List<MetricValue> getMetricValues(@RequestParam("metricName") String metricName, @RequestParam("size") int size, @RequestParam(value = "offset", defaultValue = "0") int offset, @RequestParam(value = "tmst", defaultValue = "0") long tmst); @RequestMapping(value = "/metrics/values", method = RequestMethod.POST) ResponseEntity<?> addMetricValues(@RequestBody List<MetricValue> values); @RequestMapping(value = "/metrics/values", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) ResponseEntity<?> deleteMetricValues(@RequestParam("metricName") String metricName); @RequestMapping(value = "/metrics/values/{instanceId}", method = RequestMethod.GET) MetricValue getMetric(@PathVariable("instanceId") Long id); }
@Test public void testDeleteMetricValuesSuccess() throws Exception { given(service.deleteMetricValues("metricName")) .willReturn(new ResponseEntity<>("{\"failures\": []}", HttpStatus.OK)); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/metrics/values") .param("metricName", "metricName")) .andExpect(status().isOk()) .andExpect(jsonPath("$.failures", hasSize(0))); } @Test public void testDeleteMetricValuesFailureWithException() throws Exception { given(service.deleteMetricValues("metricName")) .willThrow(new GriffinException.ServiceException( "Failed to delete metric values.", new IOException())); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/metrics/values") .param("metricName", "metricName")) .andExpect(status().isInternalServerError()); }
MetricStoreImpl implements MetricStore { private static String buildBasicAuthString(String user, String password) { String auth = user + ":" + password; return String.format("Basic %s", Base64.getEncoder().encodeToString( auth.getBytes())); } MetricStoreImpl(@Value("${elasticsearch.host}") String host, @Value("${elasticsearch.port}") int port, @Value("${elasticsearch.scheme:http}") String scheme, @Value("${elasticsearch.user:}") String user, @Value("${elasticsearch.password:}") String password); @Override List<MetricValue> getMetricValues(String metricName, int from, int size, long tmst); @Override ResponseEntity<?> addMetricValues(List<MetricValue> metricValues); @Override ResponseEntity<?> deleteMetricValues(String metricName); @Override MetricValue getMetric(String applicationId); }
@Test public void testBuildBasicAuthString() throws NoSuchMethodException, InvocationTargetException, IllegalAccessException { Method m = MetricStoreImpl.class.getDeclaredMethod ("buildBasicAuthString", String.class, String.class); m.setAccessible(true); String authStr = (String) m.invoke(null, "user", "password"); assertTrue(authStr.equals("Basic dXNlcjpwYXNzd29yZA==")); }
MetricStoreImpl implements MetricStore { @Override public MetricValue getMetric(String applicationId) throws IOException { Response response = client.performRequest( "GET", urlGet, Collections.singletonMap( "q", "metadata.applicationId:" + applicationId)); List<MetricValue> metricValues = getMetricValuesFromResponse(response); return metricValues.get(0); } MetricStoreImpl(@Value("${elasticsearch.host}") String host, @Value("${elasticsearch.port}") int port, @Value("${elasticsearch.scheme:http}") String scheme, @Value("${elasticsearch.user:}") String user, @Value("${elasticsearch.password:}") String password); @Override List<MetricValue> getMetricValues(String metricName, int from, int size, long tmst); @Override ResponseEntity<?> addMetricValues(List<MetricValue> metricValues); @Override ResponseEntity<?> deleteMetricValues(String metricName); @Override MetricValue getMetric(String applicationId); }
@Test public void testMetricGetting() throws IOException, URISyntaxException { Response responseMock = PowerMockito.mock(Response.class); HttpEntity httpEntityMock = PowerMockito.mock(HttpEntity.class); InputStream is = Thread.currentThread().getContextClassLoader() .getResourceAsStream("metricvalue.json"); Map<String, String> map = new HashMap<>(); map.put("q", "metadata.applicationId:application_1549876136110_0018"); Map<String, Object> value = new HashMap<String, Object>(){{ put("total", 74); put("miss", 0); put("matched", 74); put("matchedFraction", 1); }}; MetricValue expectedMetric = new MetricValue("de_demo_results_comparision", 1549985089648L, Collections.singletonMap("applicationId", "application_1549876136110_0018"), value); given(restClientMock.performRequest(eq("GET"), eq(urlGet), eq(map), anyVararg())).willReturn(responseMock); given(responseMock.getEntity()).willReturn(httpEntityMock); given(httpEntityMock.getContent()).willReturn(is); MetricStoreImpl metricStore = new MetricStoreImpl("localhost", 0, "", "", ""); MetricValue metric = metricStore.getMetric("application_1549876136110_0018"); assertEquals(expectedMetric, metric); }
HiveMetaStoreServiceJdbcImpl implements HiveMetaStoreService { @Override @Cacheable(unless = "#result==null") public Iterable<String> getAllDatabases() { return queryHiveString(SHOW_DATABASE); } void setConn(Connection conn); void setHiveClassName(String hiveClassName); void setNeedKerberos(String needKerberos); void setKeytabUser(String keytabUser); void setKeytabPath(String keytabPath); @PostConstruct void init(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null") Map<String, List<String>> getAllTableNames(); @Override List<Table> getAllTable(String db); @Override Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "jdbcHive", allEntries = true, beforeInvocation = true) void evictHiveCache(); String getLocation(String tableMetadata); List<FieldSchema> getColums(String tableMetadata); String getComment(String colStr); }
@Test public void testgetAllDatabases() throws SQLException { when(conn.createStatement()).thenReturn(stmt); when(stmt.executeQuery(anyString())).thenReturn(rs); when(rs.next()).thenReturn(true).thenReturn(false); when(rs.getString(anyInt())).thenReturn("default"); Iterable<String> res = serviceJdbc.getAllDatabases(); for (String s : res) { Assert.assertEquals(s, "default"); break; } }
MetricServiceImpl implements MetricService { @Override public List<MetricValue> getMetricValues(String metricName, int offset, int size, long tmst) { if (offset < 0) { throw new GriffinException.BadRequestException (INVALID_METRIC_RECORDS_OFFSET); } if (size < 0) { throw new GriffinException.BadRequestException (INVALID_METRIC_RECORDS_SIZE); } try { return metricStore.getMetricValues(metricName, offset, size, tmst); } catch (IOException e) { LOGGER.error("Failed to get metric values named {}. {}", metricName, e.getMessage()); throw new GriffinException.ServiceException( "Failed to get metric values", e); } } @Override Map<String, List<Metric>> getAllMetrics(); @Override List<MetricValue> getMetricValues(String metricName, int offset, int size, long tmst); @SuppressWarnings("rawtypes") @Override ResponseEntity addMetricValues(List<MetricValue> values); @SuppressWarnings("rawtypes") @Override ResponseEntity deleteMetricValues(String metricName); @Override MetricValue findMetric(Long id); }
@Test public void testGetMetricValuesSuccess() throws IOException { MetricValue value = new MetricValue("jobName", 1L, new HashMap<>()); given(metricStore.getMetricValues(Matchers.anyString(), Matchers.anyInt(), Matchers.anyInt(), Matchers.anyLong())) .willReturn(Collections.singletonList(value)); List<MetricValue> values = service.getMetricValues("jobName", 0, 300, 0); assertEquals(values.size(), 1); assertEquals(values.get(0).getName(), "jobName"); } @Test(expected = GriffinException.ServiceException.class) public void testGetMetricValuesFailureWithException() throws IOException { given(metricStore.getMetricValues(Matchers.anyString(), Matchers.anyInt(), Matchers.anyInt(), Matchers.anyLong())) .willThrow(new IOException()); service.getMetricValues("jobName", 0, 300, 0); }
MetricServiceImpl implements MetricService { @SuppressWarnings("rawtypes") @Override public ResponseEntity addMetricValues(List<MetricValue> values) { for (MetricValue value : values) { checkFormat(value); } try { return metricStore.addMetricValues(values); } catch (JsonProcessingException e) { LOGGER.warn("Failed to parse metric value.", e.getMessage()); throw new GriffinException.BadRequestException (INVALID_METRIC_VALUE_FORMAT); } catch (IOException e) { LOGGER.error("Failed to add metric values", e); throw new GriffinException.ServiceException( "Failed to add metric values", e); } } @Override Map<String, List<Metric>> getAllMetrics(); @Override List<MetricValue> getMetricValues(String metricName, int offset, int size, long tmst); @SuppressWarnings("rawtypes") @Override ResponseEntity addMetricValues(List<MetricValue> values); @SuppressWarnings("rawtypes") @Override ResponseEntity deleteMetricValues(String metricName); @Override MetricValue findMetric(Long id); }
// Covers the three addMetricValues paths: successful bulk insert (parses the
// stubbed Elasticsearch-style response body), rejection of a value that fails
// format validation, and IOException mapped to ServiceException.
@Test public void testAddMetricValuesSuccess() throws IOException { Map<String, Object> value = new HashMap<>(); value.put("total", 10000); value.put("matched", 10000); List<MetricValue> values = Collections.singletonList( new MetricValue("jobName", 1L, value)); given(metricStore.addMetricValues(values)) .willReturn( new ResponseEntity( "{\"errors\": false, \"items\": []}", HttpStatus.OK)); ResponseEntity response = service.addMetricValues(values); Map body = JsonUtil.toEntity(response.getBody().toString(), Map.class); assertEquals(response.getStatusCode(), HttpStatus.OK); assertNotNull(body); assertEquals(body.get("errors").toString(), "false"); } @Test(expected = GriffinException.BadRequestException.class) public void testAddMetricValuesFailureWithInvalidFormat() { List<MetricValue> values = Collections.singletonList(new MetricValue()); service.addMetricValues(values); } @Test(expected = GriffinException.ServiceException.class) public void testAddMetricValuesFailureWithException() throws IOException { Map<String, Object> value = new HashMap<>(); value.put("total", 10000); value.put("matched", 10000); List<MetricValue> values = Collections.singletonList( new MetricValue("jobName", 1L, value)); given(metricStore.addMetricValues(values)).willThrow(new IOException()); service.addMetricValues(values); }
MetricServiceImpl implements MetricService {
    /**
     * Deletes every stored metric value whose name matches
     * {@code metricName} by delegating to the metric store.
     *
     * @throws GriffinException.ServiceException if the store operation fails,
     *         with the original I/O exception as the cause
     */
    @SuppressWarnings("rawtypes")
    @Override
    public ResponseEntity deleteMetricValues(String metricName) {
        try {
            return metricStore.deleteMetricValues(metricName);
        } catch (IOException ioe) {
            LOGGER.error("Failed to delete metric values named {}. {}",
                    metricName, ioe.getMessage());
            throw new GriffinException.ServiceException(
                    "Failed to delete metric values.", ioe);
        }
    }
    @Override Map<String, List<Metric>> getAllMetrics();
    @Override List<MetricValue> getMetricValues(String metricName, int offset, int size, long tmst);
    @SuppressWarnings("rawtypes") @Override ResponseEntity addMetricValues(List<MetricValue> values);
    @SuppressWarnings("rawtypes") @Override ResponseEntity deleteMetricValues(String metricName);
    @Override MetricValue findMetric(Long id);
}
// Verifies deleteMetricValues forwards the store's response (and parses its
// "failures" list), and that an IOException surfaces as ServiceException.
@Test public void testDeleteMetricValuesSuccess() throws IOException { given(metricStore.deleteMetricValues("metricName")) .willReturn(new ResponseEntity("{\"failures\": []}", HttpStatus.OK)); ResponseEntity response = service.deleteMetricValues("metricName"); Map body = JsonUtil.toEntity(response.getBody().toString(), Map.class); assertEquals(response.getStatusCode(), HttpStatus.OK); assertNotNull(body); assertEquals(body.get("failures"), Collections.emptyList()); } @Test(expected = GriffinException.ServiceException.class) public void testDeleteMetricValuesFailureWithException() throws IOException { given(metricStore.deleteMetricValues("metricName")) .willThrow(new IOException()); service.deleteMetricValues("metricName"); }
MetricServiceImpl implements MetricService {
    /**
     * Looks up the job instance with the given id and fetches the metric
     * recorded for that instance's application id.
     *
     * @throws GriffinException.NotFoundException if no job instance has this id
     * @throws GriffinException.ServiceException  if the metric store lookup fails
     */
    @Override
    public MetricValue findMetric(Long id) {
        JobInstanceBean jobInstanceBean = jobInstanceRepo.findByInstanceId(id);
        if (jobInstanceBean == null) {
            LOGGER.warn("There are no job instances with id {} ", id);
            throw new GriffinException.NotFoundException(JOB_INSTANCE_NOT_FOUND);
        }
        String appId = jobInstanceBean.getAppId();
        try {
            return metricStore.getMetric(appId);
        } catch (IOException e) {
            // Include the exception so the warning carries the stack trace;
            // the original log line dropped it entirely.
            LOGGER.warn("Failed to get metric for applicationId {} ", appId, e);
            throw new GriffinException.ServiceException("Failed to find metric", e);
        }
    }
    @Override Map<String, List<Metric>> getAllMetrics();
    @Override List<MetricValue> getMetricValues(String metricName, int offset, int size, long tmst);
    @SuppressWarnings("rawtypes") @Override ResponseEntity addMetricValues(List<MetricValue> values);
    @SuppressWarnings("rawtypes") @Override ResponseEntity deleteMetricValues(String metricName);
    @Override MetricValue findMetric(Long id);
}
// Covers findMetric: success (instance found, metric returned for its appId),
// NotFoundException when no job instance exists, and the store throwing.
// NOTE(review): the failure case stubs getMetric with a ServiceException
// rather than an IOException, so it exercises unchecked propagation.
@Test public void testFindMetricSuccess() throws IOException { Long id = 1L; String appId = "application"; MetricValue expectedMetric = new MetricValue( "name", 1234L, Collections.singletonMap("applicationId", appId), new HashMap<>()); given(jobInstanceRepo.findByInstanceId(id)) .willReturn(new JobInstanceBean(LivySessionStates.State.RUNNING, 12L, 32L, appId)); given(metricStore.getMetric(appId)) .willReturn(expectedMetric); MetricValue actualMetric = service.findMetric(id); assertEquals(expectedMetric, actualMetric); } @Test(expected = GriffinException.NotFoundException.class) public void testFailedToFindJobInstance() throws IOException { Long id = 1L; given(jobInstanceRepo.findByInstanceId(id)) .willReturn(null); service.findMetric(id); } @Test(expected = GriffinException.ServiceException.class) public void testFindMetricFailure() throws IOException { Long id = 1L; String appId = "application"; given(jobInstanceRepo.findByInstanceId(id)) .willReturn(new JobInstanceBean(LivySessionStates.State.RUNNING, 12L, 32L, appId)); given(metricStore.getMetric(appId)) .willThrow(new GriffinException.ServiceException("", new RuntimeException())); service.findMetric(id); }
// Lists table names of one Hive database by running "SHOW TABLES IN <db>"
// over JDBC (queryHiveString). Result is cached unless null. NOTE(review):
// dbName is concatenated into the SQL string — acceptable only because db
// names come from the metastore itself, not end users; confirm callers.
HiveMetaStoreServiceJdbcImpl implements HiveMetaStoreService { @Override @Cacheable(unless = "#result==null") public Iterable<String> getAllTableNames(String dbName) { return queryHiveString(SHOW_TABLES_IN + dbName); } void setConn(Connection conn); void setHiveClassName(String hiveClassName); void setNeedKerberos(String needKerberos); void setKeytabUser(String keytabUser); void setKeytabPath(String keytabPath); @PostConstruct void init(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null") Map<String, List<String>> getAllTableNames(); @Override List<Table> getAllTable(String db); @Override Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "jdbcHive", allEntries = true, beforeInvocation = true) void evictHiveCache(); String getLocation(String tableMetadata); List<FieldSchema> getColums(String tableMetadata); String getComment(String colStr); }
// Mocks the JDBC Connection/Statement/ResultSet chain to yield two rows and
// checks getAllTableNames("default") returns both table names in order.
@Test public void testGetAllTableNames() throws SQLException { when(conn.createStatement()).thenReturn(stmt); when(stmt.executeQuery(anyString())).thenReturn(rs); when(rs.next()).thenReturn(true).thenReturn(true).thenReturn(false); when(rs.getString(anyInt())).thenReturn("session_data").thenReturn("session_summary"); Iterable<String> res = serviceJdbc.getAllTableNames("default"); StringBuilder sb = new StringBuilder(); for (String s : res) { sb.append(s).append(","); } Assert.assertEquals(sb.toString(), "session_data,session_summary,"); }
MeasureServiceImpl implements MeasureService {
    /**
     * Returns all non-deleted measures, optionally narrowed by type:
     * GRIFFIN or EXTERNAL select the matching repository; any other value
     * (including null or "") returns measures of every type.
     */
    @Override
    public List<? extends Measure> getAllAliveMeasures(String type) {
        // Constant-first equals: the original type.equals(GRIFFIN) threw
        // NullPointerException when callers passed a null type.
        if (GRIFFIN.equals(type)) {
            return griffinMeasureRepo.findByDeleted(false);
        } else if (EXTERNAL.equals(type)) {
            return externalMeasureRepo.findByDeleted(false);
        }
        return measureRepo.findByDeleted(false);
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// With an empty type filter, all alive measures come from measureRepo;
// asserts size and name of the single stubbed measure.
@Test public void testGetAllMeasures() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); given(measureRepo.findByDeleted(false)).willReturn(Collections .singletonList(measure)); List<? extends Measure> measures = service.getAllAliveMeasures(""); assertEquals(measures.size(), 1); assertEquals(measures.get(0).getName(), "view_item_hourly"); }
MeasureServiceImpl implements MeasureService {
    /**
     * Fetches a single non-deleted measure by id.
     *
     * @throws GriffinException.NotFoundException if no alive measure has this id
     */
    @Override
    public Measure getMeasureById(long id) {
        final Measure found = measureRepo.findByIdAndDeleted(id, false);
        if (found == null) {
            throw new GriffinException.NotFoundException(MEASURE_ID_DOES_NOT_EXIST);
        }
        return found;
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// Success path returns the stubbed measure; the second case asserts
// NotFoundException when the repository yields null.
@Test public void testGetMeasuresById() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); given(measureRepo.findByIdAndDeleted(1L, false)).willReturn(measure); Measure m = service.getMeasureById(1); assertEquals(m.getName(), measure.getName()); } @Test(expected = GriffinException.NotFoundException.class) public void testGetMeasuresByIdWithFileNotFoundException() { given(measureRepo.findByIdAndDeleted(1L, false)).willReturn(null); service.getMeasureById(1); }
MeasureServiceImpl implements MeasureService {
    /**
     * Returns every non-deleted measure belonging to {@code owner};
     * an empty list when the owner has none.
     */
    @Override
    public List<Measure> getAliveMeasuresByOwner(String owner) {
        return measureRepo.findByOwnerAndDeleted(owner, false);
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// Verifies the owner filter is a straight repository delegation.
@Test public void testGetAliveMeasuresByOwner() throws Exception { String owner = "test"; Measure measure = createGriffinMeasure("view_item_hourly"); given(measureRepo.findByOwnerAndDeleted(owner, false)) .willReturn(Collections.singletonList(measure)); List<Measure> measures = service.getAliveMeasuresByOwner(owner); assertEquals(measures.get(0).getName(), measure.getName()); }
MeasureServiceImpl implements MeasureService {
    /**
     * Soft-deletes the measure with the given id via its type-specific
     * operator.
     *
     * @throws GriffinException.NotFoundException if no alive measure has this id
     * @throws SchedulerException                 if operator deletion touches the scheduler
     */
    @Override
    public void deleteMeasureById(Long measureId) throws SchedulerException {
        final Measure target = measureRepo.findByIdAndDeleted(measureId, false);
        if (target == null) {
            throw new GriffinException.NotFoundException(MEASURE_ID_DOES_NOT_EXIST);
        }
        final MeasureOperator operator = getOperation(target);
        operator.delete(target);
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// Four cases: griffin delete dispatches to griffinOp, external delete to
// externalOp, missing id raises NotFoundException, and an operator failure
// propagates as ServiceException.
@Test public void testDeleteMeasureByIdForGriffinSuccess() throws Exception { GriffinMeasure measure = createGriffinMeasure("view_item_hourly"); measure.setId(1L); given(measureRepo.findByIdAndDeleted(measure.getId(), false)) .willReturn(measure); doNothing().when(griffinOp).delete(measure); service.deleteMeasureById(measure.getId()); verify(griffinOp, times(1)).delete(measure); } @Test public void testDeleteMeasureByIdForExternalSuccess() throws SchedulerException { ExternalMeasure measure = createExternalMeasure("externalMeasure"); measure.setId(1L); given(measureRepo.findByIdAndDeleted(measure.getId(), false)) .willReturn(measure); doNothing().when(externalOp).delete(measure); service.deleteMeasureById(1L); verify(externalOp, times(1)).delete(measure); } @Test(expected = GriffinException.NotFoundException.class) public void testDeleteMeasureByIdFailureWithNotFound() throws SchedulerException { given(measureRepo.findByIdAndDeleted(1L, false)).willReturn(null); service.deleteMeasureById(1L); } @Test(expected = GriffinException.ServiceException.class) public void testDeleteMeasureByIdForGriffinFailureWithException() throws Exception { GriffinMeasure measure = createGriffinMeasure("externalMeasure"); measure.setId(1L); given(measureRepo.findByIdAndDeleted(measure.getId(), false)) .willReturn(measure); doThrow(new GriffinException.ServiceException("Failed to delete job", new Exception())) .when(griffinOp).delete(measure); service.deleteMeasureById(1L); }
MeasureServiceImpl implements MeasureService {
    /**
     * Deletes every non-deleted measure, dispatching each one to its
     * type-specific operator.
     *
     * @throws SchedulerException if an operator's delete touches the scheduler
     */
    @Override
    public void deleteMeasures() throws SchedulerException {
        final List<Measure> aliveMeasures = measureRepo.findByDeleted(false);
        for (Measure measure : aliveMeasures) {
            final MeasureOperator operator = getOperation(measure);
            operator.delete(measure);
        }
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// Bulk delete: griffin and external measures each dispatch to their operator;
// an operator failure propagates as ServiceException.
@Test public void testDeleteMeasuresForGriffinSuccess() throws Exception { GriffinMeasure measure = createGriffinMeasure("view_item_hourly"); measure.setId(1L); given(measureRepo.findByDeleted(false)).willReturn(Arrays .asList(measure)); doNothing().when(griffinOp).delete(measure); service.deleteMeasures(); } @Test public void testDeleteMeasuresForExternalSuccess() throws SchedulerException { ExternalMeasure measure = createExternalMeasure("externalMeasure"); measure.setId(1L); given(measureRepo.findByDeleted(false)).willReturn(Arrays .asList(measure)); doNothing().when(externalOp).delete(measure); service.deleteMeasures(); } @Test(expected = GriffinException.ServiceException.class) public void testDeleteMeasuresForGriffinFailureWithException() throws Exception { GriffinMeasure measure = createGriffinMeasure("externalMeasure"); measure.setId(1L); given(measureRepo.findByDeleted(false)).willReturn(Arrays .asList(measure)); doThrow(new GriffinException.ServiceException("Failed to delete job", new Exception())) .when(griffinOp).delete(measure); service.deleteMeasures(); }
// Builds a Table by running "SHOW CREATE TABLE db.table" over JDBC, then
// parsing location and columns out of the DDL text. Lazily opens the
// connection (DriverManager) on first use. NOTE(review): any failure is
// caught broadly, only logged, and a partially-populated Table (no
// StorageDescriptor) is returned — callers cannot distinguish "no such
// table" from a connection error; consider propagating. Statement/ResultSet
// are released in closeConnection(); the Connection itself is kept cached.
HiveMetaStoreServiceJdbcImpl implements HiveMetaStoreService { @Override @Cacheable(unless = "#result==null") public Table getTable(String dbName, String tableName) { Table result = new Table(); result.setDbName(dbName); result.setTableName(tableName); String sql = SHOW_CREATE_TABLE + dbName + "." + tableName; Statement stmt = null; ResultSet rs = null; StringBuilder sb = new StringBuilder(); try { Class.forName(hiveClassName); if (conn == null) { conn = DriverManager.getConnection(hiveUrl); } LOGGER.info("got connection"); stmt = conn.createStatement(); rs = stmt.executeQuery(sql); while (rs.next()) { String s = rs.getString(1); sb.append(s); } String location = getLocation(sb.toString()); List<FieldSchema> cols = getColums(sb.toString()); StorageDescriptor sd = new StorageDescriptor(); sd.setLocation(location); sd.setCols(cols); result.setSd(sd); } catch (Exception e) { LOGGER.error("Query Hive Table metadata has error. {}", e.getMessage()); } finally { closeConnection(stmt, rs); } return result; } void setConn(Connection conn); void setHiveClassName(String hiveClassName); void setNeedKerberos(String needKerberos); void setKeytabUser(String keytabUser); void setKeytabPath(String keytabPath); @PostConstruct void init(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllDatabases(); @Override @Cacheable(unless = "#result==null") Iterable<String> getAllTableNames(String dbName); @Override @Cacheable(unless = "#result==null") Map<String, List<String>> getAllTableNames(); @Override List<Table> getAllTable(String db); @Override Map<String, List<Table>> getAllTable(); @Override @Cacheable(unless = "#result==null") Table getTable(String dbName, String tableName); @Scheduled(fixedRateString = "${cache.evict.hive.fixedRate.in.milliseconds}") @CacheEvict( cacheNames = "jdbcHive", allEntries = true, beforeInvocation = true) void evictHiveCache(); String getLocation(String tableMetadata); List<FieldSchema> getColums(String tableMetadata); String 
getComment(String colStr); }
// Feeds a sample SHOW CREATE TABLE DDL through mocked JDBC and checks the
// parsed db/table name, location, and first column schema. NOTE(review):
// this line is garbled — the DDL's "LOCATION 'hdfs:" literal and the later
// assertion string "hdfs:" were truncated (likely at "//" by a
// comment-stripping step), so the snippet is not compilable as-is; the
// original test presumably contained full hdfs:// URLs — recover from VCS.
@Test public void testGetTable() throws SQLException { String meta = "CREATE EXTERNAL TABLE `default.session_data`( `session_date` string COMMENT 'this is session date', `site_id` int COMMENT '', `guid` string COMMENT '', `user_id` string COMMENT '')COMMENT 'session_data for session team' PARTITIONED BY ( `dt` string, `place` int) ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe' STORED AS INPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerInputFormat' OUTPUTFORMAT 'org.apache.hadoop.hive.ql.io.avro.AvroContainerOutputFormat' LOCATION 'hdfs: when(conn.createStatement()).thenReturn(stmt); when(stmt.executeQuery(anyString())).thenReturn(rs); when(rs.next()).thenReturn(true).thenReturn(false); when(rs.getString(anyInt())).thenReturn(meta); Table res = serviceJdbc.getTable("default", "session_data"); assert (res.getDbName().equals("default")); assert (res.getTableName().equals("session_data")); assert (res.getSd().getLocation().equals("hdfs: List<FieldSchema> fieldSchemas = res.getSd().getCols(); for (FieldSchema fieldSchema : fieldSchemas) { Assert.assertEquals(fieldSchema.getName(),"session_date"); Assert.assertEquals(fieldSchema.getType(),"string"); Assert.assertEquals(fieldSchema.getComment(),"this is session date"); break; } }
MeasureServiceImpl implements MeasureService {
    /**
     * Creates a new measure after checking its name is not already used by
     * an alive measure; creation is handled by the type-specific operator.
     *
     * @throws GriffinException.ConflictException if an alive measure with the
     *         same name already exists
     */
    @Override
    public Measure createMeasure(Measure measure) {
        final List<Measure> existing =
                measureRepo.findByNameAndDeleted(measure.getName(), false);
        if (!CollectionUtils.isEmpty(existing)) {
            LOGGER.warn("Failed to create new measure {}, it already exists.",
                    measure.getName());
            throw new GriffinException.ConflictException(MEASURE_NAME_ALREADY_EXIST);
        }
        final MeasureOperator operator = getOperation(measure);
        return operator.create(measure);
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// Creation dispatches by measure type (griffinOp vs externalOp); a duplicate
// alive name raises ConflictException.
@Test public void testCreateMeasureForGriffinSuccess() throws Exception { String measureName = "view_item_hourly"; GriffinMeasure griffinMeasure = createGriffinMeasure(measureName); given(measureRepo.findByNameAndDeleted(measureName, false)) .willReturn(new ArrayList<>()); given(griffinOp.create(griffinMeasure)).willReturn(griffinMeasure); Measure measure = service.createMeasure(griffinMeasure); assertEquals(measure.getName(), griffinMeasure.getName()); } @Test public void testCreateMeasureForExternalSuccess() { String measureName = "view_item_hourly"; ExternalMeasure externalMeasure = createExternalMeasure(measureName); given(measureRepo.findByNameAndDeleted(measureName, false)) .willReturn(new ArrayList<>()); given(externalOp.create(externalMeasure)).willReturn(externalMeasure); Measure measure = service.createMeasure(externalMeasure); assertEquals(measure.getName(), externalMeasure.getName()); } @Test(expected = GriffinException.ConflictException.class) public void testCreateMeasureForFailureWithDuplicate() throws Exception { String measureName = "view_item_hourly"; GriffinMeasure measure = createGriffinMeasure(measureName); given(measureRepo.findByNameAndDeleted(measureName, false)) .willReturn(Collections.singletonList(measure)); service.createMeasure(measure); }
MeasureServiceImpl implements MeasureService {
    /**
     * Updates an existing measure. The stored measure must exist and keep
     * its original type; the type-specific operator performs the update.
     *
     * @throws GriffinException.NotFoundException   if no alive measure has this id
     * @throws GriffinException.BadRequestException if the update changes the type
     */
    @Override
    public Measure updateMeasure(Measure measure) {
        final Measure stored = measureRepo.findByIdAndDeleted(measure.getId(), false);
        if (stored == null) {
            throw new GriffinException.NotFoundException(MEASURE_ID_DOES_NOT_EXIST);
        }
        if (!stored.getType().equals(measure.getType())) {
            LOGGER.warn("Can't update measure to different type.");
            throw new GriffinException.BadRequestException(MEASURE_TYPE_DOES_NOT_MATCH);
        }
        final MeasureOperator operator = getOperation(measure);
        return operator.update(measure);
    }
    @Override List<? extends Measure> getAllAliveMeasures(String type);
    @Override Measure getMeasureById(long id);
    @Override List<Measure> getAliveMeasuresByOwner(String owner);
    @Override Measure createMeasure(Measure measure);
    @Override Measure updateMeasure(Measure measure);
    @Override void deleteMeasureById(Long measureId);
    @Override void deleteMeasures();
}
// Update paths: griffin success, type mismatch (BadRequestException),
// missing id (NotFoundException), external success. NOTE(review): the first
// case stubs externalOp.update but verifies griffinOp — works only because
// Mockito stubbing is lenient; the stub line is likely a copy-paste slip.
@Test public void testUpdateMeasureForGriffinSuccess() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); given(measureRepo.findByIdAndDeleted(measure.getId(), false)) .willReturn(measure); doReturn(measure).when(externalOp).update(measure); service.updateMeasure(measure); verify(griffinOp, times(1)).update(measure); } @Test(expected = GriffinException.BadRequestException.class) public void testUpdateMeasureForGriffinFailureWithDiffType() throws Exception { Measure griffinMeasure = createGriffinMeasure("view_item_hourly"); Measure externalMeasure = createExternalMeasure("externalName"); given(measureRepo.findByIdAndDeleted(griffinMeasure.getId(), false)) .willReturn(externalMeasure); service.updateMeasure(griffinMeasure); } @Test(expected = GriffinException.NotFoundException.class) public void testUpdateMeasureForFailureWithNotFound() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); given(measureRepo.findByIdAndDeleted(measure.getId(), false)) .willReturn(null); service.updateMeasure(measure); } @Test public void testUpdateMeasureForExternal() { ExternalMeasure measure = createExternalMeasure ("external_view_item_hourly"); given(measureRepo.findByIdAndDeleted(measure.getId(), false)) .willReturn(measure); doReturn(measure).when(externalOp).update(measure); service.updateMeasure(measure); verify(externalOp, times(1)).update(measure); }
MeasureOrgController {
    /**
     * GET /org — lists all organization names known to the measure
     * organization service.
     */
    @RequestMapping(value = "/org", method = RequestMethod.GET)
    public List<String> getOrgs() {
        return measureOrgService.getOrgs();
    }
    @RequestMapping(value = "/org", method = RequestMethod.GET) List<String> getOrgs();
    @RequestMapping(value = "/org/{org}", method = RequestMethod.GET) List<String> getMetricNameListByOrg(@PathVariable("org") String org);
    @RequestMapping(value = "/org/measure/names", method = RequestMethod.GET) Map<String, List<String>> getMeasureNamesGroupByOrg();
}
// MockMvc check that GET /org returns the stubbed org list as JSON.
@Test public void testGetOrgs() throws Exception { String org = "orgName"; when(measureOrgService.getOrgs()).thenReturn(Arrays.asList(org)); mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/org")) .andExpect(status().isOk()) .andExpect(jsonPath("$.[0]", is(org))); }
MeasureOrgController {
    /**
     * GET /org/{org} — lists the metric names belonging to one
     * organization.
     */
    @RequestMapping(value = "/org/{org}", method = RequestMethod.GET)
    public List<String> getMetricNameListByOrg(@PathVariable("org") String org) {
        return measureOrgService.getMetricNameListByOrg(org);
    }
    @RequestMapping(value = "/org", method = RequestMethod.GET) List<String> getOrgs();
    @RequestMapping(value = "/org/{org}", method = RequestMethod.GET) List<String> getMetricNameListByOrg(@PathVariable("org") String org);
    @RequestMapping(value = "/org/measure/names", method = RequestMethod.GET) Map<String, List<String>> getMeasureNamesGroupByOrg();
}
// MockMvc check that GET /org/{org} returns the stubbed metric-name list.
@Test public void testGetMetricNameListByOrg() throws Exception { String org = "hadoop"; when(measureOrgService.getMetricNameListByOrg(org)).thenReturn(Arrays .asList(org)); mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/org/{org}", org)) .andExpect(status().isOk()) .andExpect(jsonPath("$.[0]", is(org))); }
// GET /dbs — straight delegation to the Hive metastore service for the
// list of database names.
HiveMetaStoreController { @RequestMapping(value = "/dbs", method = RequestMethod.GET) public Iterable<String> getAllDatabases() { return hiveMetaStoreService.getAllDatabases(); } @RequestMapping(value = "/dbs", method = RequestMethod.GET) Iterable<String> getAllDatabases(); @RequestMapping(value = "/tables/names", method = RequestMethod.GET) Iterable<String> getAllTableNames(@RequestParam("db") String dbName); @RequestMapping(value = "/tables", method = RequestMethod.GET) List<Table> getAllTables(@RequestParam("db") String dbName); @RequestMapping(value = "/dbs/tables", method = RequestMethod.GET) Map<String, List<Table>> getAllTables(); @RequestMapping(value = "/dbs/tables/names", method = RequestMethod.GET) Map<String, List<String>> getAllTableNames(); @RequestMapping(value = "/table", method = RequestMethod.GET) Table getTable(@RequestParam("db") String dbName, @RequestParam("table") String tableName); }
// MockMvc check that GET /metadata/hive/dbs returns the stubbed db list.
@Test public void testGetAllDatabases() throws Exception { String dbName = "default"; given(hiveMetaStoreService.getAllDatabases()).willReturn(Arrays .asList(dbName)); mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/metadata/hive/dbs")) .andExpect(status().isOk()) .andExpect(jsonPath("$.[0]", is(dbName))); }
MeasureOrgController {
    /**
     * GET /org/measure/names — returns measure names grouped by owning
     * organization (org name -> list of measure names).
     */
    @RequestMapping(value = "/org/measure/names", method = RequestMethod.GET)
    public Map<String, List<String>> getMeasureNamesGroupByOrg() {
        return measureOrgService.getMeasureNamesGroupByOrg();
    }
    @RequestMapping(value = "/org", method = RequestMethod.GET) List<String> getOrgs();
    @RequestMapping(value = "/org/{org}", method = RequestMethod.GET) List<String> getMetricNameListByOrg(@PathVariable("org") String org);
    @RequestMapping(value = "/org/measure/names", method = RequestMethod.GET) Map<String, List<String>> getMeasureNamesGroupByOrg();
}
// MockMvc check that GET /org/measure/names returns the grouped map and the
// org's list has the expected size.
@Test public void testGetMeasureNamesGroupByOrg() throws Exception { List<String> measures = Arrays.asList("measureName"); Map<String, List<String>> map = new HashMap<>(); map.put("orgName", measures); when(measureOrgService.getMeasureNamesGroupByOrg()).thenReturn(map); mockMvc.perform(get(URLHelper.API_VERSION_PATH + "/org/measure/names")) .andExpect(status().isOk()) .andExpect(jsonPath("$.orgName", hasSize(1))); }
// GET /measures — delegates to the measure service; "type" defaults to ""
// which selects all measure types.
MeasureController { @RequestMapping(value = "/measures", method = RequestMethod.GET) public List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type) { return measureService.getAllAliveMeasures(type); } @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id); @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id); @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures(); @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure); @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner); @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure); }
// MockMvc check that GET /measures serializes the stubbed measure list.
@Test public void testGetAllMeasures() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); Mockito.<List<? extends Measure>>when(service.getAllAliveMeasures("")) .thenReturn(Collections.singletonList(measure)); mvc.perform(get(URLHelper.API_VERSION_PATH + "/measures")) .andExpect(status().isOk()) .andExpect(jsonPath("$.[0].name", is("view_item_hourly"))); }
// GET /measures/{id} — straight delegation; the service raises
// NotFoundException for a missing id.
MeasureController { @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) public Measure getMeasureById(@PathVariable("id") long id) { return measureService.getMeasureById(id); } @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id); @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id); @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures(); @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure); @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner); @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure); }
// MockMvc check that GET /measures/1 serializes the stubbed measure.
@Test public void testGetMeasuresById() throws Exception { Measure measure = createGriffinMeasure("view_item_hourly"); given(service.getMeasureById(1L)).willReturn(measure); mvc.perform(get(URLHelper.API_VERSION_PATH + "/measures/1")) .andExpect(status().isOk()) .andExpect(jsonPath("$.name", is("view_item_hourly"))); }
// DELETE /measures/{id} — responds 204 on success; service exceptions map
// to 404/500 via the global exception handler.
MeasureController { @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) public void deleteMeasureById(@PathVariable("id") Long id) throws SchedulerException { measureService.deleteMeasureById(id); } @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id); @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id); @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures(); @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure); @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner); @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure); }
// DELETE /measures/1: 204 on success, 404 when the service raises
// NotFoundException, 500 when it raises ServiceException.
@Test public void testDeleteMeasureByIdForSuccess() throws Exception { doNothing().when(service).deleteMeasureById(1L); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures/1")) .andExpect(status().isNoContent()); } @Test public void testDeleteMeasureByIdForNotFound() throws Exception { doThrow(new GriffinException.NotFoundException(GriffinExceptionMessage .MEASURE_ID_DOES_NOT_EXIST)) .when(service).deleteMeasureById(1L); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures/1")) .andExpect(status().isNotFound()); } @Test public void testDeleteMeasureByIdForGriffinFailureWithException() throws Exception { doThrow(new GriffinException.ServiceException("Failed to delete job", new Exception())) .when(service).deleteMeasureById(1L); mvc.perform(delete(URLHelper.API_VERSION_PATH + "/measures/1")) .andExpect(status().isInternalServerError()); }
// DELETE /measures — bulk delete of all alive measures; responds 204 on
// success, with service exceptions mapped by the global handler.
MeasureController { @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) public void deleteMeasures() throws SchedulerException { measureService.deleteMeasures(); } @RequestMapping(value = "/measures", method = RequestMethod.GET) List<? extends Measure> getAllAliveMeasures(@RequestParam(value = "type", defaultValue = "") String type); @RequestMapping(value = "/measures/{id}", method = RequestMethod.GET) Measure getMeasureById(@PathVariable("id") long id); @RequestMapping(value = "/measures/{id}", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasureById(@PathVariable("id") Long id); @RequestMapping(value = "/measures", method = RequestMethod.DELETE) @ResponseStatus(HttpStatus.NO_CONTENT) void deleteMeasures(); @RequestMapping(value = "/measures", method = RequestMethod.PUT) @ResponseStatus(HttpStatus.OK) Measure updateMeasure(@RequestBody Measure measure); @RequestMapping(value = "/measures/owner/{owner}", method = RequestMethod.GET) List<Measure> getAliveMeasuresByOwner(@PathVariable("owner") @Valid String owner); @RequestMapping(value = "/measures", method = RequestMethod.POST) @ResponseStatus(HttpStatus.CREATED) Measure createMeasure(@RequestBody Measure measure); }
// DELETE /measures (bulk) — mirrors the by-id cases: 204 on success, 404 when
// the service reports nothing to delete, 500 on an internal failure.
@Test
public void testDeleteMeasuresForSuccess() throws Exception {
    String endpoint = URLHelper.API_VERSION_PATH + "/measures";
    doNothing().when(service).deleteMeasures();

    mvc.perform(delete(endpoint))
            .andExpect(status().isNoContent());
}

@Test
public void testDeleteMeasuresForNotFound() throws Exception {
    String endpoint = URLHelper.API_VERSION_PATH + "/measures";
    // Service signals a missing measure id during the bulk delete.
    doThrow(new GriffinException.NotFoundException(
            GriffinExceptionMessage.MEASURE_ID_DOES_NOT_EXIST))
            .when(service).deleteMeasures();

    mvc.perform(delete(endpoint))
            .andExpect(status().isNotFound());
}

@Test
public void testDeleteMeasuresForGriffinFailureWithException() throws Exception {
    String endpoint = URLHelper.API_VERSION_PATH + "/measures";
    // An internal service failure must surface as HTTP 500.
    doThrow(new GriffinException.ServiceException("Failed to delete job",
            new Exception()))
            .when(service).deleteMeasures();

    mvc.perform(delete(endpoint))
            .andExpect(status().isInternalServerError());
}
// REST controller exposing read-only Hive metastore metadata (databases,
// tables, table names).
// NOTE(review): only getAllTableNames(String) carries its implementation here;
// the remaining members are signature stubs whose bodies live elsewhere.
HiveMetaStoreController {
    // GET /tables/names?db=... — lists the table names of one Hive database by
    // delegating straight to the metastore service.
    @RequestMapping(value = "/tables/names", method = RequestMethod.GET)
    public Iterable<String> getAllTableNames(@RequestParam("db") String dbName) {
        return hiveMetaStoreService.getAllTableNames(dbName);
    }

    // GET /dbs — lists all database names.
    @RequestMapping(value = "/dbs", method = RequestMethod.GET)
    Iterable<String> getAllDatabases();

    // GET /tables/names?db=... — signature stub matching the implemented method above.
    @RequestMapping(value = "/tables/names", method = RequestMethod.GET)
    Iterable<String> getAllTableNames(@RequestParam("db") String dbName);

    // GET /tables?db=... — full Table metadata for one database.
    @RequestMapping(value = "/tables", method = RequestMethod.GET)
    List<Table> getAllTables(@RequestParam("db") String dbName);

    // GET /dbs/tables — all tables grouped by database name.
    @RequestMapping(value = "/dbs/tables", method = RequestMethod.GET)
    Map<String, List<Table>> getAllTables();

    // GET /dbs/tables/names — all table names grouped by database name.
    @RequestMapping(value = "/dbs/tables/names", method = RequestMethod.GET)
    Map<String, List<String>> getAllTableNames();

    // GET /table?db=...&table=... — metadata for one specific table.
    @RequestMapping(value = "/table", method = RequestMethod.GET)
    Table getTable(@RequestParam("db") String dbName, @RequestParam("table") String tableName);
}
// GET /metadata/hive/tables/names?db=... — the stubbed service returns one
// table name, which must appear as the first element of the JSON array.
@Test
public void testGetAllTableNames() throws Exception {
    String db = "default";
    String table = "table";
    given(hiveMetaStoreService.getAllTableNames(db))
            .willReturn(Arrays.asList(table));

    String endpoint = URLHelper.API_VERSION_PATH + "/metadata/hive/tables/names";
    mockMvc.perform(get(endpoint).param("db", db))
            .andExpect(status().isOk())
            .andExpect(jsonPath("$.[0]", is(table)));
}
// Null-object Content: a placeholder that draws nothing. Useful where a
// Content instance is required but no visible output is wanted.
// NOTE(review): only the first draw() carries its implementation here; the
// remaining members are signature stubs whose bodies live elsewhere.
EmptyContent extends Content {
    // Intentionally a no-op — empty content paints nothing onto the canvas.
    @Override
    public void draw(Graphics2D graphics) {}

    // Signature stubs for the overridden Content API.
    @Override
    void draw(Graphics2D graphics);
    @Override
    void setMinWidth(double minWidth);
    @Override
    void setMinHeight(double minHeight);
}
// Verifies EmptyContent.draw() is a safe no-op. The original test body was
// EMPTY — it exercised nothing and would pass regardless of behavior. Since
// draw() never dereferences its argument (its body is empty), invoking it
// with null must complete without throwing.
// NOTE(review): assumes EmptyContent has an accessible no-arg constructor —
// confirm against the Content base class.
@Test
public void testDraw() throws Exception {
    EmptyContent content = new EmptyContent();
    content.draw(null);
}