src_fm_fc_ms_ff
stringlengths
43
86.8k
target
stringlengths
20
276k
StaxUtils { public static XMLEventWriter newXMLEventWriter(OutputStream out) { try { return XMLOutputFactory.newFactory().createXMLEventWriter(out); } catch (XMLStreamException ex) { throw new IllegalStateException(ex); } } private StaxUtils(); static StAXSource newStAXSource(XMLEventReader in); static XMLStreamWriter newXMLStreamWriter(OutputStream out); static XMLEventWriter newXMLEventWriter(OutputStream out); }
@Test public void newXMLEventWriter() throws Exception { XMLEventWriter writer = StaxUtils.newXMLEventWriter(outputStream); assertThat(writer, is(not(nullValue()))); }
FluentXPathContext { XPathExpression compile(String xPathQuery, NamespaceContext namespaceContext) { try { XPath xPath = this.xPathConfigurer.getXPath(namespaceContext); return xPath.compile(xPathQuery); } catch (XPathExpressionException ex) { throw new FluentXmlProcessingException(ex); } } }
@Test public void compile() throws XPathExpressionException { when(this.xPathConfigurer.getXPath(this.immutableNamespaceContext)).thenReturn(this.xpath); when(this.xpath.compile("aQuery")).thenReturn(this.xpathExpr); XPathExpression xpathExprReturned = this.fluentXPathContext.compile("aQuery", this.immutableNamespaceContext); assertThat(xpathExprReturned, is(xpathExpr)); }
FromNodeImpl implements QueryFromNode { @Override public Optional<String> selectString(String xPathQuery) { return selectUniqueNode(xPathQuery, ToStringConverter::toString); } FromNodeImpl(Node baseNode, FluentXPathContext context); @Override QueryFromNode withNamespaceContext(NamespaceContext namespaceContext); @Override QueryFromNode withNamespaceMappings(String... namespaceMappings); @Override QueryFromNode withNamespaceMapping(String prefix, String namespaceURI); @Override QueryFromNode withXPath(XPathConfigurer xPathConfigurer); @Override SelectMultipleFromNode<String> selectStrings(String xPathQuery); @Override SelectMultipleFromNode<Node> selectNodes(String xPathQuery); @Override SelectMultipleFromNode<Element> selectElements(String xPathQuery); @Override Optional<String> selectString(String xPathQuery); @Override Optional<Element> selectElement(String xPathQuery); @Override Optional<Node> selectNode(String xPathQuery); @Override Optional<Integer> selectInteger(String xPathQuery); @Override Optional<Boolean> selectBoolean(String xPathQuery); @Override SelectMultipleFromNode<Integer> selectIntegers(String xPathQuery); @Override SelectMultipleFromNode<Boolean> selectBooleans(String xPathQuery); }
@Test public void selectStringGivesEmptyOptionalWhenNoResultFound() throws XPathExpressionException { givenResultNodeSetIsEmpty(); boolean present = this.fromNode.selectString("aQuery").isPresent(); assertThat(present, is(false)); } @Test public void singleStringWhenSingleNodeFound() throws XPathExpressionException { givenResultNodeSetContains("string1"); String result = this.fromNode.selectString("aQuery").get(); assertThat(result, is("string1")); } @Test public void selectStringFailsWhenMultipleNodesFound() throws XPathExpressionException { expectedException.expect(FluentXmlProcessingException.class); givenResultNodeSetContains("string1", "string2"); this.fromNode.selectString("aQuery"); }
FromNodeImpl implements QueryFromNode { @Override public Optional<Node> selectNode(String xPathQuery) { return selectUniqueNode(xPathQuery, Function.identity()); } FromNodeImpl(Node baseNode, FluentXPathContext context); @Override QueryFromNode withNamespaceContext(NamespaceContext namespaceContext); @Override QueryFromNode withNamespaceMappings(String... namespaceMappings); @Override QueryFromNode withNamespaceMapping(String prefix, String namespaceURI); @Override QueryFromNode withXPath(XPathConfigurer xPathConfigurer); @Override SelectMultipleFromNode<String> selectStrings(String xPathQuery); @Override SelectMultipleFromNode<Node> selectNodes(String xPathQuery); @Override SelectMultipleFromNode<Element> selectElements(String xPathQuery); @Override Optional<String> selectString(String xPathQuery); @Override Optional<Element> selectElement(String xPathQuery); @Override Optional<Node> selectNode(String xPathQuery); @Override Optional<Integer> selectInteger(String xPathQuery); @Override Optional<Boolean> selectBoolean(String xPathQuery); @Override SelectMultipleFromNode<Integer> selectIntegers(String xPathQuery); @Override SelectMultipleFromNode<Boolean> selectBooleans(String xPathQuery); }
@Test public void selectNodeGivesEmptyOptionalWhenNoResultFound() throws XPathExpressionException { givenResultNodeSetIsEmpty(); boolean present = this.fromNode.selectNode("aQuery").isPresent(); assertThat(present, is(false)); } @Test public void singleNodeWhenSingleNodeFound() throws XPathExpressionException { givenResultNodeSetContains("string1"); Text result = (Text) this.fromNode.selectNode("aQuery").get(); assertThat(result.getData(), is("string1")); } @Test public void selectNodeFailsWhenMultipleNodesFound() throws XPathExpressionException { expectedException.expect(FluentXmlProcessingException.class); givenResultNodeSetContains("string1", "string2"); this.fromNode.selectNode("aQuery"); }
FromNodeImpl implements QueryFromNode { @Override public Optional<Element> selectElement(String xPathQuery) { return selectUniqueNode(xPathQuery, ToElementConverter::toElement); } FromNodeImpl(Node baseNode, FluentXPathContext context); @Override QueryFromNode withNamespaceContext(NamespaceContext namespaceContext); @Override QueryFromNode withNamespaceMappings(String... namespaceMappings); @Override QueryFromNode withNamespaceMapping(String prefix, String namespaceURI); @Override QueryFromNode withXPath(XPathConfigurer xPathConfigurer); @Override SelectMultipleFromNode<String> selectStrings(String xPathQuery); @Override SelectMultipleFromNode<Node> selectNodes(String xPathQuery); @Override SelectMultipleFromNode<Element> selectElements(String xPathQuery); @Override Optional<String> selectString(String xPathQuery); @Override Optional<Element> selectElement(String xPathQuery); @Override Optional<Node> selectNode(String xPathQuery); @Override Optional<Integer> selectInteger(String xPathQuery); @Override Optional<Boolean> selectBoolean(String xPathQuery); @Override SelectMultipleFromNode<Integer> selectIntegers(String xPathQuery); @Override SelectMultipleFromNode<Boolean> selectBooleans(String xPathQuery); }
@Test public void selectElementGivesEmptyOptionalWhenNoResultFound() throws XPathExpressionException { givenResultNodeSetIsEmpty(); boolean present = this.fromNode.selectElement("aQuery").isPresent(); assertThat(present, is(false)); }
FromNodeImpl implements QueryFromNode { @Override public SelectMultipleFromNode<String> selectStrings(String xPathQuery) { XPathExpression xPathExpression = this.context.compile(xPathQuery, this.namespaceContext); return new SelectMultipleFromNodeImpl<>(baseNode, xPathExpression, ToStringConverter::toString); } FromNodeImpl(Node baseNode, FluentXPathContext context); @Override QueryFromNode withNamespaceContext(NamespaceContext namespaceContext); @Override QueryFromNode withNamespaceMappings(String... namespaceMappings); @Override QueryFromNode withNamespaceMapping(String prefix, String namespaceURI); @Override QueryFromNode withXPath(XPathConfigurer xPathConfigurer); @Override SelectMultipleFromNode<String> selectStrings(String xPathQuery); @Override SelectMultipleFromNode<Node> selectNodes(String xPathQuery); @Override SelectMultipleFromNode<Element> selectElements(String xPathQuery); @Override Optional<String> selectString(String xPathQuery); @Override Optional<Element> selectElement(String xPathQuery); @Override Optional<Node> selectNode(String xPathQuery); @Override Optional<Integer> selectInteger(String xPathQuery); @Override Optional<Boolean> selectBoolean(String xPathQuery); @Override SelectMultipleFromNode<Integer> selectIntegers(String xPathQuery); @Override SelectMultipleFromNode<Boolean> selectBooleans(String xPathQuery); }
@Test public void selectStringsGivesListOfStrings() throws XPathExpressionException { givenResultNodeSetContains("string1", "string2"); List<String> result = this.fromNode.selectStrings("aQuery").asList(); assertThat(result, is(Arrays.asList("string1", "string2"))); }
XmlSource extends ExternalResource { public String asString(String charSet) { try { return new String(this.data, charSet); } catch (IOException ex) { throw new RuntimeException(ex); } } private XmlSource(byte[] data); static XmlSource withData(String xml); static XmlSource withData(byte[] xml); static XmlSource withDataFrom(Class<?> clazz, String path); URL asUrl(); InputStream asInputStream(); Document asDocument(); byte[] asBytes(); File asFile(); Reader asReader(String charSet); String asString(String charSet); String asString(); XMLStreamReader asXMLStreamReader(); XMLEventReader asXMLEventReader(); }
@Test public void asString() { assertThat(xml.asString(), is("<test/>")); }
FromNodeImpl implements QueryFromNode { @Override public SelectMultipleFromNode<Node> selectNodes(String xPathQuery) { XPathExpression xPathExpression = this.context.compile(xPathQuery, this.namespaceContext); return new SelectMultipleFromNodeImpl<>(baseNode, xPathExpression, Function.identity()); } FromNodeImpl(Node baseNode, FluentXPathContext context); @Override QueryFromNode withNamespaceContext(NamespaceContext namespaceContext); @Override QueryFromNode withNamespaceMappings(String... namespaceMappings); @Override QueryFromNode withNamespaceMapping(String prefix, String namespaceURI); @Override QueryFromNode withXPath(XPathConfigurer xPathConfigurer); @Override SelectMultipleFromNode<String> selectStrings(String xPathQuery); @Override SelectMultipleFromNode<Node> selectNodes(String xPathQuery); @Override SelectMultipleFromNode<Element> selectElements(String xPathQuery); @Override Optional<String> selectString(String xPathQuery); @Override Optional<Element> selectElement(String xPathQuery); @Override Optional<Node> selectNode(String xPathQuery); @Override Optional<Integer> selectInteger(String xPathQuery); @Override Optional<Boolean> selectBoolean(String xPathQuery); @Override SelectMultipleFromNode<Integer> selectIntegers(String xPathQuery); @Override SelectMultipleFromNode<Boolean> selectBooleans(String xPathQuery); }
@Test public void selectNodesGivesListOfNodes() throws XPathExpressionException { givenResultNodeSetContains("string1", "string2"); List<Node> result = this.fromNode.selectNodes("aQuery").asList(); assertThat(result.size(), is(2)); assertThat(result.get(0), instanceOf(Text.class)); assertThat(result.get(1), instanceOf(Text.class)); }
TransformWithSerializerNodeImpl implements SerializeWithTransformerNode { @Override public void to(OutputStream out) { this.transformationChain.transformTo(out); } TransformWithSerializerNodeImpl(TransformationChain transformationChain); @Override void to(OutputStream out); @Override void to(Writer out); @Override void to(XMLStreamWriter out); @Override void to(XMLEventWriter out); @Override void to(File file); @Override void to(Result out); @Override String toString(); @Override byte[] toBytes(); }
@Test public void toWriter() { node.to(writer); verify(transformationChain).transformTo(writer); } @Test public void toOutputStream() { node.to(outputStream); verify(transformationChain).transformTo(outputStream); }
TransformWithSerializerNodeImpl implements SerializeWithTransformerNode { @Override public String toString() { return this.transformationChain.transformToString(); } TransformWithSerializerNodeImpl(TransformationChain transformationChain); @Override void to(OutputStream out); @Override void to(Writer out); @Override void to(XMLStreamWriter out); @Override void to(XMLEventWriter out); @Override void to(File file); @Override void to(Result out); @Override String toString(); @Override byte[] toBytes(); }
@Test public void transformToString() { node.toString(); verify(transformationChain).transformToString(); }
AbstractSAXFilter extends Transformer implements TransformerHandler { @Override public void setParameter(String name, Object o) { throw new IllegalArgumentException("Unsupported param " + name + "."); } protected AbstractSAXFilter(); protected AbstractSAXFilter(ContentHandler nextContentHandler); @Override void setDocumentLocator(Locator locator); @Override void startDocument(); @Override void endDocument(); @Override void startPrefixMapping(String prefix, String nsURI); @Override void endPrefixMapping(String prefix); @Override void startElement(String nsURI, String localName, String qName, Attributes attributes); @Override void endElement(String nsURI, String localName, String qName); @Override void characters(char[] ch, int start, int length); @Override void ignorableWhitespace(char[] ch, int start, int length); @Override void processingInstruction(String target, String data); @Override void skippedEntity(String name); @Override Transformer getTransformer(); @Override void setResult(Result result); @Override void setSystemId(String systemId); @Override String getSystemId(); @Override void notationDecl(String name, String publicId, String systemId); @Override void unparsedEntityDecl(String name, String publicId, String systemId, String notationName); @Override void startDTD(String name, String publicId, String systemId); @Override void endDTD(); @Override void startEntity(String name); @Override void endEntity(String name); @Override void startCDATA(); @Override void endCDATA(); @Override void comment(char[] ch, int start, int length); @Override void transform(Source source, Result result); @Override void setParameter(String name, Object o); @Override Object getParameter(String name); @Override void clearParameters(); @Override void setURIResolver(URIResolver uriResolver); @Override URIResolver getURIResolver(); @Override void setOutputProperties(Properties properties); @Override Properties getOutputProperties(); @Override void setOutputProperty(String name, 
String value); @Override String getOutputProperty(String name); @Override void setErrorListener(ErrorListener errorListener); @Override ErrorListener getErrorListener(); }
@Test public void noParametersSupported() { expectedException.expect(IllegalArgumentException.class); filter.setParameter("name", "value"); }
AbstractSAXFilter extends Transformer implements TransformerHandler { @Override public void clearParameters() { } protected AbstractSAXFilter(); protected AbstractSAXFilter(ContentHandler nextContentHandler); @Override void setDocumentLocator(Locator locator); @Override void startDocument(); @Override void endDocument(); @Override void startPrefixMapping(String prefix, String nsURI); @Override void endPrefixMapping(String prefix); @Override void startElement(String nsURI, String localName, String qName, Attributes attributes); @Override void endElement(String nsURI, String localName, String qName); @Override void characters(char[] ch, int start, int length); @Override void ignorableWhitespace(char[] ch, int start, int length); @Override void processingInstruction(String target, String data); @Override void skippedEntity(String name); @Override Transformer getTransformer(); @Override void setResult(Result result); @Override void setSystemId(String systemId); @Override String getSystemId(); @Override void notationDecl(String name, String publicId, String systemId); @Override void unparsedEntityDecl(String name, String publicId, String systemId, String notationName); @Override void startDTD(String name, String publicId, String systemId); @Override void endDTD(); @Override void startEntity(String name); @Override void endEntity(String name); @Override void startCDATA(); @Override void endCDATA(); @Override void comment(char[] ch, int start, int length); @Override void transform(Source source, Result result); @Override void setParameter(String name, Object o); @Override Object getParameter(String name); @Override void clearParameters(); @Override void setURIResolver(URIResolver uriResolver); @Override URIResolver getURIResolver(); @Override void setOutputProperties(Properties properties); @Override Properties getOutputProperties(); @Override void setOutputProperty(String name, String value); @Override String getOutputProperty(String name); @Override void 
setErrorListener(ErrorListener errorListener); @Override ErrorListener getErrorListener(); }
@Test public void clearParametersSupportedNoException() { filter.clearParameters(); }
AbstractSAXFilter extends Transformer implements TransformerHandler { @Override public Object getParameter(String name) { return null; } protected AbstractSAXFilter(); protected AbstractSAXFilter(ContentHandler nextContentHandler); @Override void setDocumentLocator(Locator locator); @Override void startDocument(); @Override void endDocument(); @Override void startPrefixMapping(String prefix, String nsURI); @Override void endPrefixMapping(String prefix); @Override void startElement(String nsURI, String localName, String qName, Attributes attributes); @Override void endElement(String nsURI, String localName, String qName); @Override void characters(char[] ch, int start, int length); @Override void ignorableWhitespace(char[] ch, int start, int length); @Override void processingInstruction(String target, String data); @Override void skippedEntity(String name); @Override Transformer getTransformer(); @Override void setResult(Result result); @Override void setSystemId(String systemId); @Override String getSystemId(); @Override void notationDecl(String name, String publicId, String systemId); @Override void unparsedEntityDecl(String name, String publicId, String systemId, String notationName); @Override void startDTD(String name, String publicId, String systemId); @Override void endDTD(); @Override void startEntity(String name); @Override void endEntity(String name); @Override void startCDATA(); @Override void endCDATA(); @Override void comment(char[] ch, int start, int length); @Override void transform(Source source, Result result); @Override void setParameter(String name, Object o); @Override Object getParameter(String name); @Override void clearParameters(); @Override void setURIResolver(URIResolver uriResolver); @Override URIResolver getURIResolver(); @Override void setOutputProperties(Properties properties); @Override Properties getOutputProperties(); @Override void setOutputProperty(String name, String value); @Override String getOutputProperty(String name); 
@Override void setErrorListener(ErrorListener errorListener); @Override ErrorListener getErrorListener(); }
@Test public void getParameterReturnsNull() { Object value = filter.getParameter("name"); assertThat(value, is(nullValue())); }
AbstractSAXFilter extends Transformer implements TransformerHandler { @Override public void transform(Source source, Result result) throws TransformerException { throw new UnsupportedOperationException(); } protected AbstractSAXFilter(); protected AbstractSAXFilter(ContentHandler nextContentHandler); @Override void setDocumentLocator(Locator locator); @Override void startDocument(); @Override void endDocument(); @Override void startPrefixMapping(String prefix, String nsURI); @Override void endPrefixMapping(String prefix); @Override void startElement(String nsURI, String localName, String qName, Attributes attributes); @Override void endElement(String nsURI, String localName, String qName); @Override void characters(char[] ch, int start, int length); @Override void ignorableWhitespace(char[] ch, int start, int length); @Override void processingInstruction(String target, String data); @Override void skippedEntity(String name); @Override Transformer getTransformer(); @Override void setResult(Result result); @Override void setSystemId(String systemId); @Override String getSystemId(); @Override void notationDecl(String name, String publicId, String systemId); @Override void unparsedEntityDecl(String name, String publicId, String systemId, String notationName); @Override void startDTD(String name, String publicId, String systemId); @Override void endDTD(); @Override void startEntity(String name); @Override void endEntity(String name); @Override void startCDATA(); @Override void endCDATA(); @Override void comment(char[] ch, int start, int length); @Override void transform(Source source, Result result); @Override void setParameter(String name, Object o); @Override Object getParameter(String name); @Override void clearParameters(); @Override void setURIResolver(URIResolver uriResolver); @Override URIResolver getURIResolver(); @Override void setOutputProperties(Properties properties); @Override Properties getOutputProperties(); @Override void setOutputProperty(String name, 
String value); @Override String getOutputProperty(String name); @Override void setErrorListener(ErrorListener errorListener); @Override ErrorListener getErrorListener(); }
@Test public void transformNotSupported() throws TransformerException { expectedException.expect(UnsupportedOperationException.class); this.filter.transform(source, result); }
AbstractSAXFilter extends Transformer implements TransformerHandler { @Override public void setOutputProperty(String name, String value) throws IllegalArgumentException { throw new IllegalArgumentException("Unsupported param " + name + "."); } protected AbstractSAXFilter(); protected AbstractSAXFilter(ContentHandler nextContentHandler); @Override void setDocumentLocator(Locator locator); @Override void startDocument(); @Override void endDocument(); @Override void startPrefixMapping(String prefix, String nsURI); @Override void endPrefixMapping(String prefix); @Override void startElement(String nsURI, String localName, String qName, Attributes attributes); @Override void endElement(String nsURI, String localName, String qName); @Override void characters(char[] ch, int start, int length); @Override void ignorableWhitespace(char[] ch, int start, int length); @Override void processingInstruction(String target, String data); @Override void skippedEntity(String name); @Override Transformer getTransformer(); @Override void setResult(Result result); @Override void setSystemId(String systemId); @Override String getSystemId(); @Override void notationDecl(String name, String publicId, String systemId); @Override void unparsedEntityDecl(String name, String publicId, String systemId, String notationName); @Override void startDTD(String name, String publicId, String systemId); @Override void endDTD(); @Override void startEntity(String name); @Override void endEntity(String name); @Override void startCDATA(); @Override void endCDATA(); @Override void comment(char[] ch, int start, int length); @Override void transform(Source source, Result result); @Override void setParameter(String name, Object o); @Override Object getParameter(String name); @Override void clearParameters(); @Override void setURIResolver(URIResolver uriResolver); @Override URIResolver getURIResolver(); @Override void setOutputProperties(Properties properties); @Override Properties getOutputProperties(); @Override 
void setOutputProperty(String name, String value); @Override String getOutputProperty(String name); @Override void setErrorListener(ErrorListener errorListener); @Override ErrorListener getErrorListener(); }
@Test public void noOutputPropertySupported() { expectedException.expect(IllegalArgumentException.class); filter.setOutputProperty("name", "value"); }
AbstractSAXFilter extends Transformer implements TransformerHandler { @Override public String getOutputProperty(String name) throws IllegalArgumentException { return null; } protected AbstractSAXFilter(); protected AbstractSAXFilter(ContentHandler nextContentHandler); @Override void setDocumentLocator(Locator locator); @Override void startDocument(); @Override void endDocument(); @Override void startPrefixMapping(String prefix, String nsURI); @Override void endPrefixMapping(String prefix); @Override void startElement(String nsURI, String localName, String qName, Attributes attributes); @Override void endElement(String nsURI, String localName, String qName); @Override void characters(char[] ch, int start, int length); @Override void ignorableWhitespace(char[] ch, int start, int length); @Override void processingInstruction(String target, String data); @Override void skippedEntity(String name); @Override Transformer getTransformer(); @Override void setResult(Result result); @Override void setSystemId(String systemId); @Override String getSystemId(); @Override void notationDecl(String name, String publicId, String systemId); @Override void unparsedEntityDecl(String name, String publicId, String systemId, String notationName); @Override void startDTD(String name, String publicId, String systemId); @Override void endDTD(); @Override void startEntity(String name); @Override void endEntity(String name); @Override void startCDATA(); @Override void endCDATA(); @Override void comment(char[] ch, int start, int length); @Override void transform(Source source, Result result); @Override void setParameter(String name, Object o); @Override Object getParameter(String name); @Override void clearParameters(); @Override void setURIResolver(URIResolver uriResolver); @Override URIResolver getURIResolver(); @Override void setOutputProperties(Properties properties); @Override Properties getOutputProperties(); @Override void setOutputProperty(String name, String value); @Override String 
getOutputProperty(String name); @Override void setErrorListener(ErrorListener errorListener); @Override ErrorListener getErrorListener(); }
@Test public void getOutputPropertyReturnsNull() { Object value = filter.getOutputProperty("name"); assertThat(value, is(nullValue())); }
XmlSource extends ExternalResource { public Document asDocument() { try { DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory.newInstance(); documentBuilderFactory.setNamespaceAware(true); return documentBuilderFactory.newDocumentBuilder().parse(new InputSource(asInputStream())); } catch (ParserConfigurationException | SAXException | IOException ex) { throw new RuntimeException(ex); } } private XmlSource(byte[] data); static XmlSource withData(String xml); static XmlSource withData(byte[] xml); static XmlSource withDataFrom(Class<?> clazz, String path); URL asUrl(); InputStream asInputStream(); Document asDocument(); byte[] asBytes(); File asFile(); Reader asReader(String charSet); String asString(String charSet); String asString(); XMLStreamReader asXMLStreamReader(); XMLEventReader asXMLEventReader(); }
@Test public void asDocument() { assertThat(xml.asDocument().getDocumentElement().getLocalName(), is("test")); }
SerializerConfigurerAdapter implements SerializerConfigurer { @Override public TransformerHandler getSerializer() { try { SAXTransformerFactory transformerFactory = buildTransformerFactory(); configure(transformerFactory); TransformerHandler transformer = buildTransformer(transformerFactory); configure(transformer.getTransformer()); return transformer; } catch (TransformerConfigurationException ex) { throw new FluentXmlConfigurationException(ex); } } @Override TransformerHandler getSerializer(); }
@Test public void getSerializerWithMock() throws Exception { TransformerHandler serializerBuilt = this.serializerConfigurerAdapter.getSerializer(); assertThat(serializerBuilt, is(this.serializer)); verify(serializerTransformer).setOutputProperty(OutputKeys.INDENT, "yes"); } @Test public void getSerializer() throws Exception { TransformerHandler serializerBuilt = this.serializerConfigurerAdapter.getSerializer(); assertThat(serializerBuilt, not(is(nullValue()))); }
XmlSource extends ExternalResource { public File asFile() { return exportXmlToTempFile(); } private XmlSource(byte[] data); static XmlSource withData(String xml); static XmlSource withData(byte[] xml); static XmlSource withDataFrom(Class<?> clazz, String path); URL asUrl(); InputStream asInputStream(); Document asDocument(); byte[] asBytes(); File asFile(); Reader asReader(String charSet); String asString(String charSet); String asString(); XMLStreamReader asXMLStreamReader(); XMLEventReader asXMLEventReader(); }
@Test public void asFile() throws IOException { String xmlRead = IOUtils.toString(new FileInputStream(xml.asFile()), "UTF-8"); assertThat(xmlRead, is("<test/>")); }
XmlSource extends ExternalResource { public URL asUrl() { File tempFile = exportXmlToTempFile(); return toURL(tempFile); } private XmlSource(byte[] data); static XmlSource withData(String xml); static XmlSource withData(byte[] xml); static XmlSource withDataFrom(Class<?> clazz, String path); URL asUrl(); InputStream asInputStream(); Document asDocument(); byte[] asBytes(); File asFile(); Reader asReader(String charSet); String asString(String charSet); String asString(); XMLStreamReader asXMLStreamReader(); XMLEventReader asXMLEventReader(); }
@Test public void asURL() throws IOException { String xmlRead = IOUtils.toString(xml.asUrl(), "UTF-8"); assertThat(xmlRead, is("<test/>")); }
XmlSource extends ExternalResource { public Reader asReader(String charSet) { try { return new InputStreamReader(asInputStream(), charSet); } catch (IOException ex) { throw new RuntimeException(ex); } } private XmlSource(byte[] data); static XmlSource withData(String xml); static XmlSource withData(byte[] xml); static XmlSource withDataFrom(Class<?> clazz, String path); URL asUrl(); InputStream asInputStream(); Document asDocument(); byte[] asBytes(); File asFile(); Reader asReader(String charSet); String asString(String charSet); String asString(); XMLStreamReader asXMLStreamReader(); XMLEventReader asXMLEventReader(); }
@Test public void asReader() throws IOException { String xmlRead = IOUtils.toString(xml.asReader("UTF-8")); assertThat(xmlRead, is("<test/>")); }
XmlSource extends ExternalResource { public InputStream asInputStream() { return new ByteArrayInputStream(this.data); } private XmlSource(byte[] data); static XmlSource withData(String xml); static XmlSource withData(byte[] xml); static XmlSource withDataFrom(Class<?> clazz, String path); URL asUrl(); InputStream asInputStream(); Document asDocument(); byte[] asBytes(); File asFile(); Reader asReader(String charSet); String asString(String charSet); String asString(); XMLStreamReader asXMLStreamReader(); XMLEventReader asXMLEventReader(); }
@Test public void asStream() throws IOException { String xmlRead = IOUtils.toString(xml.asInputStream(), "UTF-8"); assertThat(xmlRead, is("<test/>")); }
StaxUtils { public static StAXSource newStAXSource(XMLEventReader in) { try { return new StAXSource(in); } catch (XMLStreamException ex) { throw new FluentXmlProcessingException(ex); } } private StaxUtils(); static StAXSource newStAXSource(XMLEventReader in); static XMLStreamWriter newXMLStreamWriter(OutputStream out); static XMLEventWriter newXMLEventWriter(OutputStream out); }
@Test public void newStAXSource() throws Exception { givenXMLEventReaderAtStartOfDocument(); StAXSource source = StaxUtils.newStAXSource(xmlEventReader); assertThat(source, is(not(nullValue()))); } @Test public void newStAXSourceWithXMLEventReaderInInvalidState() throws Exception { expectedException.expect(IllegalStateException.class); givenXMLEventReaderAtEndOfDocument(); StaxUtils.newStAXSource(xmlEventReader); }
StaxUtils { public static XMLStreamWriter newXMLStreamWriter(OutputStream out) { try { return XMLOutputFactory.newFactory().createXMLStreamWriter(out); } catch (XMLStreamException ex) { throw new IllegalStateException(ex); } } private StaxUtils(); static StAXSource newStAXSource(XMLEventReader in); static XMLStreamWriter newXMLStreamWriter(OutputStream out); static XMLEventWriter newXMLEventWriter(OutputStream out); }
@Test public void newXMLStreamWriter() throws Exception { XMLStreamWriter writer = StaxUtils.newXMLStreamWriter(outputStream); assertThat(writer, is(not(nullValue()))); }
ServerOption implements Serializable { public static Gson getGson() { Gson gson = new GsonBuilder(). registerTypeAdapter(ServerOption.class, new ServerOptionParentDeserializer()). excludeFieldsWithoutExposeAnnotation(). create(); return gson; } ServerOption(); ServerOption(final String title); ServerOption(final String title, final String summary); static Gson getGson(); ServerOption getParent(); void setParent(ServerOption parent); String getTitle(); void setTitle(String title); String getSummary(); void setSummary(String summary); List<ServerOption> getEnabledOptionList(); List<ServerOption> getOptionList(); void setOptionList(List<ServerOption> optionList); void addOption(final ServerOption option); Map<String, Object> getParameterMap(); void setParameterMap(Map<String, Object> parameterMap); void addParameter(final String key, final Object value); List<OptionFunction> getFunctionList(); void setFunctionMap(List<OptionFunction> functionList); void addFunction(final OptionFunction value); Map<String, Object> getDependsOnMap(); void setDependsOnMap(Map<String, Object> dependsOnMap); void addDependsOn(final String key, final Object value); boolean isEnabled(); void setEnabled(boolean isEnabled); boolean isChecked(); void setChecked(boolean isChecked); boolean isDefault(); void setDefault(boolean isDefault); boolean isInHierarchyBelow(ServerOption option); @Override String toString(); }
@Test public void testOptionSelectedParametersWithInheritance() throws Exception { URL url = Thread.currentThread().getContextClassLoader().getResource("at/alladin/rmbt/util/model/option/test2.json"); File file = new File(url.getPath()); ServerOptionContainer options = new ServerOptionContainer( new ArrayList<>(Arrays.asList(ServerOption.getGson().fromJson(new FileReader(file), ServerOption.class)))); List<ServerOption> list = options.select(options.getRootOptions().get(0)); assertNotNull("selected option sublist not null", list); List<ServerOption> subList = options.select(list.get(0)); Map<String, Object> paramMap = options.getSelectedParams(); assertNull("selected option 0 sublist is null", subList); assertEquals("selected option 0 param 'option'", "a1", paramMap.get("option")); assertNull("selected option 0 dropped param 'title'", paramMap.get("title")); assertEquals("selected option 0 params size", 2, paramMap.size()); assertEquals("selected option 0 overriden param 'parent_param'", true, paramMap.get("parent_param")); subList = options.select(list.get(1)); paramMap = options.getSelectedParams(); assertNull("selected option 1 sublist is null", subList); assertEquals("selected option 1 param 'option'", "a2", paramMap.get("option")); assertNull("selected option 1 dropped param 'title'", paramMap.get("title")); assertEquals("selected option 1 params size", 2, paramMap.size()); assertEquals("selected option 1 inherited param 'parent_param'", false, paramMap.get("parent_param")); subList = options.select(list.get(2)); paramMap = options.getSelectedParams(); assertNull("selected option 2 sublist is null", subList); assertEquals("selected option 2 param 'option'", "a3", paramMap.get("option")); assertEquals("selected option 2 params size", 4, paramMap.size()); assertEquals("selected option 2 param 'titles'", "a3", paramMap.get("titles")); assertEquals("selected option 2 inherited param 'parent_param'", false, paramMap.get("parent_param")); subList = 
options.select(list.get(3)); paramMap = options.getSelectedParams(); assertNull("selected option 3 sublist is null", subList); assertEquals("selected option 3 params size", 1, paramMap.size()); assertEquals("selected option 3 inherited param 'parent_param'", false, paramMap.get("parent_param")); } @Test public void testOptionParameterOverrides() throws Exception { URL url = Thread.currentThread().getContextClassLoader().getResource("at/alladin/rmbt/util/model/option/test3.json"); File file = new File(url.getPath()); ServerOptionContainer options = new ServerOptionContainer( new ArrayList<>(Arrays.asList(ServerOption.getGson().fromJson(new FileReader(file), ServerOption.class)))); final List<ServerOption> list = options.getRootOptions(); List<ServerOption> sublist = options.select(list.get(0)); assertEquals("option 0 select param size", 0, options.getSelectedParams().size()); sublist = options.select(sublist.get(0)); assertEquals("option 0->0 select param size", 2, options.getSelectedParams().size()); assertEquals("option 0->0 param 'option'", "a1", options.getSelectedParams().get("option")); sublist = options.select(sublist.get(0)); assertEquals("option 0->0->0 select param size", 2, options.getSelectedParams().size()); assertEquals("option 0->0->0 override param 'option'", "a2", options.getSelectedParams().get("option")); sublist = options.select(sublist.get(0)); assertEquals("option 0->0->0->0 select param size", 2, options.getSelectedParams().size()); assertEquals("option 0->0->0->0 override param 'option'", "a3", options.getSelectedParams().get("option")); sublist = options.select(sublist.get(0)); assertEquals("option 0->0->0->0->0 select param size", 2, options.getSelectedParams().size()); assertEquals("option 0->0->0->0->0 override param 'option'", "a3", options.getSelectedParams().get("option")); }
Calculator { public int add(int a, int b) { return a + b; } int add(int a, int b); int multiply(int a, int b); }
@Test public void add() throws Exception { assertThat(underTest.add(3, 3), is(6)); }
PrebuiltRuntimeImageBuildStep extends RuntimeImageBuildStep { @Override protected Artifact getArtifact(BuildContext buildContext) throws BuildStepException { String providedArtifactPath = buildContext.getRuntimeConfig().getArtifact(); if (providedArtifactPath != null) { return Artifact.fromPath(buildContext.getWorkspaceDir().resolve(providedArtifactPath)); } List<Path> artifacts; try { if (Artifact.isAnArtifact(buildContext.getWorkspaceDir())) { Artifact rootArtifact = Artifact.fromPath(buildContext.getWorkspaceDir()); ArtifactType artifactType = rootArtifact.getType(); if (artifactType == EXPLODED_WAR || artifactType == COMPAT_EXPLODED_WAR) { return rootArtifact; } } artifacts = Files.list(buildContext.getWorkspaceDir()) .filter((path) -> !Files.isDirectory(path)) .filter(Artifact::isAnArtifact) .collect(Collectors.toList()); } catch (IOException e) { throw new BuildStepException(e); } if (artifacts.size() < 1) { throw new ArtifactNotFoundException(); } else if (artifacts.size() > 1) { throw new TooManyArtifactsException(artifacts); } else { return Artifact.fromPath(artifacts.get(0)); } } @Inject PrebuiltRuntimeImageBuildStep(JdkServerLookup jdkServerLookup, @CompatDockerImage String compatImageName); }
@Test(expected = BuildStepException.class) public void testSingleJarArtifactWithCustomJdkAndServerOption() throws IOException, BuildStepException { Path workspace = new TestWorkspaceBuilder() .file("foo.jar").build() .build(); RuntimeConfig runtimeConfig = new RuntimeConfig(); runtimeConfig.setJdk("custom_jdk"); runtimeConfig.setServer("custom_server"); AppYaml appYaml = new AppYaml(); appYaml.setRuntimeConfig(runtimeConfig); BuildContext buildContext = new BuildContext(appYaml, workspace, false); assertEquals(workspace.resolve("foo.jar"), prebuiltRuntimeImageBuildStep.getArtifact(buildContext).getPath()); prebuiltRuntimeImageBuildStep.run(buildContext); } @Test(expected = ArtifactNotFoundException.class) public void testUnrecognizedArtifact() throws IOException, BuildStepException { String artifact = "foo.xyz"; Path workspace = new TestWorkspaceBuilder() .file(artifact).build() .build(); BuildContext mockContext = mock(BuildContext.class); when(mockContext.getRuntimeConfig()).thenReturn(new RuntimeConfig()); when(mockContext.getWorkspaceDir()).thenReturn(workspace); assertEquals(artifact, prebuiltRuntimeImageBuildStep.getArtifact(mockContext)); prebuiltRuntimeImageBuildStep.run(mockContext); }
AppYamlFinder { public Optional<Path> findAppYamlFile(Path searchDir) { Preconditions.checkArgument(Files.isDirectory(searchDir)); if (providedConfigPath.isPresent()) { Optional<Path> providedAppYaml = providedConfigPath .map(searchDir::resolve) .filter(this::isValidFilePath); if (!providedAppYaml.isPresent()) { logger.warn("A yaml configuration file was expected, but none was found at the provided " + "path: {}. Proceeding with default configuration values.", providedConfigPath.get()); } return providedAppYaml; } else { return DEFAULT_APP_YAML_LOCATIONS.stream() .map(pathName -> searchDir.resolve(pathName)) .filter(this::isValidFilePath) .findFirst(); } } @Inject @VisibleForTesting AppYamlFinder(@ConfigYamlPath Optional<String> providedConfigPath); Optional<Path> findAppYamlFile(Path searchDir); }
@Test public void testAppYamlAtSrcMainNoEnvVar() throws IOException { String yamlPath = "src/main/appengine/app.yaml"; Path workspace = new TestWorkspaceBuilder() .file(yamlPath).build() .build(); Optional<Path> result = new AppYamlFinder(Optional.empty()).findAppYamlFile(workspace); assertEquals(workspace.resolve(yamlPath), result.get()); } @Test public void testAppYamlWithEnvVar() throws IOException { String pathFromEnvVar = "somedir/arbitraryfile"; Path workspace = new TestWorkspaceBuilder() .file("app.yaml").build() .file(pathFromEnvVar).build() .build(); Optional<Path> result = new AppYamlFinder(Optional.of(pathFromEnvVar)).findAppYamlFile(workspace); assertEquals(workspace.resolve(pathFromEnvVar), result.get()); } @Test public void testAppYamlWithInvalidEnvVar() throws IOException { String appYamlDefaultPath = "app.yaml"; Path workspace = new TestWorkspaceBuilder() .file(appYamlDefaultPath).build() .build(); Optional<Path> result = new AppYamlFinder(Optional.of("path/does/not/exist")).findAppYamlFile(workspace); assertEquals(Optional.empty(), result); } @Test public void testDirectoryAsAppYaml() throws IOException { Path workspace = new TestWorkspaceBuilder().build(); Optional<Path> result = new AppYamlFinder(Optional.empty()).findAppYamlFile(workspace); assertFalse(result.isPresent()); } @Test public void testAppYamlNotPresent() throws IOException { Path workspace = new TestWorkspaceBuilder() .file("other.yaml").build() .build(); Optional<Path> result = new AppYamlFinder(Optional.empty()).findAppYamlFile(workspace); assertFalse(result.isPresent()); } @Test public void testAppYamlAtRootNoEnvVar() throws IOException { String yamlPath = "app.yaml"; Path workspace = new TestWorkspaceBuilder() .file(yamlPath).build() .build(); Optional<Path> result = new AppYamlFinder(Optional.empty()).findAppYamlFile(workspace); assertEquals(workspace.resolve(yamlPath), result.get()); }
Artifact { public static boolean isAnArtifact(Path path) { try { fromPath(path); return true; } catch (IllegalArgumentException e) { return false; } } Artifact(ArtifactType type, Path path); static Artifact fromPath(Path path); Path getPath(); ArtifactType getType(); static boolean isAnArtifact(Path path); @Override String toString(); }
@Test public void testInvalidArtifacts() throws IOException { for (Path p : invalidArtifacts) { assertFalse(Artifact.isAnArtifact(p)); } }
Artifact { public static Artifact fromPath(Path path) { String extension = com.google.common.io.Files.getFileExtension(path.toString()); if (Files.exists(path.resolve("WEB-INF"))) { if (Files.exists(path.resolve("WEB-INF/appengine-web.xml"))) { return new Artifact(ArtifactType.COMPAT_EXPLODED_WAR, path); } return new Artifact(ArtifactType.EXPLODED_WAR, path); } else if (extension.equalsIgnoreCase("war")) { return new Artifact(ArtifactType.WAR, path); } else if (extension.equalsIgnoreCase("jar")) { return new Artifact(ArtifactType.JAR, path); } else { throw new IllegalArgumentException("The file at path " + path + " is not a valid Java " + "artifact. Expected a JAR, WAR, or exploded WAR artifact"); } } Artifact(ArtifactType type, Path path); static Artifact fromPath(Path path); Path getPath(); ArtifactType getType(); static boolean isAnArtifact(Path path); @Override String toString(); }
@Test public void testFromPathInvalidArtifact() { int thrown = 0; for (Path p : invalidArtifacts) { try { Artifact.fromPath(p); } catch (IllegalArgumentException e) { thrown++; } } assertEquals(invalidArtifacts.size(), thrown); }
JdkServerLookup { public String lookupJdkImage(String jdk) { String image; if (jdk == null) { image = this.jdkRuntimeMap.get(KEY_WILDCARD); } else { image = this.jdkRuntimeMap.get(jdk); } if (image == null) { throw new IllegalArgumentException( String.format("The provided runtime_config.jdk option '%s'" + " is invalid for JAR deployments. Please use a supported jdk option: %s", Strings.nullToEmpty(jdk), getAvailableJdks())); } return image; } JdkServerLookup(String[] jdkRuntimeMap, String[] serverRuntimeMap); Set<String> getAvailableJdks(); Set<String> getAvailableJdkServerPairs(); String lookupJdkImage(String jdk); String lookupServerImage(String jdk, String serverType); static final String KEY_WILDCARD; static final String KEY_DELIMITER; }
@Test public void testLookupJdkImageDefault() { assertEquals("defaultjdk", jdkServerLookup.lookupJdkImage(null)); } @Test public void testLookupJdkImageNonDefault() { assertEquals("jdk:old", jdkServerLookup.lookupJdkImage("oldjdk")); } @Test(expected = IllegalArgumentException.class) public void testLookupJdkImageNonexistent() { assertNull(jdkServerLookup.lookupJdkImage("invalid_jdk")); jdkServerLookupMergedDefaultSettings.lookupJdkImage("invalid_jdk"); }
JdkServerLookup { public String lookupServerImage(String jdk, String serverType) { String image = this.serverRuntimeMap.get(buildServerMapKey(jdk, serverType)); if (image == null) { throw new IllegalArgumentException(String.format("The provided runtime_config.jdk and " + "runtime_config.server configuration (runtime_config.jdk: '%s', " + "runtime_config.server: '%s') is invalid for WAR " + "deployments. Please use a supported " + "jdk/server combination: %s", Strings.nullToEmpty(jdk), Strings.nullToEmpty(serverType), getAvailableJdkServerPairs())); } return image; } JdkServerLookup(String[] jdkRuntimeMap, String[] serverRuntimeMap); Set<String> getAvailableJdks(); Set<String> getAvailableJdkServerPairs(); String lookupJdkImage(String jdk); String lookupServerImage(String jdk, String serverType); static final String KEY_WILDCARD; static final String KEY_DELIMITER; }
@Test public void testLookupServerImageDefaultServerAndJdk() { assertEquals("bothdefaults", jdkServerLookup.lookupServerImage(null, null)); } @Test public void testLookupServerImageDefaultServerAndNonDefaultJdk() { assertEquals("newjdk:defaultserver", jdkServerLookup.lookupServerImage("newjdk", null)); } @Test public void testLookupServerImageNonDefaultServerDefaultJdk() { assertEquals("defaultjdk:server1", jdkServerLookup.lookupServerImage(null, "server1")); } @Test public void testLookupServerImageNonDefaultServerAndNonDefaultJdk() { assertEquals("server1:old", jdkServerLookup.lookupServerImage("oldjdk", "server1")); } @Test(expected = IllegalArgumentException.class) public void testLookupServerImageInvalidJdk() { assertNull(jdkServerLookup.lookupServerImage("invalid_jdk", null)); jdkServerLookupMergedDefaultSettings.lookupServerImage("invalid_jdk", null); } @Test(expected = IllegalArgumentException.class) public void testLookupServerImageInvalidServer() { assertNull(jdkServerLookup.lookupServerImage(null, "invalid_server")); jdkServerLookupMergedDefaultSettings.lookupServerImage(null, "invalid_server"); }
BuildContext { public boolean isSourceBuild() { try { return !disableSourceBuild && (!Strings.isNullOrEmpty( appYaml.getRuntimeConfig().getBuildScript()) || getBuildTool().isPresent()); } catch (IOException e) { throw new RuntimeException(e); } } @Inject @VisibleForTesting BuildContext(@Assisted AppYaml appYaml, @Assisted Path workspaceDir, @DisableSourceBuild boolean disableSourceBuild); RuntimeConfig getRuntimeConfig(); Path getWorkspaceDir(); StringLineAppender getDockerfile(); StringLineAppender getDockerignore(); Optional<Path> getBuildArtifactLocation(); void setBuildArtifactLocation(Optional<Path> buildArtifactLocation); boolean isSourceBuild(); boolean isCompatEnabled(); void writeDockerResources(); Optional<BuildTool> getBuildTool(); }
@Test public void testIsSourceBuildWithBuildScript() { runtimeConfig.setBuildScript("build script"); assertTrue(initBuildContext().isSourceBuild()); } @Test public void testIsSourceBuildWithPomXmlPresent() throws IOException { workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .build(); assertTrue(initBuildContext().isSourceBuild()); } @Test public void testIsSourceBuildWithBuildGradlePresent() throws IOException { workspace = new TestWorkspaceBuilder() .file("build.gradle").build() .build(); assertTrue(initBuildContext().isSourceBuild()); } @Test public void testIsSourceBuildDisabled() throws IOException { disableSourceBuild = true; workspace = new TestWorkspaceBuilder() .file("build.gradle").build() .build(); assertFalse(initBuildContext().isSourceBuild()); }
MavenBuildStep implements BuildStep { @Override public void run(BuildContext buildContext) throws BuildStepException { buildContext.getDockerfile() .appendLine("FROM " + mavenDockerImage + " as " + DOCKERFILE_BUILD_STAGE) .appendLine("ADD . .") .appendLine("RUN " + getMavenExecutable(buildContext) + " -B -DskipTests clean install") .appendLine(); buildContext.setBuildArtifactLocation(Optional.of(Paths.get("target"))); } @Inject MavenBuildStep(@MavenDockerImage String mavenDockerImage); @Override void run(BuildContext buildContext); }
@Test public void testRunWithWrapper() throws IOException, BuildStepException { Path workspace = new TestWorkspaceBuilder() .file("mvnw").setIsExecutable(true).build() .build(); when(buildContext.getWorkspaceDir()).thenReturn(workspace); mavenBuildStep.run(buildContext); assertBuild(); assertTrue(dockerfileBuilder.toString().contains("RUN ./mvnw " + "-B -DskipTests clean install\n")); } @Test public void testRunWithSystemGradle() throws IOException, BuildStepException { Path workspace = new TestWorkspaceBuilder() .build(); when(buildContext.getWorkspaceDir()).thenReturn(workspace); mavenBuildStep.run(buildContext); assertBuild(); assertTrue(dockerfileBuilder.toString().contains("RUN mvn -B -DskipTests clean install\n")); }
BuildContext { public Optional<BuildTool> getBuildTool() throws IOException { return Files.list(workspaceDir) .filter((path) -> Files.isRegularFile(path)) .filter(BuildTool::isABuildFile) .sorted(Comparator.comparing(BuildTool::getForBuildFile)) .findFirst() .map(BuildTool::getForBuildFile); } @Inject @VisibleForTesting BuildContext(@Assisted AppYaml appYaml, @Assisted Path workspaceDir, @DisableSourceBuild boolean disableSourceBuild); RuntimeConfig getRuntimeConfig(); Path getWorkspaceDir(); StringLineAppender getDockerfile(); StringLineAppender getDockerignore(); Optional<Path> getBuildArtifactLocation(); void setBuildArtifactLocation(Optional<Path> buildArtifactLocation); boolean isSourceBuild(); boolean isCompatEnabled(); void writeDockerResources(); Optional<BuildTool> getBuildTool(); }
@Test public void testGetBuildToolWithNone() throws IOException { assertFalse(initBuildContext().getBuildTool().isPresent()); } @Test public void testGetBuildToolWithMavenAndGradle() throws IOException { workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .file("build.gradle").build() .build(); assertEquals(BuildTool.MAVEN, initBuildContext().getBuildTool().get()); } @Test public void testGetBuildToolWithMaven() throws IOException { workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .build(); assertEquals(BuildTool.MAVEN, initBuildContext().getBuildTool().get()); } @Test public void testGetBuildToolWithGradle() throws IOException { workspace = new TestWorkspaceBuilder() .file("build.gradle").build() .build(); assertEquals(BuildTool.GRADLE, initBuildContext().getBuildTool().get()); }
BuildContext { public boolean isCompatEnabled() { return appYaml.getBetaSettings().isEnableAppEngineApis(); } @Inject @VisibleForTesting BuildContext(@Assisted AppYaml appYaml, @Assisted Path workspaceDir, @DisableSourceBuild boolean disableSourceBuild); RuntimeConfig getRuntimeConfig(); Path getWorkspaceDir(); StringLineAppender getDockerfile(); StringLineAppender getDockerignore(); Optional<Path> getBuildArtifactLocation(); void setBuildArtifactLocation(Optional<Path> buildArtifactLocation); boolean isSourceBuild(); boolean isCompatEnabled(); void writeDockerResources(); Optional<BuildTool> getBuildTool(); }
@Test public void testIsForceCompatRuntimeWithBetaSettingsFalse() { betaSettings.setEnableAppEngineApis(false); assertFalse(initBuildContext().isCompatEnabled()); } @Test public void testIsForceCompatRuntimeWithBetaSettingsTrue() { betaSettings.setEnableAppEngineApis(true); assertTrue(initBuildContext().isCompatEnabled()); }
RootModule extends AbstractModule { @Provides protected JdkServerLookup provideJdkServerLookup() throws IOException { return this.jdkServerLookup; } RootModule(JdkServerLookup jdkServerLookup, String compatImage, String mavenDockerImage, String gradleDockerImage, boolean disableSourceBuild, Map<String, Object> commandLineOverrideSettings); RootModule(JdkServerLookup jdkServerLookup, String compatImage, String mavenDockerImage, String gradleDockerImage, boolean disableSourceBuild); }
@Test(expected = IllegalArgumentException.class) public void testProvideJdkServerLookupMissingServerDefault() throws IOException { String[] jdkMappings = {"*=gcr.io/foo"}; String[] serverMappings = {"foo=gcr.io/foo"}; new RootModule(Application.mergeSettingsWithDefaults(serverMappings, jdkMappings), COMPAT_IMAGE, MVN_IMAGE, GRADLE_IMAGE, DISABLE_BUILD) .provideJdkServerLookup(); } @Test(expected = IllegalArgumentException.class) public void testProvideJdkServerLookupBadArgFormat() throws IOException { String[] jdkMappings = {"*=gcr.io/foo"}; String[] serverMappings = {"foo=gcr.io/foo=bar"}; new RootModule(Application.mergeSettingsWithDefaults(serverMappings, jdkMappings), COMPAT_IMAGE, MVN_IMAGE, GRADLE_IMAGE, DISABLE_BUILD) .provideJdkServerLookup(); } @Test public void testProvideJdkServerLookup() throws IOException { String[] jdkMappings = { "*=gcr.io/jdk:latest", "someJdk=gcr.io/jdk:other" }; String[] serverMappings = { "*|*=gcr.io/server:latest", "key1|*=gcr.io/server:version" }; JdkServerLookup jdkServerLookup = new RootModule(Application.mergeSettingsWithDefaults(jdkMappings, serverMappings), COMPAT_IMAGE, MVN_IMAGE, GRADLE_IMAGE, DISABLE_BUILD) .provideJdkServerLookup(); assertEquals("gcr.io/jdk:latest", jdkServerLookup.lookupJdkImage(null)); assertEquals("gcr.io/jdk:other", jdkServerLookup.lookupJdkImage("someJdk")); assertEquals("gcr.io/server:latest", jdkServerLookup.lookupServerImage(null, null)); assertEquals("gcr.io/server:version", jdkServerLookup.lookupServerImage("key1", null)); } @Test public void testDefaultSettingsNoCommandLineGiven() throws IOException { JdkServerLookup jdkServerLookup = new RootModule(Application.mergeSettingsWithDefaults(null, null), COMPAT_IMAGE, MVN_IMAGE, GRADLE_IMAGE, DISABLE_BUILD, Collections.emptyMap()) .provideJdkServerLookup(); assertEquals("gcr.io/google-appengine/jetty:9", jdkServerLookup.lookupServerImage("*", "*")); assertEquals("gcr.io/google-appengine/tomcat:8", jdkServerLookup.lookupServerImage("openjdk8", 
"tomcat")); assertEquals("gcr.io/google-appengine/tomcat:latest", jdkServerLookup.lookupServerImage("*", "tomcat")); assertEquals("gcr.io/google-appengine/openjdk:8", jdkServerLookup.lookupJdkImage("*")); assertEquals("gcr.io/google-appengine/openjdk:8", jdkServerLookup.lookupJdkImage("openjdk8")); assertEquals("gcr.io/google-appengine/openjdk:9", jdkServerLookup.lookupJdkImage("openjdk9")); } @Test public void testDefaultSettingsPartialCommandLineGiven() throws IOException { String[] jdkMappings = {"*=gcr.io/jdk:latest"}; String[] serverMappings = {"*|*=gcr.io/server:latest", " openjdk8 | tomcat = gcr.io/google-appengine/tomcat:8-many-spaces"}; JdkServerLookup jdkServerLookup = new RootModule(Application.mergeSettingsWithDefaults(jdkMappings, serverMappings), COMPAT_IMAGE, MVN_IMAGE, GRADLE_IMAGE, DISABLE_BUILD, Collections.emptyMap()) .provideJdkServerLookup(); assertEquals("gcr.io/server:latest", jdkServerLookup.lookupServerImage("*", "*")); assertEquals("gcr.io/google-appengine/tomcat:8-many-spaces", jdkServerLookup.lookupServerImage("openjdk8", "tomcat")); assertEquals("gcr.io/google-appengine/tomcat:latest", jdkServerLookup.lookupServerImage("*", "tomcat")); assertEquals("gcr.io/jdk:latest", jdkServerLookup.lookupJdkImage("*")); assertEquals("gcr.io/google-appengine/openjdk:8", jdkServerLookup.lookupJdkImage("openjdk8")); assertEquals("gcr.io/google-appengine/openjdk:9", jdkServerLookup.lookupJdkImage("openjdk9")); }
RootModule extends AbstractModule { @Override protected void configure() { bind(new TypeLiteral<Optional<String>>(){}) .annotatedWith(ConfigYamlPath.class) .toInstance(Optional.ofNullable(System.getenv(CONFIG_YAML_ENV_VAR))); bind(new TypeLiteral<Map<String, Object>>() { }) .annotatedWith(CommandLineOverrideSettings.class) .toInstance(commandLineOverrideSettings); bind(String.class) .annotatedWith(CompatDockerImage.class) .toInstance(compatImage); bind(String.class) .annotatedWith(MavenDockerImage.class) .toInstance(mavenDockerImage); bind(String.class) .annotatedWith(GradleDockerImage.class) .toInstance(gradleDockerImage); bind(Boolean.class) .annotatedWith(DisableSourceBuild.class) .toInstance(disableSourceBuild); bind(new TypeLiteral<YamlParser<AppYaml>>(){}) .to(AppYamlParser.class); bind(AppYamlFinder.class); install(new FactoryModuleBuilder() .build(BuildStepFactory.class)); install(new FactoryModuleBuilder() .build(BuildContextFactory.class)); } RootModule(JdkServerLookup jdkServerLookup, String compatImage, String mavenDockerImage, String gradleDockerImage, boolean disableSourceBuild, Map<String, Object> commandLineOverrideSettings); RootModule(JdkServerLookup jdkServerLookup, String compatImage, String mavenDockerImage, String gradleDockerImage, boolean disableSourceBuild); }
@Test public void testConfigure() { String[] jdkMappings = {"*=gcr.io/jdk:latest"}; String[] serverMappings = {"*|*=gcr.io/server:latest"}; Guice.createInjector( new RootModule(Application.mergeSettingsWithDefaults(jdkMappings, serverMappings), COMPAT_IMAGE, MVN_IMAGE, GRADLE_IMAGE, DISABLE_BUILD)) .getInstance(BuildPipelineConfigurator.class); }
BuildPipelineConfigurator { public void generateDockerResources(Path workspaceDir) throws BuildStepException, IOException { BuildContext buildContext = configureBuildContext(workspaceDir); List<BuildStep> steps = new ArrayList<>(); if (buildContext.isSourceBuild()) { String buildScript = buildContext.getRuntimeConfig().getBuildScript(); if (!Strings.isNullOrEmpty(buildScript)) { steps.add(buildStepFactory.createScriptExecutionBuildStep(buildScript)); } else { buildContext.getBuildTool() .ifPresent(buildTool -> steps.add(getBuildStepForTool(buildTool))); } steps.add(buildStepFactory.createSourceBuildRuntimeImageStep()); } else { steps.add(buildStepFactory.createPrebuiltRuntimeImageBuildStep()); } steps.add(buildStepFactory.createJettyOptionsBuildStep()); for (BuildStep step : steps) { step.run(buildContext); } buildContext.writeDockerResources(); } @Inject BuildPipelineConfigurator(YamlParser<AppYaml> appYamlParser, AppYamlFinder appYamlFinder, BuildStepFactory buildStepFactory, BuildContextFactory buildContextFactory, @CommandLineOverrideSettings Map<String, Object> overrideSettings); void generateDockerResources(Path workspaceDir); }
@Test public void testPrebuiltArtifact() throws BuildStepException, IOException { Path workspace = new TestWorkspaceBuilder() .file("foo.war").build() .build(); buildPipelineConfigurator.generateDockerResources(workspace); verify(buildStepFactory, times(1)).createPrebuiltRuntimeImageBuildStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); assertBuildStepsCalledWithRuntimeConfig(new RuntimeConfig(), prebuiltRuntimeImageBuildStep, jettyOptionsBuildStep); } @Test public void testMavenSourceBuild() throws BuildStepException, IOException { Path workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .build(); buildPipelineConfigurator.generateDockerResources(workspace); verify(buildStepFactory, times(1)).createMavenBuildStep(); verify(buildStepFactory, times(1)).createSourceBuildRuntimeImageStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); assertBuildStepsCalledWithRuntimeConfig(new RuntimeConfig(), mavenBuildStep, sourceBuildRuntimeImageBuildStep, jettyOptionsBuildStep); } @Test public void testGradleSourceBuild() throws BuildStepException, IOException { Path workspace = new TestWorkspaceBuilder() .file("build.gradle").build() .build(); buildPipelineConfigurator.generateDockerResources(workspace); verify(buildStepFactory, times(1)).createGradleBuildStep(); verify(buildStepFactory, times(1)).createSourceBuildRuntimeImageStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); assertBuildStepsCalledWithRuntimeConfig(new RuntimeConfig(), gradleBuildStep, sourceBuildRuntimeImageBuildStep, jettyOptionsBuildStep); } @Test public void testMavenAndGradleSourceBuild() throws BuildStepException, IOException { Path workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .file("build.gradle").build() .build(); buildPipelineConfigurator.generateDockerResources(workspace); 
verify(buildStepFactory, times(1)).createMavenBuildStep(); verify(buildStepFactory, times(1)).createSourceBuildRuntimeImageStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); assertBuildStepsCalledWithRuntimeConfig(new RuntimeConfig(), mavenBuildStep, sourceBuildRuntimeImageBuildStep, jettyOptionsBuildStep); } @Test public void testMavenBuildWithCustomScriptAndOverrides() throws BuildStepException, IOException { String customScript = "custom mvn goals"; Path workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .file("app.yaml").withContents( "runtime_config:\n" + " jdk: openjdk8\n" + " build_script: " + customScript).build() .build(); Path yamlPath = workspace.resolve("app.yaml"); when(appYamlFinder.findAppYamlFile(workspace)) .thenReturn(Optional.of(yamlPath)); appYaml = spy(new AppYamlParser().parse(yamlPath)); when(appYamlYamlParser.parse(yamlPath)).thenReturn(appYaml); buildPipelineConfigurator = new BuildPipelineConfigurator(appYamlYamlParser, appYamlFinder, buildStepFactory, buildContextFactory, ImmutableMap.of("jdk", "fakeJdk")); buildPipelineConfigurator.generateDockerResources(workspace); verify(buildStepFactory, times(1)).createScriptExecutionBuildStep(eq(customScript)); verify(buildStepFactory, times(1)).createSourceBuildRuntimeImageStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); RuntimeConfig expectedConfig = new RuntimeConfig(); expectedConfig.setBuildScript(customScript); expectedConfig.setJdk("fakeJdk"); assertBuildStepsCalledWithRuntimeConfig(expectedConfig, scriptExecutionBuildStep, sourceBuildRuntimeImageBuildStep, jettyOptionsBuildStep); } @Test public void testPrebuiltArtifactAndMavenBuild() throws BuildStepException, IOException { Path workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .file("foo.war").build() .build(); buildPipelineConfigurator.generateDockerResources(workspace); 
verify(buildStepFactory, times(1)).createMavenBuildStep(); verify(buildStepFactory, times(1)).createSourceBuildRuntimeImageStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); assertBuildStepsCalledWithRuntimeConfig(new RuntimeConfig(), mavenBuildStep, sourceBuildRuntimeImageBuildStep, jettyOptionsBuildStep); } @Test public void testAppYamlIsDockerignored() throws IOException, BuildStepException { String relativeAppYamlPath = "foo/bar/app.yaml"; Path workspace = new TestWorkspaceBuilder() .file(relativeAppYamlPath).withContents("env: flex").build() .file("app.jar").build() .build(); when(appYamlFinder.findAppYamlFile(workspace)) .thenReturn(Optional.of(workspace.resolve(relativeAppYamlPath))); buildPipelineConfigurator.generateDockerResources(workspace); List<String> dockerIgnoreLines = Files.readLines(workspace.resolve(".dockerignore").toFile(), Charset.defaultCharset()); assertTrue(dockerIgnoreLines.contains(relativeAppYamlPath)); } @Test public void testSourceBuildDisable() throws BuildStepException, IOException { Path workspace = new TestWorkspaceBuilder() .file("pom.xml").build() .file("foo.war").build() .build(); disableSourceBuild = true; buildPipelineConfigurator = initConfigurator(); buildPipelineConfigurator.generateDockerResources(workspace); verify(buildStepFactory, times(1)).createPrebuiltRuntimeImageBuildStep(); verify(buildStepFactory, times(1)).createJettyOptionsBuildStep(); verifyNoMoreInteractions(buildStepFactory); assertBuildStepsCalledWithRuntimeConfig(new RuntimeConfig(), prebuiltRuntimeImageBuildStep, jettyOptionsBuildStep); }
JettyOptionsBuildStep implements BuildStep { @Override public void run(BuildContext buildContext) throws BuildStepException { if (buildContext.getRuntimeConfig().getJettyQuickstart()) { buildContext.getDockerfile().appendLine(JETTY_QUICKSTART_COMMAND); } } @Override void run(BuildContext buildContext); }
@Test public void testNoJettyQuickstart() throws IOException, BuildStepException { BuildContext ctx = initBuildContext(new RuntimeConfig()); String dockerfileBefore = ctx.getDockerfile().toString(); JettyOptionsBuildStep buildStep = new JettyOptionsBuildStep(); buildStep.run(ctx); assertEquals(dockerfileBefore, ctx.getDockerfile().toString()); } @Test public void testWithJettyQuickstart() throws IOException, BuildStepException { RuntimeConfig runtimeConfig = new RuntimeConfig(); runtimeConfig.setJettyQuickstart(true); BuildContext ctx = initBuildContext(runtimeConfig); String dockerfileBefore = ctx.getDockerfile().toString(); JettyOptionsBuildStep buildStep = new JettyOptionsBuildStep(); buildStep.run(ctx); String expected = dockerfileBefore + JettyOptionsBuildStep.JETTY_QUICKSTART_COMMAND + "\n"; assertEquals(expected, ctx.getDockerfile().toString()); }
ScriptExecutionBuildStep implements BuildStep { @Override public void run(BuildContext buildContext) throws BuildStepException { buildContext.getDockerfile() .appendLine("FROM " + BUILD_IMAGE + " as " + Constants.DOCKERFILE_BUILD_STAGE) .appendLine("ADD . .") .appendLine("RUN " + buildCommand) .appendLine(); } @Inject ScriptExecutionBuildStep(@Assisted String buildCommand); @Override void run(BuildContext buildContext); }
@Test public void testRun() throws BuildStepException { String buildCommand = "echo $VAR; cd /dir; mvn package"; new ScriptExecutionBuildStep(buildCommand).run(buildContext); String dockerfileContents = dockerfileBuilder.toString(); assertTrue(dockerfileContents.contains("RUN " + buildCommand + '\n')); assertTrue(dockerfileContents.contains("FROM " + ScriptExecutionBuildStep.BUILD_IMAGE + " as builder\n")); }
GradleBuildStep implements BuildStep { @Override public void run(BuildContext buildContext) throws BuildStepException { buildContext.getDockerfile() .appendLine("FROM " + gradleImage + " as " + Constants.DOCKERFILE_BUILD_STAGE) .appendLine("ADD . .") .appendLine("RUN " + getGradleExecutable(buildContext) + " build") .appendLine(); buildContext.setBuildArtifactLocation(Optional.of(Paths.get("build/libs"))); } @Inject GradleBuildStep(@GradleDockerImage String gradleImage); @Override void run(BuildContext buildContext); }
@Test public void testRunWithWrapper() throws IOException, BuildStepException { Path workspace = new TestWorkspaceBuilder() .file("gradlew").setIsExecutable(true).build() .build(); when(buildContext.getWorkspaceDir()).thenReturn(workspace); gradleBuildStep.run(buildContext); assertBuild(); assertTrue(dockerfileBuilder.toString().contains("RUN ./gradlew build\n")); } @Test public void testRunWithSystemGradle() throws IOException, BuildStepException { Path workspace = new TestWorkspaceBuilder() .build(); when(buildContext.getWorkspaceDir()).thenReturn(workspace); gradleBuildStep.run(buildContext); assertBuild(); assertTrue(dockerfileBuilder.toString().contains("RUN gradle build\n")); }
Modis35DimKey { public Dimension getDimensionX() { return getDimension(xDimIndex); } Modis35DimKey(Dimension... dims); int findXDimensionIndex(); int findYDimensionIndex(); static int findStartIndexOfBandVariables(List<Dimension> dimensions); int getRank(); Dimension getDimensionX(); Dimension getDimensionY(); Dimension getDimension(int index); @Override boolean equals(Object o); @Override int hashCode(); }
@Test public void testGetDimensionX() throws Exception { final Dimension yDim = new Dimension("y", 256); final Dimension xDim = new Dimension("x", 512); assertSame(xDim, new Modis35DimKey(yDim, xDim).getDimensionX()); }
VirtualDirBz2 extends VirtualDir { @Override public void close() { if (extractDir != null) { FileUtils.deleteTree(extractDir); extractDir = null; } } VirtualDirBz2(File bz2); @Override String getBasePath(); @Override InputStream getInputStream(String path); @Override File getFile(String path); @Override String[] list(String path); @Override void close(); @Override boolean isCompressed(); @Override boolean isArchive(); }
@Test public void testBz2_getFile_invalidPath() throws IOException { final File testBz2 = getTestFile("MSA_Albedo_test.tar.bz2"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testBz2); assertGetFileInvalidPath(virtualDir); virtualDir.close(); } @Test public void testBz2_list() throws IOException { final File testBz2 = getTestFile("MSA_Albedo_test.tar.bz2"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testBz2); assertCorrectList(virtualDir); virtualDir.close(); } @Test public void testTar_list_invalidPath() throws IOException { final File testTar = getTestFile("MSA_Albedo_test.tar"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testTar); assertListInvalidPath(virtualDir); virtualDir.close(); } @Test public void testBz2_list_invalidPath() throws IOException { final File testBz2 = getTestFile("MSA_Albedo_test.tar.bz2"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testBz2); assertListInvalidPath(virtualDir); virtualDir.close(); } @Test public void testBz2_getInputStream() throws IOException { final File testBz2 = getTestFile("MSA_Albedo_test.tar.bz2"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testBz2); assertExpectedInputStream(virtualDir); virtualDir.close(); } @Test public void testBz2_getInputStream_invalidPath() throws IOException { final File testBz2 = getTestFile("MSA_Albedo_test.tar.bz2"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testBz2); assertInputStreamInvalidPath(virtualDir); virtualDir.close(); } @Test public void testBz2_getFile() throws IOException { final File testBz2 = getTestFile("MSA_Albedo_test.tar.bz2"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testBz2); assertExpectedFile(virtualDir); virtualDir.close(); } @Test public void testTar_getFile_invalidPath() throws IOException { final File testTar = getTestFile("MSA_Albedo_test.tar"); VirtualDirBz2 virtualDir = new VirtualDirBz2(testTar); assertGetFileInvalidPath(virtualDir); virtualDir.close(); }
VirtualDirBz2 extends VirtualDir { static File createTargetDirInTemp(String name) throws IOException { File tempDir = null; String tempDirName = System.getProperty("java.io.tmpdir"); if (tempDirName != null) { tempDir = new File(tempDirName); } if (tempDir == null) { tempDir = new File(new File(System.getProperty("user.home", ".")), ".beam/temp"); if (!tempDir.exists()) { if (!tempDir.mkdirs()) { throw new IOException("unable to create directory: " + tempDir.getAbsolutePath()); } } } File targetDir = new File(tempDir, name); if (!targetDir.exists()) { if (!targetDir.mkdirs()) { throw new IOException("unable to create directory: " + targetDir.getAbsolutePath()); } } return targetDir; } VirtualDirBz2(File bz2); @Override String getBasePath(); @Override InputStream getInputStream(String path); @Override File getFile(String path); @Override String[] list(String path); @Override void close(); @Override boolean isCompressed(); @Override boolean isArchive(); }
@Test public void testCreateTargetDirInTemp_fromSystemPropertyTmpDir() throws IOException { final String tempDirName = System.getProperty("java.io.tmpdir"); assertNotNull(tempDirName); File dirInTemp = null; try { dirInTemp = VirtualDirBz2.createTargetDirInTemp("wurst"); assertNotNull(dirInTemp); assertTrue(dirInTemp.isDirectory()); assertEquals(new File(tempDirName, "wurst").getAbsolutePath(), dirInTemp.getAbsolutePath()); } finally { if (dirInTemp != null) { FileUtils.deleteTree(dirInTemp); } } } @Test public void testCreateTargetDirInTemp_fromSystemPropertyUserHome() throws IOException { final String oldTempDir = System.getProperty("java.io.tmpdir"); System.clearProperty("java.io.tmpdir"); final String userHome = System.getProperty("user.home"); assertNotNull(userHome); File dirInTemp = null; try { dirInTemp = VirtualDirBz2.createTargetDirInTemp("Schneck"); assertNotNull(dirInTemp); assertTrue(dirInTemp.isDirectory()); assertEquals(new File(userHome, ".beam/temp/Schneck").getAbsolutePath(), dirInTemp.getAbsolutePath()); } finally { System.setProperty("java.io.tmpdir", oldTempDir); if (dirInTemp != null) { FileUtils.deleteTree(dirInTemp); } } }
VirtualDirBz2 extends VirtualDir { static String getFilenameFromPath(String path) { int lastSepIndex = path.lastIndexOf("/"); if (lastSepIndex == -1) { lastSepIndex = path.lastIndexOf("\\"); if (lastSepIndex == -1) { return path; } } return path.substring(lastSepIndex + 1, path.length()); } VirtualDirBz2(File bz2); @Override String getBasePath(); @Override InputStream getInputStream(String path); @Override File getFile(String path); @Override String[] list(String path); @Override void close(); @Override boolean isCompressed(); @Override boolean isArchive(); }
@Test public void testGetFilenameFromPath_Windows() { final String fullPath = "C:\\bla\\blubber\\theFile.txt"; assertEquals("theFile.txt", VirtualDirBz2.getFilenameFromPath(fullPath)); final String relativePath = "bla\\schnuffi\\schnatter.txt"; assertEquals("schnatter.txt", VirtualDirBz2.getFilenameFromPath(relativePath)); } @Test public void testGetFilenameFromPath_Linux() { final String fullPath = "/bla/blubber/theFile.txt"; assertEquals("theFile.txt", VirtualDirBz2.getFilenameFromPath(fullPath)); final String relativePath = "bla/schnuffi/schnatter.txt"; assertEquals("schnatter.txt", VirtualDirBz2.getFilenameFromPath(relativePath)); } @Test public void testGetFilenameFromPath_notAPath() { final String file = "theFile.txt"; assertEquals(file, VirtualDirBz2.getFilenameFromPath(file)); }
VirtualDirBz2 extends VirtualDir { static boolean isTarOnly(String filename) { return (filename.endsWith(".tar")); } VirtualDirBz2(File bz2); @Override String getBasePath(); @Override InputStream getInputStream(String path); @Override File getFile(String path); @Override String[] list(String path); @Override void close(); @Override boolean isCompressed(); @Override boolean isArchive(); }
@Test public void testIsTarOnly() { assertTrue(VirtualDirBz2.isTarOnly("test_archive.tar")); assertFalse(VirtualDirBz2.isTarOnly("test_archive.tar.bz2")); assertFalse(VirtualDirBz2.isTarOnly("test_archive.exe")); assertFalse(VirtualDirBz2.isTarOnly("test_archive")); }
Modis35RasterDigest { static boolean rasterDimMatches(Modis35DimKey rasterDim, String rasterDimNameString) { String[] rasterDimNames = rasterDimNameString.split(","); if (rasterDim.getRank() == rasterDimNames.length) { boolean dimsMatch = true; for (int j = 0; j < rasterDim.getRank(); j++) { if (!rasterDim.getDimension(j).getShortName().equals(rasterDimNames[j])) { dimsMatch = false; } } if (dimsMatch) { return true; } } return false; } Modis35RasterDigest(Modis35DimKey rasterDim, Variable[] variables); Modis35DimKey getRasterDim(); Variable[] getRasterVariables(); static Modis35RasterDigest createRasterDigest(String rasterDimNames, final Group... groups); }
@Test public void testRasterDimMatches() throws Exception { final String rasterDimNames = "Cell_Along_Swath_1km,Cell_Across_Swath_1km,QA_Dimension"; Dimension dim1 = new Dimension("Cell_Along_Swath_1km", 10); Dimension dim2 = new Dimension("Cell_Across_Swath_1km", 20); Dimension dim3 = new Dimension("QA_Dimension", 30); Dimension[] dims = new Dimension[]{dim1, dim3, dim2}; Modis35DimKey rasterDim = new Modis35DimKey(dims); boolean result = Modis35RasterDigest.rasterDimMatches(rasterDim, rasterDimNames); assertFalse(result); dims = new Dimension[]{dim1, dim2}; rasterDim = new Modis35DimKey(dims); result = Modis35RasterDigest.rasterDimMatches(rasterDim, rasterDimNames); assertFalse(result); dims = new Dimension[]{dim2, dim1, dim3}; rasterDim = new Modis35DimKey(dims); result = Modis35RasterDigest.rasterDimMatches(rasterDim, rasterDimNames); assertFalse(result); dims = new Dimension[]{dim1, dim2, dim3}; rasterDim = new Modis35DimKey(dims); result = Modis35RasterDigest.rasterDimMatches(rasterDim, rasterDimNames); assertTrue(result); }
VirtualDirBz2 extends VirtualDir { static boolean isBz2Only(String filename) { return (filename.endsWith(".bz2") && !(filename.endsWith(".tar.bz2"))); } VirtualDirBz2(File bz2); @Override String getBasePath(); @Override InputStream getInputStream(String path); @Override File getFile(String path); @Override String[] list(String path); @Override void close(); @Override boolean isCompressed(); @Override boolean isArchive(); }
@Test public void testIsBz2Only() { assertTrue(VirtualDirBz2.isBz2Only("test_archive.bz2")); assertFalse(VirtualDirBz2.isBz2Only("test_archive.tar.bz2")); assertFalse(VirtualDirBz2.isBz2Only("test_archive.tar")); assertFalse(VirtualDirBz2.isBz2Only("test_archive.exe")); assertFalse(VirtualDirBz2.isBz2Only("test_archive")); }
VirtualDirBz2 extends VirtualDir { static boolean isTarBz2(String filename) { return (filename.endsWith(".tar.bz2")); } VirtualDirBz2(File bz2); @Override String getBasePath(); @Override InputStream getInputStream(String path); @Override File getFile(String path); @Override String[] list(String path); @Override void close(); @Override boolean isCompressed(); @Override boolean isArchive(); }
@Test public void testIsTarBz2() { assertTrue(VirtualDirBz2.isTarBz2("test_archive.tar.bz2")); assertFalse(VirtualDirBz2.isTarBz2("test_archive.tar")); assertFalse(VirtualDirBz2.isTarBz2("test_archive.bz2")); assertFalse(VirtualDirBz2.isTarBz2("test_archive")); }
MsgMSAProductReader extends AbstractProductReader { static boolean msgAlbedoFileNameMatches(String fileName) { if (!(fileName.matches(MSA_ALBEDO_HDF_FILENAME_REGEXP))) { throw new IllegalArgumentException("Input file name '" + fileName + "' does not match naming convention: 'HDF5_LSASAF_MSG_ALBEDO_<area>_yyyymmddhhmm'"); } return true; } protected MsgMSAProductReader(MsgMSAProductReaderPlugIn readerPlugIn); @Override void close(); }
@Test public void testMsgAlbedoFileNameMatches() { String filename = "bla.txt"; try { MsgMSAProductReader.msgAlbedoFileNameMatches(filename); } catch (Exception e) { assertEquals(true, e instanceof IllegalArgumentException); assertEquals("Input file name '" + filename + "' does not match naming convention: 'HDF5_LSASAF_MSG_ALBEDO_<area>_yyyymmddhhmm'", e.getMessage()); } filename = "HDF5_LSASAF_MSG_ALBEDO_Euro_200601070000.jpg"; try { MsgMSAProductReader.msgAlbedoFileNameMatches(filename); } catch (Exception e) { assertEquals(true, e instanceof IllegalArgumentException); assertEquals("Input file name '" + filename + "' does not match naming convention: 'HDF5_LSASAF_MSG_ALBEDO_<area>_yyyymmddhhmm'", e.getMessage()); } filename = "HDF5_LSASAF_MSG_ALBEDO_Euro_200601070000.dim"; try { MsgMSAProductReader.msgAlbedoFileNameMatches(filename); } catch (Exception e) { assertEquals(true, e instanceof IllegalArgumentException); assertEquals("Input file name '" + filename + "' does not match naming convention: 'HDF5_LSASAF_MSG_ALBEDO_<area>_yyyymmddhhmm'", e.getMessage()); } filename = "HDF5_LSASAF_MSG_ALBEDO_Euro_200601070000.hdf"; assertEquals(true, MsgMSAProductReader.msgAlbedoFileNameMatches(filename)); }
MsgMSAProductReader extends AbstractProductReader { static String getRegionFromAlbedoInputFilename(String albedoInputFilename) { return albedoInputFilename.substring(23, 27); } protected MsgMSAProductReader(MsgMSAProductReaderPlugIn readerPlugIn); @Override void close(); }
@Test public void testGetRegionFromAlbedoInputFilename() throws Exception { String inputFilename = "HDF5_LSASAF_MSG_ALBEDO_Euro_200601070000.bz2"; assertEquals("Euro", MsgMSAProductReader.getRegionFromAlbedoInputFilename(inputFilename)); inputFilename = "HDF5_LSASAF_MSG_ALBEDO_NAfr_200601070000.bz2"; assertEquals("NAfr", MsgMSAProductReader.getRegionFromAlbedoInputFilename(inputFilename)); inputFilename = "HDF5_LSASAF_MSG_ALBEDO_SAfr_200601070000.bz2"; assertEquals("SAfr", MsgMSAProductReader.getRegionFromAlbedoInputFilename(inputFilename)); }
MsgMSAProductReaderPlugIn implements ProductReaderPlugIn { @Override public ProductReader createReaderInstance() { return new MsgMSAProductReader(this); } @Override DecodeQualification getDecodeQualification(Object input); static VirtualDir getInput(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static final String FORMAT_NAME_METEOSAT_MSA; }
@Test public void testCreateReaderInstanceReturnsNewInstanceEachTime() { ProductReader firstInstance = plugIn.createReaderInstance(); assertNotNull(firstInstance); ProductReader secondInstance = plugIn.createReaderInstance(); assertNotSame(secondInstance, firstInstance); }
MeteosatGeoCoding extends AbstractGeoCoding { @Override public GeoPos getGeoPos(PixelPos pixelPos, GeoPos geoPos) { if (geoPos == null) { geoPos = new GeoPos(); } final int x = (int) Math.floor(pixelPos.x); final int y = (int) Math.floor(pixelPos.y); if (x >= 0 && y >= 0 && x < width && y < height) { int index = width * y + x; geoPos.setLocation(latData[index], lonData[index]); } else { geoPos.setInvalid(); } return geoPos; } MeteosatGeoCoding(Band latitude, Band longitude, String regionID); @Override GeoPos getGeoPos(PixelPos pixelPos, GeoPos geoPos); @Override PixelPos getPixelPos(GeoPos geoPos, PixelPos pixelPos); @Override boolean transferGeoCoding(Scene srcScene, Scene destScene, ProductSubsetDef subsetDef); @Override boolean isCrossingMeridianAt180(); @Override boolean canGetPixelPos(); @Override boolean canGetGeoPos(); @Override Datum getDatum(); @Override void dispose(); }
@Test public void testGetGeoPos() throws Exception { final MeteosatGeoCoding gc = createTestGC(); assertTrue(gc.canGetGeoPos()); assertFalse(gc.isCrossingMeridianAt180()); final GeoPos geoPos = new GeoPos(); assertSame(geoPos, gc.getGeoPos(new PixelPos(0.0F, 0.0F), geoPos)); assertEquals(new GeoPos(2.0F, 0.0F), geoPos); assertEquals(new GeoPos(2.0F, 0.0F), gc.getGeoPos(new PixelPos(0.0F, 0.0F), null)); assertEquals(new GeoPos(2.0F, 0.0F), gc.getGeoPos(new PixelPos(0.5F, 0.5F), null)); assertEquals(new GeoPos(1.0F, 0.0F), gc.getGeoPos(new PixelPos(0.0F, 1.0F), null)); assertEquals(new GeoPos(0.0F, 0.0F), gc.getGeoPos(new PixelPos(0.0F, 2.0F), null)); assertEquals(new GeoPos(1.0F, 1.0F), gc.getGeoPos(new PixelPos(1.0F, 1.0F), null)); assertEquals(new GeoPos(0.0F, 2.0F), gc.getGeoPos(new PixelPos(2.0F, 2.0F), null)); assertFalse(gc.getGeoPos(new PixelPos(1.0F, 0.0F), null).isValid()); assertFalse(gc.getGeoPos(new PixelPos(2.0F, 0.0F), null).isValid()); assertFalse(gc.getGeoPos(new PixelPos(2.0F, 1.0F), null).isValid()); }
MeteosatGeoCoding extends AbstractGeoCoding { @Override public PixelPos getPixelPos(GeoPos geoPos, PixelPos pixelPos) { if (pixelPos == null) { pixelPos = new PixelPos(); } if (!initialized) { synchronized (this) { if (!initialized) { initialize(); initialized = true; } } } int si = getSuperLutI(geoPos.lon); int sj = getSuperLutJ(geoPos.lat); PixelBoxLut latLut = latSuperLut[sj][si]; PixelBoxLut lonLut = lonSuperLut[sj][si]; if (latLut == null || lonLut == null) { pixelPos.setInvalid(); return pixelPos; } final PixelBox latPixelBox = latLut.getPixelBox(geoPos.lat); final PixelBox lonPixelBox = lonLut.getPixelBox(geoPos.lon); if (latPixelBox == null || lonPixelBox == null) { pixelPos.setInvalid(); return pixelPos; } int x1 = Math.min(latPixelBox.minX, lonPixelBox.minX); int y1 = Math.min(latPixelBox.minY, lonPixelBox.minY); int x2 = Math.max(latPixelBox.maxX, lonPixelBox.maxX); int y2 = Math.max(latPixelBox.maxY, lonPixelBox.maxY); if (x1 == x2 && y1 == y2) { pixelPos.setLocation(x1 + 0.5F, y1 + 0.5F); return pixelPos; } int w = x2 - x1 + 1; int h = y2 - y1 + 1; if (w <= 0 || h <= 0) { pixelPos.setInvalid(); return pixelPos; } final MeteosatQuadTreeSearch.Result result = new MeteosatQuadTreeSearch.Result(); boolean pixelFound = mqts.search(0, geoPos.lat, geoPos.lon, x1, y1, w, h, result); if (pixelFound) { pixelPos.setLocation(result.getX() + 0.5f, result.getY() + 0.5f); } else { pixelPos.setInvalid(); } return pixelPos; } MeteosatGeoCoding(Band latitude, Band longitude, String regionID); @Override GeoPos getGeoPos(PixelPos pixelPos, GeoPos geoPos); @Override PixelPos getPixelPos(GeoPos geoPos, PixelPos pixelPos); @Override boolean transferGeoCoding(Scene srcScene, Scene destScene, ProductSubsetDef subsetDef); @Override boolean isCrossingMeridianAt180(); @Override boolean canGetPixelPos(); @Override boolean canGetGeoPos(); @Override Datum getDatum(); @Override void dispose(); }
@Test public void testGetPixelPos() throws Exception { final MeteosatGeoCoding gc = createTestGC(); assertTrue(gc.canGetPixelPos()); assertFalse(gc.isCrossingMeridianAt180()); final PixelPos pixelPos = new PixelPos(); assertSame(pixelPos, gc.getPixelPos(new GeoPos(0.0F, 0.0F), pixelPos)); assertEquals(new PixelPos(0.5F, 2.5F), pixelPos); assertEquals(new PixelPos(1.5F, 1.5F), gc.getPixelPos(new GeoPos(1.0F, 1.0F), pixelPos)); }
MisrProductReaderPlugIn implements ProductReaderPlugIn { @Override public ProductReader createReaderInstance() { return new MisrProductReader(this); } @Override DecodeQualification getDecodeQualification(Object input); static VirtualDir getInput(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static final String FORMAT_NAME_MISR; }
@Test public void testCreateReaderInstanceReturnsNewInstanceEachTime() { ProductReader firstInstance = plugIn.createReaderInstance(); assertNotNull(firstInstance); ProductReader secondInstance = plugIn.createReaderInstance(); assertNotSame(secondInstance, firstInstance); }
MisrGeoCoding extends AbstractGeoCoding { @Override public GeoPos getGeoPos(PixelPos pixelPos, GeoPos geoPos) { if (geoPos == null) { geoPos = new GeoPos(); } final int x = (int) Math.floor(pixelPos.x); final int y = (int) Math.floor(pixelPos.y); if (x >= 0 && y >= 0 && x < width && y < height) { int index = width * y + x; geoPos.setLocation(latData[index], lonData[index]); } else { geoPos.setInvalid(); } return geoPos; } MisrGeoCoding(Band latitude, Band longitude, String regionID); @Override GeoPos getGeoPos(PixelPos pixelPos, GeoPos geoPos); @Override PixelPos getPixelPos(GeoPos geoPos, PixelPos pixelPos); @Override boolean transferGeoCoding(Scene srcScene, Scene destScene, ProductSubsetDef subsetDef); @Override boolean isCrossingMeridianAt180(); @Override boolean canGetPixelPos(); @Override boolean canGetGeoPos(); @Override Datum getDatum(); @Override void dispose(); }
@Test public void testGetGeoPos() throws Exception { final MisrGeoCoding gc = createTestGC(); assertTrue(gc.canGetGeoPos()); assertFalse(gc.isCrossingMeridianAt180()); final GeoPos geoPos = new GeoPos(); assertSame(geoPos, gc.getGeoPos(new PixelPos(0.0F, 0.0F), geoPos)); assertEquals(new GeoPos(2.0F, 0.0F), geoPos); assertEquals(new GeoPos(2.0F, 0.0F), gc.getGeoPos(new PixelPos(0.0F, 0.0F), null)); assertEquals(new GeoPos(2.0F, 0.0F), gc.getGeoPos(new PixelPos(0.5F, 0.5F), null)); assertEquals(new GeoPos(1.0F, 0.0F), gc.getGeoPos(new PixelPos(0.0F, 1.0F), null)); assertEquals(new GeoPos(0.0F, 0.0F), gc.getGeoPos(new PixelPos(0.0F, 2.0F), null)); assertEquals(new GeoPos(1.0F, 1.0F), gc.getGeoPos(new PixelPos(1.0F, 1.0F), null)); assertEquals(new GeoPos(0.0F, 2.0F), gc.getGeoPos(new PixelPos(2.0F, 2.0F), null)); assertFalse(gc.getGeoPos(new PixelPos(1.0F, 0.0F), null).isValid()); assertFalse(gc.getGeoPos(new PixelPos(2.0F, 0.0F), null).isValid()); assertFalse(gc.getGeoPos(new PixelPos(2.0F, 1.0F), null).isValid()); }
MisrGeoCoding extends AbstractGeoCoding { @Override public PixelPos getPixelPos(GeoPos geoPos, PixelPos pixelPos) { if (pixelPos == null) { pixelPos = new PixelPos(); } if (!initialized) { synchronized (this) { if (!initialized) { initialize(); initialized = true; } } } int si = getSuperLutI(geoPos.lon); int sj = getSuperLutJ(geoPos.lat); PixelBoxLut latLut = latSuperLut[sj][si]; PixelBoxLut lonLut = lonSuperLut[sj][si]; if (latLut == null || lonLut == null) { pixelPos.setInvalid(); return pixelPos; } final PixelBox latPixelBox = latLut.getPixelBox(geoPos.lat); final PixelBox lonPixelBox = lonLut.getPixelBox(geoPos.lon); if (latPixelBox == null || lonPixelBox == null) { pixelPos.setInvalid(); return pixelPos; } int x1 = Math.min(latPixelBox.minX, lonPixelBox.minX); int y1 = Math.min(latPixelBox.minY, lonPixelBox.minY); int x2 = Math.max(latPixelBox.maxX, lonPixelBox.maxX); int y2 = Math.max(latPixelBox.maxY, lonPixelBox.maxY); if (x1 == x2 && y1 == y2) { pixelPos.setLocation(x1 + 0.5F, y1 + 0.5F); return pixelPos; } int w = x2 - x1 + 1; int h = y2 - y1 + 1; if (w <= 0 || h <= 0) { pixelPos.setInvalid(); return pixelPos; } final MisrQuadTreeSearch.Result result = new MisrQuadTreeSearch.Result(); boolean pixelFound = mqts.search(0, geoPos.lat, geoPos.lon, x1, y1, w, h, result); if (pixelFound) { pixelPos.setLocation(result.getX() + 0.5f, result.getY() + 0.5f); } else { pixelPos.setInvalid(); } return pixelPos; } MisrGeoCoding(Band latitude, Band longitude, String regionID); @Override GeoPos getGeoPos(PixelPos pixelPos, GeoPos geoPos); @Override PixelPos getPixelPos(GeoPos geoPos, PixelPos pixelPos); @Override boolean transferGeoCoding(Scene srcScene, Scene destScene, ProductSubsetDef subsetDef); @Override boolean isCrossingMeridianAt180(); @Override boolean canGetPixelPos(); @Override boolean canGetGeoPos(); @Override Datum getDatum(); @Override void dispose(); }
@Test public void testGetPixelPos() throws Exception { final MisrGeoCoding gc = createTestGC(); assertTrue(gc.canGetPixelPos()); assertFalse(gc.isCrossingMeridianAt180()); final PixelPos pixelPos = new PixelPos(); assertSame(pixelPos, gc.getPixelPos(new GeoPos(0.0F, 0.0F), pixelPos)); assertEquals(new PixelPos(0.5F, 2.5F), pixelPos); assertEquals(new PixelPos(1.5F, 1.5F), gc.getPixelPos(new GeoPos(1.0F, 1.0F), pixelPos)); }
MfgMSAProductReaderPlugIn implements ProductReaderPlugIn { @Override public ProductReader createReaderInstance() { return new MfgMSAProductReader(this); } @Override DecodeQualification getDecodeQualification(Object input); static VirtualDir getInput(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static final String FORMAT_NAME_METEOSAT_MSA; }
@Test public void testCreateReaderInstanceReturnsNewInstanceEachTime() { ProductReader firstInstance = plugIn.createReaderInstance(); assertNotNull(firstInstance); ProductReader secondInstance = plugIn.createReaderInstance(); assertNotSame(secondInstance, firstInstance); }
GMTED2010ElevationModelDescriptor extends AbstractElevationModelDescriptor { static String createTileFilename(int minLat, int minLon) { int lat = Math.abs(minLat); int lon = Math.abs(minLon); String latChar = minLat < 0 ? "S" : "N"; String lonChar = minLon < 0 ? "W" : "E"; return String.format("%s%03d/%d%s%03d%s_" + GMTED_MEA300_TIF, lonChar, lon, lat, latChar, lon, lonChar); } GMTED2010ElevationModelDescriptor(); @Override String getName(); @Override Datum getDatum(); @Override float getNoDataValue(); @Override boolean isDemInstalled(); @Override URL getDemArchiveUrl(); @Override ElevationModel createDem(Resampling resampling); File getTileFile(int lonIndex, int latIndex); static final String NAME; static final int NUM_X_TILES; static final int NUM_Y_TILES; static final int DEGREE_RES_X; static final int DEGREE_RES_Y; static final int TILE_WIDTH; static final int TILE_HEIGHT; static final int NO_DATA_VALUE; static final int RASTER_WIDTH; static final int RASTER_HEIGHT; static final Datum DATUM; }
@Test public void testCreateTileFilename() throws Exception { assertEquals("E000/10N000E" + GMTED_MEA300_TIF, createTileFilename(+10, -0)); assertEquals("E000/10S000E" + GMTED_MEA300_TIF, createTileFilename(-10, -0)); assertEquals("W180/10N180W" + GMTED_MEA300_TIF, createTileFilename(+10, -180)); assertEquals("W180/10S180W" + GMTED_MEA300_TIF, createTileFilename(-10, -180)); assertEquals("E150/10N150E" + GMTED_MEA300_TIF, createTileFilename(+10, +150)); assertEquals("E150/10S150E" + GMTED_MEA300_TIF, createTileFilename(-10, +150)); assertEquals("E000/70N000E" + GMTED_MEA300_TIF, createTileFilename(+70, -0)); assertEquals("E000/90S000E" + GMTED_MEA300_TIF, createTileFilename(-90, -0)); }
GMTED2010ElevationModelDescriptor extends AbstractElevationModelDescriptor { static int getMinLat(int latIndex) { return (NUM_Y_TILES - 1 - latIndex) * DEGREE_RES_Y - 90; } GMTED2010ElevationModelDescriptor(); @Override String getName(); @Override Datum getDatum(); @Override float getNoDataValue(); @Override boolean isDemInstalled(); @Override URL getDemArchiveUrl(); @Override ElevationModel createDem(Resampling resampling); File getTileFile(int lonIndex, int latIndex); static final String NAME; static final int NUM_X_TILES; static final int NUM_Y_TILES; static final int DEGREE_RES_X; static final int DEGREE_RES_Y; static final int TILE_WIDTH; static final int TILE_HEIGHT; static final int NO_DATA_VALUE; static final int RASTER_WIDTH; static final int RASTER_HEIGHT; static final Datum DATUM; }
@Test public void testgetMinLat() throws Exception { assertEquals(-90, GMTED2010ElevationModelDescriptor.getMinLat(8)); assertEquals(-70, GMTED2010ElevationModelDescriptor.getMinLat(7)); assertEquals(-50, GMTED2010ElevationModelDescriptor.getMinLat(6)); assertEquals(-30, GMTED2010ElevationModelDescriptor.getMinLat(5)); assertEquals(-10, GMTED2010ElevationModelDescriptor.getMinLat(4)); assertEquals(+10, GMTED2010ElevationModelDescriptor.getMinLat(3)); assertEquals(+30, GMTED2010ElevationModelDescriptor.getMinLat(2)); assertEquals(+50, GMTED2010ElevationModelDescriptor.getMinLat(1)); assertEquals(+70, GMTED2010ElevationModelDescriptor.getMinLat(0)); }
GMTED2010ElevationModelDescriptor extends AbstractElevationModelDescriptor { static int getMinLon(int lonIndex) { return lonIndex * DEGREE_RES_X - 180; } GMTED2010ElevationModelDescriptor(); @Override String getName(); @Override Datum getDatum(); @Override float getNoDataValue(); @Override boolean isDemInstalled(); @Override URL getDemArchiveUrl(); @Override ElevationModel createDem(Resampling resampling); File getTileFile(int lonIndex, int latIndex); static final String NAME; static final int NUM_X_TILES; static final int NUM_Y_TILES; static final int DEGREE_RES_X; static final int DEGREE_RES_Y; static final int TILE_WIDTH; static final int TILE_HEIGHT; static final int NO_DATA_VALUE; static final int RASTER_WIDTH; static final int RASTER_HEIGHT; static final Datum DATUM; }
@Test public void testgetMinLon() throws Exception { assertEquals(-180, GMTED2010ElevationModelDescriptor.getMinLon(0)); assertEquals(-150, GMTED2010ElevationModelDescriptor.getMinLon(1)); assertEquals(-120, GMTED2010ElevationModelDescriptor.getMinLon(2)); assertEquals(-90, GMTED2010ElevationModelDescriptor.getMinLon(3)); assertEquals(-60, GMTED2010ElevationModelDescriptor.getMinLon(4)); assertEquals(-30, GMTED2010ElevationModelDescriptor.getMinLon(5)); assertEquals(+0, GMTED2010ElevationModelDescriptor.getMinLon(6)); assertEquals(+30, GMTED2010ElevationModelDescriptor.getMinLon(7)); assertEquals(+60, GMTED2010ElevationModelDescriptor.getMinLon(8)); assertEquals(+90, GMTED2010ElevationModelDescriptor.getMinLon(9)); assertEquals(+120, GMTED2010ElevationModelDescriptor.getMinLon(10)); assertEquals(+150, GMTED2010ElevationModelDescriptor.getMinLon(11)); }
BbdrOp extends PixelOperator { static Matrix matrixSquare(double[] doubles) { Matrix matrix = new Matrix(doubles.length, doubles.length); for (int i = 0; i < doubles.length; i++) { for (int j = 0; j < doubles.length; j++) { matrix.set(i, j, doubles[i] * doubles[j]); } } return matrix; } }
// Verifies matrixSquare builds the 3x3 outer product of {2,4,6}: entry (i,j) == in[i]*in[j],
// hence the symmetric result with diagonal {4,16,36}.
@Test public void testMatrixSquare() { double[] in = {2, 4, 6}; Matrix squareMatrix = BbdrOp.matrixSquare(in); assertNotNull(squareMatrix); assertEquals(3, squareMatrix.getColumnDimension()); assertEquals(3, squareMatrix.getRowDimension()); assertEquals(4, squareMatrix.get(0,0), 1E-6); assertEquals(8, squareMatrix.get(1,0), 1E-6); assertEquals(12, squareMatrix.get(2,0), 1E-6); assertEquals(8, squareMatrix.get(0,1), 1E-6); assertEquals(16, squareMatrix.get(1,1), 1E-6); assertEquals(24, squareMatrix.get(2,1), 1E-6); assertEquals(12, squareMatrix.get(0,2), 1E-6); assertEquals(24, squareMatrix.get(1,2), 1E-6); assertEquals(36, squareMatrix.get(2,2), 1E-6); }
// getTg: per-band gaseous transmission, obtained by bilinear interpolation of the lutGas table
// in the (gas, amf) plane. ind_* are the lower grid indices, *_p the fractional positions in the
// grid cell; the four weighted terms are the standard bilinear corner weights.
// NOTE(review): assumes amf/gas lie inside the table range so ind_*+1 is valid — TODO confirm callers clamp.
GasLookupTable { public float[] getTg(float amf, float gas) { int ind_amf = BbdrUtils.getIndexBefore(amf, amfArray); float amf_p = (amf - amfArray[ind_amf]) / (amfArray[ind_amf + 1] - amfArray[ind_amf]); int ind_gas = BbdrUtils.getIndexBefore(gas, gasArray); float gas_p = (gas - gasArray[ind_gas]) / (gasArray[ind_gas + 1] - gasArray[ind_gas]); float[] tg = new float[sensor.getNumBands()]; for (int iWvl = 0; iWvl < tg.length; iWvl++) { tg[iWvl] = (1.0f - amf_p) * (1.0f - gas_p) * lutGas[iWvl][ind_gas][ind_amf] + gas_p * (1.0f - amf_p) * lutGas[iWvl][ind_gas + 1][ind_amf] + (1.0f - gas_p) * amf_p * lutGas[iWvl][ind_gas][ind_amf + 1] + amf_p * gas_p * lutGas[iWvl][ind_gas + 1][ind_amf + 1]; } return tg; } GasLookupTable(Sensor sensor); void load(Product sourceProduct); float getGasMeanVal(); float[] getTg(float amf, float gas); float[][][] getKxTg(float amf, float gas); }
// Spot-checks getTg at two (amf, gas) points against reference transmissions for the first two bands.
@Test public void testGetTg() { float[] tg = gasLookupTable.getTg(2.0f, 0.1f); assertEquals(0.9999f, tg[0], 1.E-4); assertEquals(0.9995f, tg[1], 1.E-4); tg = gasLookupTable.getTg(4.0f, 0.4f); assertEquals(0.9999f, tg[0], 1.E-4); assertEquals(0.9960f, tg[1], 1.E-4); }
// getKxTg: per-band Kx sensitivity terms for gaseous transmission, [band][kx-case][kx] shaped.
// Same bilinear interpolation scheme in the (gas, amf) plane as getTg, applied element-wise to kxLutGas.
// NOTE(review): assumes amf/gas lie inside the table range so ind_*+1 is valid — TODO confirm callers clamp.
GasLookupTable { public float[][][] getKxTg(float amf, float gas) { int ind_amf = BbdrUtils.getIndexBefore(amf, amfArray); float amf_p = (amf - amfArray[ind_amf]) / (amfArray[ind_amf + 1] - amfArray[ind_amf]); int ind_gas = BbdrUtils.getIndexBefore(gas, gasArray); float gas_p = (gas - gasArray[ind_gas]) / (gasArray[ind_gas + 1] - gasArray[ind_gas]); float[][][] kx_tg = new float[sensor.getNumBands()][2][2]; for (int iWvl = 0; iWvl < sensor.getNumBands(); iWvl++) { for (int iKxcase = 0; iKxcase < kx_tg[iWvl].length; iKxcase++) { for (int iKx = 0; iKx < kx_tg[iWvl][iKxcase].length; iKx++) { kx_tg[iWvl][iKxcase][iKx] = (1.0f - amf_p) * (1.0f - gas_p) * kxLutGas[iWvl][ind_gas][ind_amf][iKxcase][iKx] + gas_p * (1.0f - amf_p) * kxLutGas[iWvl][ind_gas + 1][ind_amf][iKxcase][iKx] + (1.0f - gas_p) * amf_p * kxLutGas[iWvl][ind_gas][ind_amf + 1][iKxcase][iKx] + amf_p * gas_p * kxLutGas[iWvl][ind_gas + 1][ind_amf + 1][iKxcase][iKx]; } } } return kx_tg; } GasLookupTable(Sensor sensor); void load(Product sourceProduct); float getGasMeanVal(); float[] getTg(float amf, float gas); float[][][] getKxTg(float amf, float gas); }
// Spot-checks getKxTg at one (amf, gas) point for bands 0 and 3, kx-case 0, both kx indices.
@Test public void testGetKxTg() { float[][][] kxtg = gasLookupTable.getKxTg(2.0f, 0.1f); assertEquals(-1.6943E-8f, kxtg[0][0][0], 1.E-4); assertEquals(5.07743E-8f, kxtg[0][0][1], 1.E-4); assertEquals(1.97026E-8f, kxtg[3][0][0], 1.E-4); assertEquals(-4.6359E-8f, kxtg[3][0][1], 1.E-4); }
// GasLookupTable snippet (class framing preserved from the extracted source).
GasLookupTable {
    /**
     * Converts view angles (degrees) to geometric air-mass factors, element-wise:
     * amf = 2 / cos(angle). An angle of 0 yields exactly 2.
     */
    static float[] convertAngArrayToAmfArray(float[] ang) {
        final float[] amfValues = new float[ang.length];
        for (int idx = 0; idx < ang.length; idx++) {
            final double cosAngle = Math.cos(Math.toRadians(ang[idx]));
            amfValues[idx] = (float) (2.0 / cosAngle);
        }
        return amfValues;
    } GasLookupTable(Sensor sensor); void load(Product sourceProduct); float getGasMeanVal(); float[] getTg(float amf, float gas); float[][][] getKxTg(float amf, float gas); }
// Verifies convertAngArrayToAmfArray: amf = 2/cos(angle), e.g. 0deg -> 2.0 and 60deg -> 4.0.
@Test public void testConvertAngArrayToAmfArray() { float[] ang = new float[]{0.0f, 20.0f, 40.0f, 50.0f, 60.0f, 65.0f, 70.0f}; float[] amf = GasLookupTable.convertAngArrayToAmfArray(ang); assertEquals(2.0f, amf[0], 1.E-4); assertEquals(2.12836f, amf[1], 1.E-4); assertEquals(2.61081f, amf[2], 1.E-4); assertEquals(3.11145f, amf[3], 1.E-4); assertEquals(4.0f, amf[4], 1.E-4); assertEquals(4.7324f, amf[5], 1.E-4); assertEquals(5.84761f, amf[6], 1.E-4); }
// BbdrUtils snippet (class framing preserved from the extracted source).
BbdrUtils {
    /**
     * Returns the lower grid index for linear interpolation of {@code value} over the
     * ascending grid {@code array}: the largest i with array[i] <= value, clamped so that
     * i + 1 is always a valid index. Values below array[1] map to 0; values at or above
     * the last element map to array.length - 2.
     */
    public static int getIndexBefore(float value, float[] array) {
        int pos = 0;
        // advance to the first element strictly greater than value
        while (pos < array.length && value >= array[pos]) {
            pos++;
        }
        if (pos == 0) {
            return 0;                    // below the grid: clamp to the first cell
        }
        if (pos == array.length) {
            return array.length - 2;     // above the grid: clamp to the last cell
        }
        return pos - 1;
    } static AotLookupTable getAotLookupTable(Sensor sensor); static LookupTable getAotKxLookupTable(Sensor sensor); static NskyLookupTable getNskyLookupTableDw(Sensor sensor); static NskyLookupTable getNskyLookupTableUp(Sensor sensor); static int getIndexBefore(float value, float[] array); static float getImageMeanValue(PlanarImage image); static int getDoyFromYYYYMMDD(String yyyymmdd); }
// Verifies getIndexBefore clamping: below-range values map to 0, in-range to the lower cell
// index, and above-range values to length-2 so index+1 stays valid for interpolation.
@Test public void testGetIndexBefore() { float[] values = {1.8f, 2.2f, 4.5f, 5.5f}; assertEquals(0, BbdrUtils.getIndexBefore(1.2f, values)); assertEquals(1, BbdrUtils.getIndexBefore(2.5f, values)); assertEquals(2, BbdrUtils.getIndexBefore(4.6f, values)); assertEquals(2, BbdrUtils.getIndexBefore(7.7f, values)); }
// BbdrUtils snippet (class framing preserved from the extracted source).
BbdrUtils {
    /**
     * Converts a date string in 'yyyyMMdd' form to its day of year (1..366).
     * Returns -1 (after logging the stack trace) if the string is too short or
     * contains non-numeric parts — original best-effort behavior preserved.
     *
     * Fix: the two identical catch blocks are collapsed into one multi-catch.
     */
    public static int getDoyFromYYYYMMDD(String yyyymmdd) {
        Calendar cal = Calendar.getInstance();
        int doy = -1;
        try {
            final int year = Integer.parseInt(yyyymmdd.substring(0, 4));
            final int month = Integer.parseInt(yyyymmdd.substring(4, 6)) - 1;  // Calendar months are 0-based
            final int day = Integer.parseInt(yyyymmdd.substring(6, 8));
            cal.set(year, month, day);
            doy = cal.get(Calendar.DAY_OF_YEAR);
        } catch (StringIndexOutOfBoundsException | NumberFormatException e) {
            e.printStackTrace();
        }
        return doy;
    } static AotLookupTable getAotLookupTable(Sensor sensor); static LookupTable getAotKxLookupTable(Sensor sensor); static NskyLookupTable getNskyLookupTableDw(Sensor sensor); static NskyLookupTable getNskyLookupTableUp(Sensor sensor); static int getIndexBefore(float value, float[] array); static float getImageMeanValue(PlanarImage image); static int getDoyFromYYYYMMDD(String yyyymmdd); }
// Verifies yyyyMMdd-to-day-of-year conversion: Jan 1st -> 1, Dec 18th 2007 -> 352.
@Test public void testGetDoyFromYYYYMMDD() { String yyyymmdd = "20070101"; int doy = BbdrUtils.getDoyFromYYYYMMDD(yyyymmdd); assertEquals(1, doy); yyyymmdd = "20071218"; doy = BbdrUtils.getDoyFromYYYYMMDD(yyyymmdd); assertEquals(352, doy); }
// iCloudImpl: classifies a pixel as cloud when the cloud scatter density is the largest
// of the three non-NaN densities. NaN inputs are ignored; if all three are NaN the result
// defaults to cloud (true). Ties: landD/townD must be strictly greater than the running
// maximum to override, so an exact tie keeps the cloud classification.
// NOTE(review): the townD comparison uses the maximum updated by landD, so ordering matters;
// left byte-identical for that reason.
UclCloudDetection { static boolean iCloudImpl(float cloudD, float landD, float townD) { boolean isCloud = true; float highestValue = Float.NaN; if (!Float.isNaN(cloudD)) { highestValue = cloudD; } if (!Float.isNaN(landD)) { if (Float.isNaN(highestValue) || landD > highestValue) { highestValue = landD; isCloud = false; } } if (!Float.isNaN(townD)) { if (Float.isNaN(highestValue) || townD > highestValue) { isCloud = false; } } return isCloud; } UclCloudDetection(ScatterData cloud, ScatterData land, ScatterData town); static UclCloudDetection create(); boolean isCloud(float sdrRed, float sdrGreen, float sdrBlue); static float[] rgb2hsv(float red, float green, float blue); }
// Verifies iCloudImpl: all-NaN defaults to cloud; cloud wins when its density is highest;
// a higher town or land density flips the classification to not-cloud.
@Test public void testisCloudImpl() throws Exception { assertTrue(UclCloudDetection.iCloudImpl(Float.NaN, Float.NaN, Float.NaN)); assertTrue(UclCloudDetection.iCloudImpl(1f, Float.NaN, Float.NaN)); assertTrue(UclCloudDetection.iCloudImpl(1f, 0.5f, Float.NaN)); assertFalse(UclCloudDetection.iCloudImpl(1f, 0.5f, 2f)); assertFalse(UclCloudDetection.iCloudImpl(Float.NaN, 0.5f, 2f)); }
// MfgMSAProductReader snippet (class framing preserved from the extracted source).
MfgMSAProductReader extends AbstractProductReader {
    /**
     * Validates an MFG MSA Albedo file name against the convention
     * 'MSA_Albedo_L2.0_Vm.nn_sss_yyyy_fff_lll.HDF' (extension case-insensitive).
     *
     * Fix: literal dots in the regex are now escaped ("\\.") — previously the
     * unescaped '.' matched any character, so names like "MSA_Albedo_L2X0_..."
     * were wrongly accepted.
     *
     * @throws IllegalArgumentException if the name does not match the convention
     * @return true when the name matches
     */
    static boolean mfgAlbedoFileNameMatches(String fileName) {
        if (!(fileName.matches("MSA_Albedo_L2\\.0_V[0-9]\\.[0-9]{2}_[0-9]{3}_[0-9]{4}_[0-9]{3}_[0-9]{3}\\.(?i)(hdf)"))) {
            throw new IllegalArgumentException("Input file name '" + fileName + "' does not match naming convention: 'MSA_Albedo_L2.0_Vm.nn_sss_yyyy_fff_lll.HDF'");
        }
        return true;
    } protected MfgMSAProductReader(MfgMSAProductReaderPlugIn readerPlugIn); @Override void close(); }
/**
 * Verifies mfgAlbedoFileNameMatches: invalid names throw IllegalArgumentException
 * with the documented message; valid names match regardless of extension case.
 *
 * Fix: the original try/catch blocks passed silently when NO exception was thrown;
 * fail() now guards against that.
 */
@Test
public void testMfgAlbedoFileNameMatches() {
    assertAlbedoNameRejected("bla.txt");
    assertAlbedoNameRejected("MSA_Albedo_.01_000_2006_001_010.jpg");
    assertAlbedoNameRejected("MSA_Albedo_L2.0_V2.01_000_2006_001_010.dim");
    // Valid names; extension is case-insensitive.
    assertEquals(true, MfgMSAProductReader.mfgAlbedoFileNameMatches("MSA_Albedo_L2.0_V2.01_000_2006_001_010.hdf"));
    assertEquals(true, MfgMSAProductReader.mfgAlbedoFileNameMatches("MSA_Albedo_L2.0_V2.01_000_2006_001_010.HDF"));
}

// Asserts that the given name is rejected with the documented message.
private void assertAlbedoNameRejected(String filename) {
    try {
        MfgMSAProductReader.mfgAlbedoFileNameMatches(filename);
        fail("Expected IllegalArgumentException for '" + filename + "'");
    } catch (IllegalArgumentException e) {
        assertEquals("Input file name '" + filename + "' does not match naming convention: 'MSA_Albedo_L2.0_Vm.nn_sss_yyyy_fff_lll.HDF'", e.getMessage());
    }
}
// isCompressedFile: true when the file's extension (as reported by FileUtils.getExtension)
// is non-empty and contains one of zip/tar/tgz/gz. Substring check, so e.g. ".tar.gz" also matches.
TiffDirectoryReaderPlugin implements ProductReaderPlugIn { static boolean isCompressedFile(File file) { final String extension = FileUtils.getExtension(file); return !StringUtils.isNullOrEmpty(extension) && (extension.contains("zip") || extension.contains("tar") || extension.contains("tgz") || extension.contains("gz")); } @Override DecodeQualification getDecodeQualification(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static VirtualDir getInput(Object input); static final String FORMAT_NAME_TIFF_DIR; }
// Verifies isCompressedFile accepts zip/tar/tgz/tar.gz/gz extensions and rejects
// other extensions and extension-less names.
@Test public void testIsCompressedFile() { assertTrue(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.zip"))); assertTrue(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.tar"))); assertTrue(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.tgz"))); assertTrue(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.tar.gz"))); assertTrue(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.gz"))); assertFalse(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.txt"))); assertFalse(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.doc"))); assertFalse(TiffDirectoryReaderPlugin.isCompressedFile(new File("test.xml"))); assertFalse(TiffDirectoryReaderPlugin.isCompressedFile(new File("test"))); }
// TiffDirectoryReaderPlugin snippet (class framing preserved from the extracted source).
TiffDirectoryReaderPlugin implements ProductReaderPlugIn {
    /**
     * Returns true when the file is a metadata file, i.e. its name ends with
     * "_meta.txt" (case-insensitive). Null files are not metadata files.
     *
     * Fix: the original lower-cased the name twice; the redundant second
     * toLowerCase() call is removed (behavior unchanged).
     */
    static boolean isMetadataFile(File file) {
        if (file == null) {
            return false;
        }
        final String filename = file.getName().toLowerCase();
        return filename.endsWith("_meta.txt");
    } @Override DecodeQualification getDecodeQualification(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static VirtualDir getInput(Object input); static final String FORMAT_NAME_TIFF_DIR; }
// Verifies isMetadataFile: names ending in "_meta.txt" are metadata, TIFF band files are not.
@Test public void testIsMetadataFile() { final File metaFile = new File("bla_HC_meta.txt"); final File nonMetaFile = new File("L5043033_03319950627_B50.TIF"); assertTrue(TiffDirectoryReaderPlugin.isMetadataFile(metaFile)); assertFalse(TiffDirectoryReaderPlugin.isMetadataFile(nonMetaFile)); }
// getInputTypes: returns the shared READER_INPUT_TYPES array of accepted input classes.
// NOTE(review): this exposes the internal array directly; a caller could mutate it.
// Returning a clone would be safer — verify no caller relies on array identity first.
TiffDirectoryReaderPlugin implements ProductReaderPlugIn { @Override public Class[] getInputTypes() { return READER_INPUT_TYPES; } @Override DecodeQualification getDecodeQualification(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static VirtualDir getInput(Object input); static final String FORMAT_NAME_TIFF_DIR; }
// Verifies getInputTypes declares exactly String and File (in that order) as accepted inputs.
@Test public void testGetInputTypes() { final TiffDirectoryReaderPlugin plugin = new TiffDirectoryReaderPlugin(); final Class[] inputTypes = plugin.getInputTypes(); assertEquals(2, inputTypes.length); assertEquals(String.class, inputTypes[0]); assertEquals(File.class, inputTypes[1]); }
// isMatchingArchiveFileName: true for non-empty names that start with the literal prefix "Sample".
// The extension is not checked here — archive-ness is handled separately by isCompressedFile.
TiffDirectoryReaderPlugin implements ProductReaderPlugIn { static boolean isMatchingArchiveFileName(String fileName) { return StringUtils.isNotNullAndNotEmpty(fileName) && fileName.startsWith("Sample"); } @Override DecodeQualification getDecodeQualification(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static VirtualDir getInput(Object input); static final String FORMAT_NAME_TIFF_DIR; }
// Verifies isMatchingArchiveFileName: only names with the "Sample" prefix match; other
// product names, arbitrary archives, and the empty string are rejected.
@Test public void testIsMatchingArchiveFileName() { assertTrue(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("Sample_30m19950627.tgz")); assertTrue(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("Sample_L5_60m19950627.tgz")); assertTrue(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("Sample_LE71810402006015ASN00.tar")); assertTrue(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("Sample_L7_1810402006015ASN00.tar")); assertFalse(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("ATS_TOA_1PPTOM20070110_192521_000000822054_00328_25432_0001.N1")); assertFalse(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("SchnickSchnack.zip")); assertFalse(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("hoppla.txt")); assertFalse(TiffDirectoryReaderPlugin.isMatchingArchiveFileName("")); }
// getMosaicFileRegex: turns a concrete tile file name into a regex matching any tile by
// replacing the first hXXvYY occurrence (matched by the class's tile pattern) with the
// escaped regex "h\d\dv\d\d" (double backslashes because the replacement string itself is parsed).
GlobAlbedoMosaicProductReader extends AbstractProductReader { String getMosaicFileRegex(String filename) { return pattern.matcher(filename).replaceFirst("h\\\\d\\\\dv\\\\d\\\\d"); } protected GlobAlbedoMosaicProductReader(GlobAlbedoMosaicReaderPlugIn readerPlugIn); static File[] getModisPriorTileDirectories(String rootDirString); static File[] getAdamTileDirectories(String rootDirString); static File[] getPriorTileDirectories(String rootDirString, int stage); void setMosaicModisPriors(boolean mosaicModisPriors); void setMosaicNewModisPriors(boolean mosaicNewModisPriors); void setMosaicAdam(boolean mosaicAdam); void setMosaicAdamPriors(boolean mosaicAdamPriors); void setPriorStage(int adamPriorStage); @Override void close(); }
// Verifies getMosaicFileRegex replaces the tile index with the h\d\dv\d\d wildcard
// for both DIMAP and ENVI file-name layouts.
@Test public void testGetMosaicFileRegex() { String dimRegex = mosaicGrid.getMosaicFileRegex("GlobAlbedo.2005129.h18v04.dim"); assertEquals("GlobAlbedo.2005129.h\\d\\dv\\d\\d.dim", dimRegex); String enviRegex = mosaicGrid.getMosaicFileRegex("GlobAlbedo.2005129.h18v04_ENVI.bin"); assertEquals("GlobAlbedo.2005129.h\\d\\dv\\d\\d_ENVI.bin", enviRegex); }
// GlobAlbedoMosaicProductReader snippet (class framing preserved from the extracted source).
GlobAlbedoMosaicProductReader extends AbstractProductReader {
    /**
     * Builds a MosaicTile from a tile file whose name contains an hXXvYY index.
     * Groups 1 and 2 of the class's tile pattern yield the x and y tile indices;
     * the linear index comes from the mosaic definition.
     *
     * Fix: the original ignored matcher.find()'s result, so a non-matching name
     * surfaced as an opaque IllegalStateException from group(); we now fail fast
     * with a message naming the offending file.
     *
     * @throws IllegalArgumentException if the file name contains no tile index
     */
    MosaicTile createMosaicTile(File file) {
        Matcher matcher = pattern.matcher(file.getName());
        if (!matcher.find()) {
            throw new IllegalArgumentException("File name does not contain a tile index (hXXvYY): " + file.getName());
        }
        int x = Integer.parseInt(matcher.group(1));
        int y = Integer.parseInt(matcher.group(2));
        int index = mosaicDefinition.calculateIndex(x, y);
        return new MosaicTile(x, y, index, file);
    } protected GlobAlbedoMosaicProductReader(GlobAlbedoMosaicReaderPlugIn readerPlugIn); static File[] getModisPriorTileDirectories(String rootDirString); static File[] getAdamTileDirectories(String rootDirString); static File[] getPriorTileDirectories(String rootDirString, int stage); void setMosaicModisPriors(boolean mosaicModisPriors); void setMosaicNewModisPriors(boolean mosaicNewModisPriors); void setMosaicAdam(boolean mosaicAdam); void setMosaicAdamPriors(boolean mosaicAdamPriors); void setPriorStage(int adamPriorStage); @Override void close(); }
// Verifies createMosaicTile parses h18v04 into tileX=18, tileY=4, keeps the source file,
// and derives linear index 162 from the mosaic definition.
@Test public void testCreateMosaicTile() { File mosaicFile = new File("GlobAlbedo.2005129.h18v04.dim"); MosaicTile tile = mosaicGrid.createMosaicTile(mosaicFile); assertNotNull(tile); assertSame(mosaicFile, tile.getFile()); assertEquals(18, tile.getTileX()); assertEquals(4, tile.getTileY()); assertEquals(162, tile.getIndex()); }
// GlobAlbedoMosaicProductReader snippet (class framing preserved from the extracted source).
GlobAlbedoMosaicProductReader extends AbstractProductReader {
    /**
     * Derives the product name from a tile file name: everything before the hXXvYY
     * tile index, minus the separator character immediately preceding it
     * (hence matcher.start() - 1).
     *
     * Fix: the original ignored matcher.find()'s result, so a non-matching name
     * surfaced as an opaque IllegalStateException from start(); we now fail fast
     * with a message naming the offending file.
     *
     * @throws IllegalArgumentException if the file name contains no tile index
     */
    String getProductName(File file) {
        String fileName = file.getName();
        Matcher matcher = pattern.matcher(fileName);
        if (!matcher.find()) {
            throw new IllegalArgumentException("File name does not contain a tile index (hXXvYY): " + fileName);
        }
        return fileName.substring(0, matcher.start() - 1);
    } protected GlobAlbedoMosaicProductReader(GlobAlbedoMosaicReaderPlugIn readerPlugIn); static File[] getModisPriorTileDirectories(String rootDirString); static File[] getAdamTileDirectories(String rootDirString); static File[] getPriorTileDirectories(String rootDirString, int stage); void setMosaicModisPriors(boolean mosaicModisPriors); void setMosaicNewModisPriors(boolean mosaicNewModisPriors); void setMosaicAdam(boolean mosaicAdam); void setMosaicAdamPriors(boolean mosaicAdamPriors); void setPriorStage(int adamPriorStage); @Override void close(); }
// Verifies getProductName strips the tile index (and anything after it) plus the preceding
// separator, for both DIMAP and ENVI-style names.
@Test public void testGetProductName() { File mosaicFile = new File("GlobAlbedo.2005129.h18v04.dim"); String productName = mosaicGrid.getProductName(mosaicFile); assertEquals("GlobAlbedo.2005129", productName); mosaicFile = new File("GlobAlbedo.2005129.h18v04_ENVI.binm"); productName = mosaicGrid.getProductName(mosaicFile); assertEquals("GlobAlbedo.2005129", productName); }
// createReaderInstance: returns a fresh Modis29ProductReader bound to this plug-in on every call
// (readers hold per-product state, so instances must not be shared).
Modis29ProductReaderPlugIn implements ProductReaderPlugIn { @Override public ProductReader createReaderInstance() { return new Modis29ProductReader(this); } @Override DecodeQualification getDecodeQualification(Object input); static VirtualDir getInput(Object input); @Override Class[] getInputTypes(); @Override ProductReader createReaderInstance(); @Override String[] getFormatNames(); @Override String[] getDefaultFileExtensions(); @Override String getDescription(Locale locale); @Override BeamFileFilter getProductFileFilter(); static final String FORMAT_NAME_METEOSAT_MSA; }
// Verifies createReaderInstance returns a distinct, non-null reader on each invocation
// (no caching/singleton behavior).
@Test public void testCreateReaderInstanceReturnsNewInstanceEachTime() { ProductReader firstInstance = plugIn.createReaderInstance(); assertNotNull(firstInstance); ProductReader secondInstance = plugIn.createReaderInstance(); assertNotSame(secondInstance, firstInstance); }
// MfgMSAProductReader snippet (class framing preserved from the extracted source).
MfgMSAProductReader extends AbstractProductReader {
    /**
     * Validates an MFG MSA Ancillary file name against the convention
     * 'MSA_Ancillary_L2.0_Vm.nn_sss_yyyy_fff_lll.HDF' (extension case-insensitive).
     *
     * Fix: literal dots in the regex are now escaped ("\\.") — previously the
     * unescaped '.' matched any character and accepted malformed names.
     *
     * @throws IllegalArgumentException if the name does not match the convention
     * @return true when the name matches
     */
    static boolean mfgAncillaryFileNameMatches(String fileName) {
        if (!(fileName.matches("MSA_Ancillary_L2\\.0_V[0-9]\\.[0-9]{2}_[0-9]{3}_[0-9]{4}_[0-9]{3}_[0-9]{3}\\.(?i)(hdf)"))) {
            throw new IllegalArgumentException("Input file name '" + fileName + "' does not match naming convention: 'MSA_Ancillary_L2.0_Vm.nn_sss_yyyy_fff_lll.HDF'");
        }
        return true;
    } protected MfgMSAProductReader(MfgMSAProductReaderPlugIn readerPlugIn); @Override void close(); }
/**
 * Verifies mfgAncillaryFileNameMatches: invalid names throw IllegalArgumentException
 * with the documented message; valid names match regardless of extension case.
 *
 * Fix: the original try/catch blocks passed silently when NO exception was thrown;
 * fail() now guards against that.
 */
@Test
public void testAncillaryFileNameMatches() {
    assertAncillaryNameRejected("bla.txt");
    assertAncillaryNameRejected("MSA_Ancillary_.01_000_2006_001_010.jpg");
    assertAncillaryNameRejected("MSA_Ancillary_L2.0_V2.01_000_2006_001_010.dim");
    // Valid names; extension is case-insensitive.
    assertEquals(true, MfgMSAProductReader.mfgAncillaryFileNameMatches("MSA_Ancillary_L2.0_V2.01_000_2006_001_010.hdf"));
    assertEquals(true, MfgMSAProductReader.mfgAncillaryFileNameMatches("MSA_Ancillary_L2.0_V2.01_000_2006_001_010.HDF"));
}

// Asserts that the given name is rejected with the documented message.
private void assertAncillaryNameRejected(String filename) {
    try {
        MfgMSAProductReader.mfgAncillaryFileNameMatches(filename);
        fail("Expected IllegalArgumentException for '" + filename + "'");
    } catch (IllegalArgumentException e) {
        assertEquals("Input file name '" + filename + "' does not match naming convention: 'MSA_Ancillary_L2.0_Vm.nn_sss_yyyy_fff_lll.HDF'", e.getMessage());
    }
}
// MfgMSAProductReader snippet (class framing preserved from the extracted source).
MfgMSAProductReader extends AbstractProductReader {
    /**
     * Validates an MFG MSA Static input file name against the convention
     * 'MSA_Static_L2.0_Vm.nn_sss_m.HDF' (extension case-insensitive).
     *
     * Fix: literal dots in the regex are now escaped ("\\.") — previously the
     * unescaped '.' matched any character and accepted malformed names.
     *
     * @throws IllegalArgumentException if the name does not match the convention
     * @return true when the name matches
     */
    static boolean mfgStaticInputFileNameMatches(String fileName) {
        if (!(fileName.matches("MSA_Static_L2\\.0_V[0-9]\\.[0-9]{2}_[0-9]{3}_[0-9]{1}\\.(?i)(hdf)"))) {
            throw new IllegalArgumentException("Input file name '" + fileName + "' does not match naming convention: 'MSA_Static_L2.0_Vm.nn_sss_m.HDF'");
        }
        return true;
    } protected MfgMSAProductReader(MfgMSAProductReaderPlugIn readerPlugIn); @Override void close(); }
/**
 * Verifies mfgStaticInputFileNameMatches: invalid names throw IllegalArgumentException
 * with the documented message; valid names match regardless of extension case.
 *
 * Fix: the original try/catch blocks passed silently when NO exception was thrown;
 * fail() now guards against that.
 */
@Test
public void testStaticInputFileNameMatches() {
    assertStaticNameRejected("bla.txt");
    assertStaticNameRejected("MSA_Static_.01_000_2006_001_010.jpg");
    assertStaticNameRejected("MSA_Static_L2.0_V2.01_000_2006_001_010.dim");
    // Valid names; extension is case-insensitive.
    assertEquals(true, MfgMSAProductReader.mfgStaticInputFileNameMatches("MSA_Static_L2.0_V2.01_000_0.hdf"));
    assertEquals(true, MfgMSAProductReader.mfgStaticInputFileNameMatches("MSA_Static_L2.0_V2.01_000_0.HDF"));
}

// Asserts that the given name is rejected with the documented message.
private void assertStaticNameRejected(String filename) {
    try {
        MfgMSAProductReader.mfgStaticInputFileNameMatches(filename);
        fail("Expected IllegalArgumentException for '" + filename + "'");
    } catch (IllegalArgumentException e) {
        assertEquals("Input file name '" + filename + "' does not match naming convention: 'MSA_Static_L2.0_Vm.nn_sss_m.HDF'", e.getMessage());
    }
}
// extract: Java-side placeholder for the ETL extract phase — only logs; per the message,
// the actual extraction is launched via Jython scripts elsewhere in the job.
DatasetTreeBuildETL extends EtlJob { @Override public void extract() throws Exception { logger.info("In DatasetTreeBuildETL java launch extract jython scripts"); } DatasetTreeBuildETL(int appId, long whExecId); DatasetTreeBuildETL(int appId, long whExecId, Properties properties); @Override void extract(); @Override void transform(); @Override void load(); }
// Smoke test: extract() must complete without throwing (requires external config, hence the group).
@Test(groups = {"needConfig"}) public void testExtract() throws Exception { datasetTreeBuildETL.extract(); }
// No-arg convenience overload: delegates to the time-framed variant using the default
// look-back window (DEFAULT_LOOK_BACK_TIME_MINUTES).
AzJobChecker { public List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow() throws IOException, SQLException { return getRecentFinishedJobFromFlow(DEFAULT_LOOK_BACK_TIME_MINUTES); } AzJobChecker(Properties prop); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(int timeFrameMinutes); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(int timeFrameMinutes, long endTimeStamp); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(long startTimeStamp, long endTimeStamp); List<AzkabanJobExecRecord> parseJson(String flowJson, long flowExecId); void close(); }
// Integration tests against a live Azkaban DB (hence "needConfig"): first exercises the
// (timeFrame, endTimestamp) overload, second the no-arg default-window overload. Both only
// assert the result list is non-null; the println output is for manual inspection.
// NOTE(review): these are smoke tests — they cannot fail on wrong content, only on null/exception.
@Test(groups = {"needConfig"}) public void getRecentFinishedJobFromFlowTest2() throws SQLException, IOException { List<AzkabanJobExecRecord> results = ajc.getRecentFinishedJobFromFlow(2, 1448916456L); for (AzkabanJobExecRecord a : results) { System.out.print(a.getFlowExecId() + "\t"); System.out.print(a.getJobName() + "\t"); System.out.println(a.getJobExecId()); } Assert.assertNotNull(results); } @Test(groups = {"needConfig"}) public void getRecentFinishedJobFromFlowTest() throws SQLException, IOException { List<AzkabanJobExecRecord> results = ajc.getRecentFinishedJobFromFlow(); for (AzkabanJobExecRecord a : results) { System.out.print(a.getFlowExecId() + "\t"); System.out.print(a.getJobName() + "\t"); System.out.println(a.getJobExecId()); } Assert.assertNotNull(results); }
// parseJson: parses an Azkaban flow-execution JSON document. Builds the flow path as
// "projectName:flowId", recurses through the "nodes" tree via parseJsonHelper, then
// sorts the records and assigns job-execution ids (AzkabanJobExecUtil.sortAndSet).
AzJobChecker { public List<AzkabanJobExecRecord> parseJson(String flowJson, long flowExecId) throws IOException { ObjectMapper mapper = new ObjectMapper(); JsonNode wholeFlow = mapper.readTree(flowJson); JsonNode allJobs = wholeFlow.get("nodes"); String flowPath = wholeFlow.get("projectName").asText() + ":" + wholeFlow.get("flowId").asText(); List<AzkabanJobExecRecord> results = parseJsonHelper(allJobs, flowExecId, flowPath); AzkabanJobExecUtil.sortAndSet(results); return results; } AzJobChecker(Properties prop); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(int timeFrameMinutes); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(int timeFrameMinutes, long endTimeStamp); List<AzkabanJobExecRecord> getRecentFinishedJobFromFlow(long startTimeStamp, long endTimeStamp); List<AzkabanJobExecRecord> parseJson(String flowJson, long flowExecId); void close(); }
// Parses a nested flow JSON fixture from the classpath and verifies each record's jobExecId
// follows the flowExecId * 1000 + position scheme (11111000, 11111001, ...).
@Test(groups = {"needConfig"}) public void parseNestedJsonTest() throws IOException, URISyntaxException { URL url = Thread.currentThread().getContextClassLoader().getResource("nestedFlowContent.json"); byte[] encoded = Files.readAllBytes(Paths.get(url.getPath())); String nestedJson = new String(encoded, "UTF-8"); List<AzkabanJobExecRecord> result = ajc.parseJson(nestedJson, 11111); for (int i = 0; i < result.size(); i++) { AzkabanJobExecRecord aje = result.get(i); System.out.println(aje.getJobExecId()); System.out.println(aje.getJobName()); System.out.println(aje.getStartTime()); System.out.println(aje.getEndTime()); System.out.println(aje.getFlowPath()); System.out.println(); Assert.assertEquals((long) aje.getJobExecId(), 11111 * 1000 + i); } }
// AzDbCommunicator snippet (class framing preserved from the extracted source).
AzDbCommunicator {
    /**
     * Fetches and concatenates the gzip-compressed Azkaban execution-log chunks for
     * the given execution id and job name (attempt 0), ordered by start_byte, and
     * returns the decompressed UTF-8 text.
     *
     * Fixes: parameterized query instead of string-concatenated SQL (injection risk
     * via jobName), try-with-resources so the statement, result set, and gzip stream
     * are always closed, and the stray System.out debug prints removed.
     */
    public String getExecLog(long execId, String jobName) throws SQLException, IOException {
        String sql = "select log from execution_logs where exec_id = ? and name = ? and attempt = 0 order by start_byte";
        StringBuilder sb = new StringBuilder();
        try (PreparedStatement statement = conn.prepareStatement(sql)) {
            statement.setLong(1, execId);
            statement.setString(2, jobName);
            try (ResultSet rs = statement.executeQuery()) {
                while (rs.next()) {
                    Blob logBlob = rs.getBlob("log");
                    // each row holds one gzip-compressed chunk of the log
                    try (GZIPInputStream gzip = new GZIPInputStream(logBlob.getBinaryStream())) {
                        sb.append(IOUtils.toString(gzip, "UTF-8"));
                    }
                }
            }
        }
        return sb.toString();
    } AzDbCommunicator(Properties prop); String getExecLog(long execId, String jobName); void close(); }
// Integration smoke test: fetching the log for a known execution/job must return non-null.
@Test(groups = {"needConfig"}) public void getExecLogTest() throws IOException, SQLException { int execId = 870536; String jobName = "azkaban-log_load-azkaban-log"; String log = adc.getExecLog(execId, jobName); Assert.assertNotNull(log); }
// extractFromJson: builds lineage records from the parsed job-conf JSON — each parsed input
// becomes a (source, read) record and each parsed output a (target, write) record; the
// remaining construct() arguments (counts/timestamps) are left null here.
AzJsonAnalyzer { public List<LineageRecord> extractFromJson() throws JSONException { List<LineageRecord> results = new ArrayList<>(); List<String> inputs = parseInputs(); for (String s : inputs) { results.add(construct(s, "source", "read", null, null, null, null)); } List<String> outputs = parseOutputs(); for (String s : outputs) { results.add(construct(s, "target", "write",null, null, null, null)); } return results; } AzJsonAnalyzer(String jsonString, AzkabanJobExecRecord aje, int defaultDatabaseId); List<LineageRecord> extractFromJson(); List<String> sepCommaString(List<String> originalStrings); List<String> parseInputs(); List<String> parseOutputs(); }
// Feeds a hand-built Hadoop job-conf JSON through AzJsonAnalyzer and checks the first
// extracted lineage record's object name.
// NOTE(review): several string literals in this test (the "hdfs:..." URLs and the final
// assertEquals argument) appear truncated by the source extraction; the code is kept
// byte-identical rather than guessing at the missing content.
@Test public void extractFromJsonTest() throws JSONException { String jsonString = "{\"conf\":{\"path\":\"hdfs: + "\"property\":[{\"name\":\"mapreduce.jobtracker.address\"," + "\"value\":\"local\"," + "\"source\":[\"mapred-default.xml\",\"file:/grid/c/tmp/yarn/usercache/dev_svc/appcache/application_1441257279406_140910/filecache/13/job.xml\",\"job.xml\",\"hdfs: + "{\"name\":\"mapreduce.input.fileinputformat.inputdir\",\"value\":\"hdfs: + "\"source\":[\"something\"]}" + " ]" + " }}"; AzkabanJobExecRecord azkabanJobExecRecord = new AzkabanJobExecRecord(-1, "someJobName", new Long(0), 0, 0, "S", "path"); azkabanJobExecRecord.setJobExecId((long) 111); AzJsonAnalyzer efc = new AzJsonAnalyzer(jsonString, azkabanJobExecRecord, -1); List<LineageRecord> results = efc.extractFromJson(); Assert.assertEquals(results.get(0).getFullObjectName(), "hdfs: }
// getConfFromHadoop: GETs <serverURL>/<jobId>/conf from the Hadoop job-history server and
// returns the response body as a string; the entity is fully consumed so the connection
// can be reused by the shared httpClient.
// NOTE(review): non-2xx responses are not checked here — the raw body is returned as-is.
HadoopJobHistoryNodeExtractor { public String getConfFromHadoop(String hadoopJobId) throws Exception { String url = this.serverURL + "/" + hadoopJobId + "/conf"; logger.debug("get job conf from : {}", url); HttpUriRequest request = new HttpGet(url); HttpResponse response = httpClient.execute(request); HttpEntity entity = response.getEntity(); String confResult = EntityUtils.toString(entity); EntityUtils.consume(entity); return confResult; } HadoopJobHistoryNodeExtractor(Properties prop); String getConfFromHadoop(String hadoopJobId); void close(); }
// Integration smoke test: fetching the conf for a known job id must return non-null.
@Test(groups = {"needConfig"}) public void testGetConf() throws Exception { String result = he.getConfFromHadoop("job_1437229398924_817615"); System.err.println(result); Assert.assertNotNull(result); }
// AzLogParser snippet (class framing preserved from the extracted source).
AzLogParser {
    /**
     * Scans an Azkaban job log for Hadoop job ids. All configured id patterns
     * (logHadoopIdPatterns) are combined into one alternation; each pattern
     * contributes one capturing group, and every non-null group of every match
     * is collected into the (deduplicated) result set.
     *
     * Fix: the manual StringBuilder append/deleteCharAt join is replaced by
     * String.join — identical output, including the empty-pattern-list case
     * (both produce an empty alternation).
     */
    public static Set<String> getHadoopJobIdFromLog(String log) {
        Pattern hadoopJobIdPattern = Pattern.compile(String.join("|", logHadoopIdPatterns));
        Matcher matcher = hadoopJobIdPattern.matcher(log);
        Set<String> listMatches = new HashSet<>();
        while (matcher.find()) {
            // group i corresponds to the i-th configured pattern; only one is non-null per match
            for (int i = 1; i <= logHadoopIdPatterns.size(); i++) {
                if (matcher.group(i) != null) {
                    listMatches.add(matcher.group(i));
                }
            }
        }
        return listMatches;
    } static Set<String> getHadoopJobIdFromLog(String log); synchronized static void initialize(Connection conn); static List<LineageRecord> getLineageFromLog(String log, AzkabanJobExecRecord azkabanJobExecRecord, Integer defaultDatabaseId); }
// Verifies getHadoopJobIdFromLog extracts deduplicated Hadoop job ids from both a MapReduce
// log excerpt (two distinct ids) and a Hive log excerpt (one id repeated many times).
// NOTE(review): comparing Set.toArray() against an ordered array assumes a stable HashSet
// iteration order — fragile across JDKs. Also, several "http:..." literals appear truncated
// by the source extraction; the code is kept byte-identical rather than guessing.
@Test(groups = {"needConfig"}) public void getHadoopJobIdFromLogTest() { String logSample = "9-08-2015 03:02:16 PDT hadoop-datasets-stats_sizeAggr INFO - INFO map 26% reduce 0%\n" + "29-08-2015 03:02:16 PDT hadoop-datasets-stats_sizeAggr INFO - INFO map 30% reduce 0%\n" + "29-08-2015 03:02:17 PDT hadoop-datasets-stats_sizeAggr INFO - INFO Job job_1440264275625_235896 completed successfully\n" + "29-08-2015 03:02:17 PDT hadoop-datasets-stats_sizeAggr INFO - INFO map 29% reduce 0%\n" + "29-08-2015 03:02:17 PDT hadoop-datasets-stats_sizeAggr INFO - INFO Job job_1440264275625_235886 completed successfully\n" + "29-08-2015 03:02:17 PDT hadoop-datasets-stats_sizeAggr INFO - INFO map 19% reduce 0%"; Set<String> hadoopJobId = AzLogParser.getHadoopJobIdFromLog(logSample); String[] expectedJobId = new String[]{"job_1440264275625_235886", "job_1440264275625_235896"}; Assert.assertEquals(hadoopJobId.toArray(), expectedJobId); String hiveLog = "07-10-2015 14:38:47 PDT hive-with-hiveconf INFO - 15/10/07 21:38:47 INFO impl.YarnClientImpl: Submitted application application_1443068642861_495047\n" + "07-10-2015 14:38:47 PDT hive-with-hiveconf INFO - 15/10/07 21:38:47 INFO mapreduce.Job: The url to track the job: http: + "07-10-2015 14:38:47 PDT hive-with-hiveconf INFO - Starting Job = job_1443068642861_495047, Tracking URL = http: + "07-10-2015 14:38:47 PDT hive-with-hiveconf INFO - 15/10/07 21:38:47 INFO exec.Task: Starting Job = job_1443068642861_495047, Tracking URL = http: + "07-10-2015 14:38:47 PDT hive-with-hiveconf INFO - Kill Command = /export/apps/hadoop/latest/bin/hadoop job -kill job_1443068642861_495047\n" + "07-10-2015 14:38:47 PDT hive-with-hiveconf INFO - 15/10/07 21:38:47 INFO exec.Task: Kill Command = /export/apps/hadoop/latest/bin/hadoop job -kill job_1443068642861_495047\n" + "07-10-2015 14:38:55 PDT hive-with-hiveconf INFO - Hadoo"; hadoopJobId = AzLogParser.getHadoopJobIdFromLog(hiveLog); expectedJobId = new String[]{"job_1443068642861_495047"}; 
Assert.assertEquals(hadoopJobId.toArray(), expectedJobId); }
// getLineageFromLog: extracts lineage records from a job log by running every configured
// LogLineagePattern over it. For each regex match, the dataset string (group datasetIndex)
// is split into individual dataset paths; an optional database-name group is prefixed when
// configured. The dataset type is taken from a leading "scheme:/" prefix, defaulting to
// "hdfs". Record/insert/delete/update counts come from their respective group indices when
// >= 1, else 0. Each record is stamped with the execution's app/flow/job identity and
// timing from azkabanJobExecRecord.
// NOTE(review): the System.out.println("MATCH STRING: ...") looks like leftover debug
// output — consider routing through the class logger. Left byte-identical here because the
// per-pattern group-index bookkeeping is easy to break.
AzLogParser { public static List<LineageRecord> getLineageFromLog(String log, AzkabanJobExecRecord azkabanJobExecRecord, Integer defaultDatabaseId) { List<LineageRecord> result = new ArrayList<>(); Pattern typePattern = Pattern.compile("^(\\w+):/.*"); String datasetType = ""; for (LogLineagePattern patternObject : logLineagePatterns) { Pattern pattern = Pattern.compile(patternObject.regex); Matcher matcher = pattern.matcher(log); while (matcher.find()) { String datasetString = matcher.group(patternObject.datasetIndex); System.out.println("MATCH STRING: " + datasetString + "\n"); List<String> datasets = DatasetPath.separatedDataset(datasetString); for (String dataset : datasets) { if (patternObject.databaseNameIndex > 0) { dataset = matcher.group(patternObject.databaseNameIndex) + "/" + dataset; } LineageRecord lineageRecord = new LineageRecord(azkabanJobExecRecord.getAppId(), azkabanJobExecRecord.getFlowExecId(), azkabanJobExecRecord.getJobName(), azkabanJobExecRecord.getJobExecId()); Matcher typeMatcher = typePattern.matcher(dataset); if (typeMatcher.matches()) { datasetType = typeMatcher.group(1); } else { datasetType = "hdfs"; } lineageRecord.setDatasetInfo(defaultDatabaseId, dataset, datasetType); long recordCount = (patternObject.recordCountIndex < 1) ? 0 : Long.valueOf(matcher.group(patternObject.recordCountIndex)); long insertCount = (patternObject.insertCountIndex < 1) ? 0 : Long.valueOf(matcher.group(patternObject.insertCountIndex)); long deleteCount = (patternObject.deleteCountIndex < 1) ? 0 : Long.valueOf(matcher.group(patternObject.deleteCountIndex)); long updateCount = (patternObject.updateCountIndex < 1) ? 
0 : Long.valueOf(matcher.group(patternObject.updateCountIndex)); lineageRecord .setOperationInfo(patternObject.sourceTargetType, patternObject.operation, recordCount, insertCount, deleteCount, updateCount, azkabanJobExecRecord.getStartTime(), azkabanJobExecRecord.getEndTime(), azkabanJobExecRecord.getFlowPath()); result.add(lineageRecord); } } } return result; } static Set<String> getHadoopJobIdFromLog(String log); synchronized static void initialize(Connection conn); static List<LineageRecord> getLineageFromLog(String log, AzkabanJobExecRecord azkabanJobExecRecord, Integer defaultDatabaseId); }
// Verifies lineage extraction from a "Moving from staged path[...] to final resting place[...]"
// log line: expects exactly one HDFS write record for /tm/b/c with database id -1.
@Test(groups = {"needConfig"}) public void getLineageFromLogTest() { String logSample = "asfdasdfsadf Moving from staged path[asdf] to final resting place[/tm/b/c] sdaf dsfasdfasdf"; AzkabanJobExecRecord sampleExecution = new AzkabanJobExecRecord(TEST_APP_ID, "someJobName", (long) 0, 0, 0, "S", "path"); sampleExecution.setJobExecId((long) 11111); List<LineageRecord> result = AzLogParser.getLineageFromLog(logSample, sampleExecution, -1); System.out.println(result.get(0).toDatabaseValue()); Assert.assertEquals(result.get(0).toDatabaseValue(), "'-1','0','11111',NULL,'someJobName','0','0','-1',NULL,'/tm/b/c',NULL,NULL,NULL,NULL,'HDFS','target',NULL,NULL,'write','0','0','0','0','path',NULL,NULL"); }
// Verifies lineage extraction from a Voldemort push log.
// NOTE(review): this test's string literals are corrupted — every "tcp://..." URL has been
// truncated to a bare `tcp: + "` fragment and the final assertion literal is unterminated,
// so the method as written cannot compile. Restore the original log sample and expected
// full object name from version control before relying on this test.
@Test(groups = {"needConfig"}) public void getLineageFromLogTest2() { String logSample = "Estimated disk size for store endorsements-member-restrictions in node Node lva1-app1508.prod.linkedin.com [id 39] in KB: 90916\n" + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO Checksum for node 39 - f17b2f57adb0595e80ea86c0dd997fc0\n" + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO Setting permission to 755 for /jobs/endorse/endorsements/master/tmp/endorsements-member-restrictions.store/lva1-voldemort-read-only-2-vip.prod.linkedin.com/node-39/.metadata\n" + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO Pushing to cluster URL: tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO StorePushTask.call() invoked for cluster URL: tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - WARN The server requested pushHighAvailability to be DISABLED on cluster: tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO Push starting for cluster: tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - 
INFO tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO Client zone-id [-1] Attempting to get raw store [voldsys$_metadata_version_persistence] \n" + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO Client zone-id [-1] Attempting to get raw store [voldsys$_store_quotas] \n" + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO tcp: + "17-11-2015 01:32:27 PST endorsements_push-lva-endorsements-member-restrictions INFO - INFO tcp: AzkabanJobExecRecord sampleExecution = new AzkabanJobExecRecord(TEST_APP_ID, "someJobName", (long) 0, 0, 0, "S", "path"); List<LineageRecord> result = AzLogParser.getLineageFromLog(logSample, sampleExecution, TEST_DATABASE_ID); System.out.println(result.get(0).toDatabaseValue()); Assert.assertEquals(result.get(0).getFullObjectName(), "tcp: }
AzLineageExtractor { public static List<LineageRecord> extractLineage(AzExecMessage message) throws Exception { List<LineageRecord> oneAzkabanJobLineage = new ArrayList<>(); String []flowSequence = message.azkabanJobExecution.getFlowPath().split(":")[1].split("/"); logger.info("not here"); String jobPrefix = ""; for (int i = 0; i < flowSequence.length; i++) { jobPrefix += flowSequence[i] + ":"; } logger.info(jobPrefix); String log = message.adc.getExecLog(message.azkabanJobExecution.getFlowExecId(), message.azkabanJobExecution.getJobName()); Set<String> hadoopJobIds = AzLogParser.getHadoopJobIdFromLog(log); for (String hadoopJobId : hadoopJobIds) { logger.debug("Get Hadoop job config: {} from Azkaban job: {}" + hadoopJobId, message.azkabanJobExecution.toString()); String confJson = message.hnne.getConfFromHadoop(hadoopJobId); AzJsonAnalyzer ja = new AzJsonAnalyzer(confJson, message.azkabanJobExecution, Integer.valueOf(message.prop.getProperty(Constant.AZ_DEFAULT_HADOOP_DATABASE_ID_KEY))); List<LineageRecord> oneHadoopJobLineage = ja.extractFromJson(); oneAzkabanJobLineage.addAll(oneHadoopJobLineage); } logger.info("finish the part one"); LineageCombiner lineageCombiner = new LineageCombiner(message.connection); lineageCombiner.addAll(oneAzkabanJobLineage); Integer defaultDatabaseId = Integer.valueOf(message.prop.getProperty(Constant.AZ_DEFAULT_HADOOP_DATABASE_ID_KEY)); List<LineageRecord> lineageFromLog = AzLogParser.getLineageFromLog(log, message.azkabanJobExecution, defaultDatabaseId); lineageCombiner.addAll(lineageFromLog); logger.info("finish the part two"); return lineageCombiner.getCombinedLineage(); } static List<LineageRecord> extractLineage(AzExecMessage message); static void extract(AzExecMessage message); }
@Test(groups = {"needConfig"}) public void extractLineageTest() throws Exception { AzkabanJobExecRecord aje = new AzkabanJobExecRecord(31, "endorsements_push-dev-endorsements-suggested-endorsements", (long) 1075483, 0, 0, "S", "endorsements:endorsements"); aje.setJobExecId((long) 1075483037); Statement statement = conn.createStatement(); statement.execute("TRUNCATE TABLE stg_job_execution_data_lineage"); AzExecMessage message = new AzExecMessage(aje, prop); message.databaseWriter = new DatabaseWriter(connUrl, "stg_job_execution_data_lineage"); message.hnne = new HadoopJobHistoryNodeExtractor(prop); message.adc = new AzDbCommunicator(prop); message.connection = conn; AzLineageExtractor.extract(message); ResultSet rs = statement.executeQuery("select count(*) from stg_job_execution_data_lineage"); rs.next(); int totalCount = rs.getInt("count(*)"); int expectedTotalCount = 15; Assert.assertEquals(totalCount, expectedTotalCount); statement.execute("TRUNCATE TABLE stg_job_execution_data_lineage"); conn.close(); }
HdfsMetadataEtl extends EtlJob { @Override public void extract() throws Exception { logger.info("Begin hdfs metadata extract! - " + prop.getProperty(Constant.WH_EXEC_ID_KEY)); boolean isRemote = Boolean.valueOf(prop.getProperty(Constant.HDFS_REMOTE, "false")); if (isRemote) { extractRemote(); } else { extractLocal(); } } @Deprecated HdfsMetadataEtl(Integer dbId, Long whExecId); HdfsMetadataEtl(int dbId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test(groups = {"needConfig"}) public void testExtract() throws Exception { ds.extract(); }
DatasetTreeBuildETL extends EtlJob { @Override public void transform() throws Exception { logger.info("In DatasetTreeBuildETL java launch transform jython scripts"); } DatasetTreeBuildETL(int appId, long whExecId); DatasetTreeBuildETL(int appId, long whExecId, Properties properties); @Override void extract(); @Override void transform(); @Override void load(); }
@Test(groups = {"needConfig"}) public void testTransform() throws Exception { datasetTreeBuildETL.transform(); }
HdfsMetadataEtl extends EtlJob { @Override public void transform() throws Exception { logger.info("Begin hdfs metadata transform : " + prop.getProperty(Constant.WH_EXEC_ID_KEY)); InputStream inputStream = classLoader.getResourceAsStream("jython/HdfsTransform.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated HdfsMetadataEtl(Integer dbId, Long whExecId); HdfsMetadataEtl(int dbId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test(groups = {"needConfig"}) public void testTransform() throws Exception { ds.transform(); }
HdfsMetadataEtl extends EtlJob { @Override public void load() throws Exception { logger.info("Begin hdfs metadata load : " + prop.getProperty(Constant.WH_EXEC_ID_KEY)); InputStream inputStream = classLoader.getResourceAsStream("jython/HdfsLoad.py"); interpreter.execfile(inputStream); inputStream.close(); logger.info("hdfs metadata load finished : " + prop.getProperty(Constant.WH_EXEC_ID_KEY)); } @Deprecated HdfsMetadataEtl(Integer dbId, Long whExecId); HdfsMetadataEtl(int dbId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test(groups = {"needConfig"}) public void testLoad() throws Exception { ds.load(); }
TeradataMetadataEtl extends EtlJob { @Override public void extract() throws Exception { logger.info("In teradata metadata ETL, launch extract jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/TeradataExtract.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated TeradataMetadataEtl(int dbId, long whExecId); TeradataMetadataEtl(int dbId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test(groups = {"needConfig"}) public void testExtract() throws Exception { TeradataMetadataEtl t = new TeradataMetadataEtl(3, 0L); t.extract(); }
OracleMetadataEtl extends EtlJob { @Override public void extract() throws Exception { logger.info("In Oracle metadata ETL, launch extract jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/OracleExtract.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated OracleMetadataEtl(int dbId, long whExecId); OracleMetadataEtl(int dbId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test public void extractTest() throws Exception { _etl.extract(); }
OracleMetadataEtl extends EtlJob { @Override public void load() throws Exception { logger.info("In Oracle metadata ETL, launch load jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/OracleLoad.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated OracleMetadataEtl(int dbId, long whExecId); OracleMetadataEtl(int dbId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test public void loadTest() throws Exception { _etl.load(); }
CodeSearchMetadataEtl extends EtlJob { @Override public void extract() throws Exception { logger.info("In Code Search metadata ETL, launch extract jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/CodeSearchExtract.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated CodeSearchMetadataEtl(int appId, long whExecId); CodeSearchMetadataEtl(int appId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test public void extractTest() throws Exception { _etl.extract(); }
CodeSearchMetadataEtl extends EtlJob { @Override public void load() throws Exception { logger.info("In Code Search metadata ETL, launch load jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/CodeSearchLoad.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated CodeSearchMetadataEtl(int appId, long whExecId); CodeSearchMetadataEtl(int appId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test public void loadTest() throws Exception { _etl.load(); }
MultiproductMetadataEtl extends EtlJob { @Override public void extract() throws Exception { logger.info("In Multiproduct metadata ETL, launch extract jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/MultiproductExtract.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated MultiproductMetadataEtl(int appId, long whExecId); MultiproductMetadataEtl(int appId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test public void extractTest() throws Exception { _etl.extract(); }
MultiproductMetadataEtl extends EtlJob { @Override public void load() throws Exception { logger.info("In Multiproduct metadata ETL, launch load jython scripts"); InputStream inputStream = classLoader.getResourceAsStream("jython/MultiproductLoad.py"); interpreter.execfile(inputStream); inputStream.close(); } @Deprecated MultiproductMetadataEtl(int appId, long whExecId); MultiproductMetadataEtl(int appId, long whExecId, Properties prop); @Override void extract(); @Override void transform(); @Override void load(); }
@Test public void loadTest() throws Exception { _etl.load(); }