target
stringlengths
20
113k
src_fm
stringlengths
11
86.3k
src_fm_fc
stringlengths
21
86.4k
src_fm_fc_co
stringlengths
30
86.4k
src_fm_fc_ms
stringlengths
42
86.8k
src_fm_fc_ms_ff
stringlengths
43
86.8k
@Test public void testGetData() { MondrianInputMeta meta = new MondrianInputMeta(); assertTrue( meta.getStepData() instanceof MondrianData ); }
public StepDataInterface getStepData() { return new MondrianData(); }
MondrianInputMeta extends BaseStepMeta implements StepMetaInterface { public StepDataInterface getStepData() { return new MondrianData(); } }
MondrianInputMeta extends BaseStepMeta implements StepMetaInterface { public StepDataInterface getStepData() { return new MondrianData(); } MondrianInputMeta(); }
MondrianInputMeta extends BaseStepMeta implements StepMetaInterface { public StepDataInterface getStepData() { return new MondrianData(); } MondrianInputMeta(); DatabaseMeta getDatabaseMeta(); void setDatabaseMeta( DatabaseMeta database ); boolean isVariableReplacementActive(); void setVariableReplacementActive( boolean variableReplacementActive ); String getSQL(); void setSQL( String sql ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); void setDefault(); void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ); DatabaseMeta[] getUsedDatabaseConnections(); String getCatalog(); void setCatalog( String catalog ); String getRole(); void setRole( String role ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); }
MondrianInputMeta extends BaseStepMeta implements StepMetaInterface { public StepDataInterface getStepData() { return new MondrianData(); } MondrianInputMeta(); DatabaseMeta getDatabaseMeta(); void setDatabaseMeta( DatabaseMeta database ); boolean isVariableReplacementActive(); void setVariableReplacementActive( boolean variableReplacementActive ); String getSQL(); void setSQL( String sql ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); void setDefault(); void getFields( RowMetaInterface row, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ); DatabaseMeta[] getUsedDatabaseConnections(); String getCatalog(); void setCatalog( String catalog ); String getRole(); void setRole( String role ); String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); }
@Test public void cloneTest() throws Exception { DatabaseLookupMeta meta = new DatabaseLookupMeta(); meta.allocate( 2, 2 ); meta.setStreamKeyField1( new String[] { "aa", "bb" } ); meta.setTableKeyField( new String[] { "cc", "dd" } ); meta.setKeyCondition( new String[] { "ee", "ff" } ); meta.setStreamKeyField2( new String[] { "gg", "hh" } ); meta.setReturnValueField( new String[] { "ii", "jj" } ); meta.setReturnValueNewName( new String[] { "kk", "ll" } ); meta.setReturnValueDefault( new String[] { "mm", "nn" } ); meta.setReturnValueDefaultType( new int[] { 10, 50 } ); meta.setOrderByClause( "FOO DESC" ); DatabaseLookupMeta aClone = (DatabaseLookupMeta) meta.clone(); assertFalse( aClone == meta ); assertTrue( Arrays.equals( meta.getStreamKeyField1(), aClone.getStreamKeyField1() ) ); assertTrue( Arrays.equals( meta.getTableKeyField(), aClone.getTableKeyField() ) ); assertTrue( Arrays.equals( meta.getKeyCondition(), aClone.getKeyCondition() ) ); assertTrue( Arrays.equals( meta.getStreamKeyField2(), aClone.getStreamKeyField2() ) ); assertTrue( Arrays.equals( meta.getReturnValueField(), aClone.getReturnValueField() ) ); assertTrue( Arrays.equals( meta.getReturnValueNewName(), aClone.getReturnValueNewName() ) ); assertTrue( Arrays.equals( meta.getReturnValueDefault(), aClone.getReturnValueDefault() ) ); assertTrue( Arrays.equals( meta.getReturnValueDefaultType(), aClone.getReturnValueDefaultType() ) ); assertEquals( meta.getOrderByClause(), aClone.getOrderByClause() ); assertEquals( meta.getXML(), aClone.getXML() ); }
@Override public Object clone() { DatabaseLookupMeta retval = (DatabaseLookupMeta) super.clone(); int nrkeys = streamKeyField1.length; int nrvalues = returnValueField.length; retval.allocate( nrkeys, nrvalues ); System.arraycopy( streamKeyField1, 0, retval.streamKeyField1, 0, nrkeys ); System.arraycopy( tableKeyField, 0, retval.tableKeyField, 0, nrkeys ); System.arraycopy( keyCondition, 0, retval.keyCondition, 0, nrkeys ); System.arraycopy( streamKeyField2, 0, retval.streamKeyField2, 0, nrkeys ); System.arraycopy( returnValueField, 0, retval.returnValueField, 0, nrvalues ); System.arraycopy( returnValueNewName, 0, retval.returnValueNewName, 0, nrvalues ); System.arraycopy( returnValueDefault, 0, retval.returnValueDefault, 0, nrvalues ); System.arraycopy( returnValueDefaultType, 0, retval.returnValueDefaultType, 0, nrvalues ); return retval; }
DatabaseLookupMeta extends BaseStepMeta implements StepMetaInterface, ProvidesModelerMeta { @Override public Object clone() { DatabaseLookupMeta retval = (DatabaseLookupMeta) super.clone(); int nrkeys = streamKeyField1.length; int nrvalues = returnValueField.length; retval.allocate( nrkeys, nrvalues ); System.arraycopy( streamKeyField1, 0, retval.streamKeyField1, 0, nrkeys ); System.arraycopy( tableKeyField, 0, retval.tableKeyField, 0, nrkeys ); System.arraycopy( keyCondition, 0, retval.keyCondition, 0, nrkeys ); System.arraycopy( streamKeyField2, 0, retval.streamKeyField2, 0, nrkeys ); System.arraycopy( returnValueField, 0, retval.returnValueField, 0, nrvalues ); System.arraycopy( returnValueNewName, 0, retval.returnValueNewName, 0, nrvalues ); System.arraycopy( returnValueDefault, 0, retval.returnValueDefault, 0, nrvalues ); System.arraycopy( returnValueDefaultType, 0, retval.returnValueDefaultType, 0, nrvalues ); return retval; } }
DatabaseLookupMeta extends BaseStepMeta implements StepMetaInterface, ProvidesModelerMeta { @Override public Object clone() { DatabaseLookupMeta retval = (DatabaseLookupMeta) super.clone(); int nrkeys = streamKeyField1.length; int nrvalues = returnValueField.length; retval.allocate( nrkeys, nrvalues ); System.arraycopy( streamKeyField1, 0, retval.streamKeyField1, 0, nrkeys ); System.arraycopy( tableKeyField, 0, retval.tableKeyField, 0, nrkeys ); System.arraycopy( keyCondition, 0, retval.keyCondition, 0, nrkeys ); System.arraycopy( streamKeyField2, 0, retval.streamKeyField2, 0, nrkeys ); System.arraycopy( returnValueField, 0, retval.returnValueField, 0, nrvalues ); System.arraycopy( returnValueNewName, 0, retval.returnValueNewName, 0, nrvalues ); System.arraycopy( returnValueDefault, 0, retval.returnValueDefault, 0, nrvalues ); System.arraycopy( returnValueDefaultType, 0, retval.returnValueDefaultType, 0, nrvalues ); return retval; } DatabaseLookupMeta(); }
DatabaseLookupMeta extends BaseStepMeta implements StepMetaInterface, ProvidesModelerMeta { @Override public Object clone() { DatabaseLookupMeta retval = (DatabaseLookupMeta) super.clone(); int nrkeys = streamKeyField1.length; int nrvalues = returnValueField.length; retval.allocate( nrkeys, nrvalues ); System.arraycopy( streamKeyField1, 0, retval.streamKeyField1, 0, nrkeys ); System.arraycopy( tableKeyField, 0, retval.tableKeyField, 0, nrkeys ); System.arraycopy( keyCondition, 0, retval.keyCondition, 0, nrkeys ); System.arraycopy( streamKeyField2, 0, retval.streamKeyField2, 0, nrkeys ); System.arraycopy( returnValueField, 0, retval.returnValueField, 0, nrvalues ); System.arraycopy( returnValueNewName, 0, retval.returnValueNewName, 0, nrvalues ); System.arraycopy( returnValueDefault, 0, retval.returnValueDefault, 0, nrvalues ); System.arraycopy( returnValueDefaultType, 0, retval.returnValueDefaultType, 0, nrvalues ); return retval; } DatabaseLookupMeta(); boolean isCached(); void setCached( boolean cached ); int getCacheSize(); void setCacheSize( int cacheSize ); @Override DatabaseMeta getDatabaseMeta(); @Override String getTableName(); void setDatabaseMeta( DatabaseMeta database ); String[] getKeyCondition(); void setKeyCondition( String[] keyCondition ); String getOrderByClause(); void setOrderByClause( String orderByClause ); String[] getReturnValueDefault(); void setReturnValueDefault( String[] returnValueDefault ); int[] getReturnValueDefaultType(); void setReturnValueDefaultType( int[] returnValueDefaultType ); String[] getReturnValueField(); void setReturnValueField( String[] returnValueField ); String[] getReturnValueNewName(); void setReturnValueNewName( String[] returnValueNewName ); String[] getStreamKeyField1(); void setStreamKeyField1( String[] streamKeyField1 ); String[] getStreamKeyField2(); void setStreamKeyField2( String[] streamKeyField2 ); String[] getTableKeyField(); void setTableKeyField( String[] tableKeyField ); String getTablename(); void 
setTablename( String tablename ); boolean isFailingOnMultipleResults(); void setFailingOnMultipleResults( boolean failOnMultipleResults ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrkeys, int nrvalues ); @Override Object clone(); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override RowMetaInterface getTableFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); @Override void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ); @Override DatabaseMeta[] getUsedDatabaseConnections(); boolean isEatingRowOnLookupFailure(); void setEatingRowOnLookupFailure( boolean eatingRowOnLookupFailure ); @Override String getSchemaName(); @Override String getMissingDatabaseConnectionInformationMessage(); void setSchemaName( String schemaName ); @Override boolean supportsErrorHandling(); boolean isLoadingAllDataInCache(); void setLoadingAllDataInCache( boolean loadingAllDataInCache ); @Override RowMeta getRowMeta( StepDataInterface stepData ); @Override List<String> getDatabaseFields(); 
@Override List<String> getStreamFields(); }
DatabaseLookupMeta extends BaseStepMeta implements StepMetaInterface, ProvidesModelerMeta { @Override public Object clone() { DatabaseLookupMeta retval = (DatabaseLookupMeta) super.clone(); int nrkeys = streamKeyField1.length; int nrvalues = returnValueField.length; retval.allocate( nrkeys, nrvalues ); System.arraycopy( streamKeyField1, 0, retval.streamKeyField1, 0, nrkeys ); System.arraycopy( tableKeyField, 0, retval.tableKeyField, 0, nrkeys ); System.arraycopy( keyCondition, 0, retval.keyCondition, 0, nrkeys ); System.arraycopy( streamKeyField2, 0, retval.streamKeyField2, 0, nrkeys ); System.arraycopy( returnValueField, 0, retval.returnValueField, 0, nrvalues ); System.arraycopy( returnValueNewName, 0, retval.returnValueNewName, 0, nrvalues ); System.arraycopy( returnValueDefault, 0, retval.returnValueDefault, 0, nrvalues ); System.arraycopy( returnValueDefaultType, 0, retval.returnValueDefaultType, 0, nrvalues ); return retval; } DatabaseLookupMeta(); boolean isCached(); void setCached( boolean cached ); int getCacheSize(); void setCacheSize( int cacheSize ); @Override DatabaseMeta getDatabaseMeta(); @Override String getTableName(); void setDatabaseMeta( DatabaseMeta database ); String[] getKeyCondition(); void setKeyCondition( String[] keyCondition ); String getOrderByClause(); void setOrderByClause( String orderByClause ); String[] getReturnValueDefault(); void setReturnValueDefault( String[] returnValueDefault ); int[] getReturnValueDefaultType(); void setReturnValueDefaultType( int[] returnValueDefaultType ); String[] getReturnValueField(); void setReturnValueField( String[] returnValueField ); String[] getReturnValueNewName(); void setReturnValueNewName( String[] returnValueNewName ); String[] getStreamKeyField1(); void setStreamKeyField1( String[] streamKeyField1 ); String[] getStreamKeyField2(); void setStreamKeyField2( String[] streamKeyField2 ); String[] getTableKeyField(); void setTableKeyField( String[] tableKeyField ); String getTablename(); void 
setTablename( String tablename ); boolean isFailingOnMultipleResults(); void setFailingOnMultipleResults( boolean failOnMultipleResults ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrkeys, int nrvalues ); @Override Object clone(); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override RowMetaInterface getTableFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); @Override void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ); @Override DatabaseMeta[] getUsedDatabaseConnections(); boolean isEatingRowOnLookupFailure(); void setEatingRowOnLookupFailure( boolean eatingRowOnLookupFailure ); @Override String getSchemaName(); @Override String getMissingDatabaseConnectionInformationMessage(); void setSchemaName( String schemaName ); @Override boolean supportsErrorHandling(); boolean isLoadingAllDataInCache(); void setLoadingAllDataInCache( boolean loadingAllDataInCache ); @Override RowMeta getRowMeta( StepDataInterface stepData ); @Override List<String> getDatabaseFields(); 
@Override List<String> getStreamFields(); static final String[] conditionStrings; static final int CONDITION_EQ; static final int CONDITION_NE; static final int CONDITION_LT; static final int CONDITION_LE; static final int CONDITION_GT; static final int CONDITION_GE; static final int CONDITION_LIKE; static final int CONDITION_BETWEEN; static final int CONDITION_IS_NULL; static final int CONDITION_IS_NOT_NULL; }
@Test( expected = UnsupportedOperationException.class ) public void storeRowInCache_ThrowsException() throws Exception { buildCache( "" ).storeRowInCache( new DatabaseLookupMeta(), keysMeta.clone(), keys[ 0 ], data[ 0 ] ); }
@Override public void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ) { throw new UnsupportedOperationException( "This cache is read-only" ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ) { throw new UnsupportedOperationException( "This cache is read-only" ); } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ) { throw new UnsupportedOperationException( "This cache is read-only" ); } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ) { throw new UnsupportedOperationException( "This cache is read-only" ); } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ) { throw new UnsupportedOperationException( "This cache is read-only" ); } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void hasDbConditionStopsSearching() throws Exception { stepData.hasDBCondition = true; assertNull( buildCache( "" ).getRowFromCache( keysMeta.clone(), keys[ 0 ] ) ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void lookup_Finds_Only() throws Exception { ReadAllCache cache = buildCache( "=,<,=,IS NULL" ); Object[] found = cache.getRowFromCache( keysMeta.clone(), new Object[] { 1L, "2", new Date( 100 ), null } ); assertArrayEquals( "(keys[0] == 1) && (keys[1] < '2') && (keys[2] == 100) --> row 3", data[ 3 ], found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void testRow() throws Exception { Date date = new Date(); URI uri = new URI( "http: List<Object> objects = new ArrayList<>(); objects.add( 100 ); objects.add( 100.50 ); BigDecimal bigDecimal = new BigDecimal( "10000000000000000000.50" ); objects.add( bigDecimal ); objects.add( true ); objects.add( date ); objects.add( "A String" ); objects.add( uri ); List<String> names = new ArrayList<>(); names.add( "some int" ); names.add( "some Double" ); names.add( "some Decimal" ); names.add( "some Boolean" ); names.add( "some Date" ); names.add( "some String" ); names.add( "some Serializable" ); List<Class> classes = Arrays .asList( Integer.class, Double.class, BigDecimal.class, Boolean.class, Date.class, String.class, Object.class ); Row row = new DeserializedRow( names, classes, objects ); assertEquals( new Integer( 100 ), row.getObjects()[ 0 ] ); assertEquals( 100.50, (double) row.getObjects()[ 1 ], 0.001D ); assertEquals( bigDecimal, row.getObjects()[ 2 ] ); assertTrue( (Boolean) row.getObjects()[ 3 ] ); assertEquals( date, row.getObjects()[ 4 ] ); assertEquals( "A String", row.getObjects()[ 5 ] ); assertEquals( uri, row.getObjects()[ 6 ] ); }
@Override public Object[] getObjects() { return Collections.unmodifiableList( objects ).toArray(); }
DeserializedRow implements Row { @Override public Object[] getObjects() { return Collections.unmodifiableList( objects ).toArray(); } }
DeserializedRow implements Row { @Override public Object[] getObjects() { return Collections.unmodifiableList( objects ).toArray(); } DeserializedRow( List<String> names, List<Class> types, List<Object> objects ); }
DeserializedRow implements Row { @Override public Object[] getObjects() { return Collections.unmodifiableList( objects ).toArray(); } DeserializedRow( List<String> names, List<Class> types, List<Object> objects ); @Override List<String> getColumnNames(); @Override Object[] getObjects(); @Override boolean equals( Object o ); @Override int hashCode(); }
DeserializedRow implements Row { @Override public Object[] getObjects() { return Collections.unmodifiableList( objects ).toArray(); } DeserializedRow( List<String> names, List<Class> types, List<Object> objects ); @Override List<String> getColumnNames(); @Override Object[] getObjects(); @Override boolean equals( Object o ); @Override int hashCode(); }
@Test public void lookup_Finds_FirstMatching() throws Exception { ReadAllCache cache = buildCache( "=,IS NOT NULL,<=,IS NULL" ); Object[] found = cache.getRowFromCache( keysMeta.clone(), new Object[] { 1L, null, new Date( 1000000 ), null } ); assertArrayEquals( "(keys[0] == 1) && (keys[2] < 1000000) --> row 3", data[ 3 ], found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void lookup_Finds_WithBetweenOperator() throws Exception { RowMeta meta = keysMeta.clone(); meta.setValueMeta( 3, new ValueMetaDate() ); meta.addValueMeta( new ValueMetaInteger() ); ReadAllCache cache = buildCache( "<>,IS NOT NULL,BETWEEN,IS NULL" ); Object[] found = cache.getRowFromCache( meta, new Object[] { -1L, null, new Date( 140 ), new Date( 160 ), null } ); assertArrayEquals( "(140 <= keys[2] <= 160) --> row 4", data[ 4 ], found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void lookup_Finds_WithTwoBetweenOperators() throws Exception { RowMeta meta = new RowMeta(); meta.addValueMeta( new ValueMetaInteger() ); meta.addValueMeta( new ValueMetaString() ); meta.addValueMeta( new ValueMetaString() ); meta.addValueMeta( new ValueMetaDate() ); meta.addValueMeta( new ValueMetaDate() ); meta.addValueMeta( new ValueMetaInteger() ); ReadAllCache cache = buildCache( ">,BETWEEN,BETWEEN,IS NULL" ); Object[] found = cache.getRowFromCache( meta, new Object[] { -1L, "1", "3", new Date( 0 ), new Date( 1000 ), null } ); assertArrayEquals( "('1' <= keys[1] <= '3') && (0 <= keys[2] <= 1000) --> row 2", data[ 2 ], found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void lookup_DoesNotFind_FilteredByIndex() throws Exception { ReadAllCache cache = buildCache( "=,IS NOT NULL,>=,IS NOT NULL" ); Object[] found = cache.getRowFromCache( keysMeta.clone(), new Object[] { 1L, null, new Date( 0 ), null } ); assertNull( "(keys[3] != NULL) --> none", found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void lookup_DoesNotFind_WithBetweenOperator() throws Exception { RowMeta meta = keysMeta.clone(); meta.setValueMeta( 3, new ValueMetaDate() ); meta.addValueMeta( new ValueMetaInteger() ); ReadAllCache cache = buildCache( "<>,IS NOT NULL,BETWEEN,IS NULL" ); Object[] found = cache.getRowFromCache( meta, new Object[] { -1L, null, new Date( 1000 ), new Date( 2000 ), null } ); assertNull( "(1000 <= keys[2] <= 2000) --> none", found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void lookup_HandlesAbsenceOfLookupValue() throws Exception { stepData = new DatabaseLookupData(); stepData.conditions = new int[] { DatabaseLookupMeta.CONDITION_IS_NOT_NULL }; ReadAllCache.Builder builder = new ReadAllCache.Builder( stepData, 2 ); RowMeta keysMeta = new RowMeta(); keysMeta.addValueMeta( new ValueMetaInteger() ); builder.setKeysMeta( keysMeta ); builder.add( new Object[] { null }, new Object[] { "null" } ); builder.add( new Object[] { 1L }, new Object[] { "one" } ); ReadAllCache cache = builder.build(); Object[] found = cache.getRowFromCache( new RowMeta(), new Object[ 0 ] ); assertArrayEquals( "(keys[1] == 1L) --> row 2", new Object[] { "one" }, found ); }
@Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
ReadAllCache implements DatabaseLookupData.Cache { @Override public Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ) throws KettleException { if ( stepData.hasDBCondition ) { return null; } SearchingContext context = new SearchingContext(); context.init( keys.length ); for ( Index index : indexes ) { int column = index.getColumn(); Object lookupValue = ( column < lookupRow.length ) ? lookupRow[ column ] : null; index.applyRestrictionsTo( context, lookupMeta.getValueMeta( column ), lookupValue ); if ( context.isEmpty() ) { return null; } } BitSet candidates = context.getCandidates(); int candidate = candidates.nextSetBit( 0 ); while ( candidate != -1 ) { Object[] dataKeys = keys[ candidate ]; boolean matches = true; int lookupShift = 0; for ( int i = 0, len = otherConditions.length; i < len && matches; i++ ) { int[] columnConditionPair = otherConditions[ i ]; final int column = columnConditionPair[ 0 ]; Object keyData = dataKeys[ column ]; ValueMetaInterface keyMeta = keysMeta.getValueMeta( column ); int lookupIndex = column + lookupShift; Object cmpData = lookupRow[ lookupIndex ]; ValueMetaInterface cmpMeta = lookupMeta.getValueMeta( lookupIndex ); int condition = columnConditionPair[ 1 ]; if ( condition == DatabaseLookupMeta.CONDITION_BETWEEN ) { matches = ( keyMeta.compare( keyData, cmpMeta, cmpData ) >= 0 ); if ( matches ) { lookupShift++; lookupIndex++; ValueMetaInterface cmpMeta2 = lookupMeta.getValueMeta( lookupIndex ); Object cmpData2 = lookupRow[ lookupIndex ]; matches = ( keyMeta.compare( keyData, cmpMeta2, cmpData2 ) <= 0 ); } } else { matches = false; stepData.hasDBCondition = true; } } if ( matches ) { return data[ candidate ]; } else { candidate = candidates.nextSetBit( candidate + 1 ); } } return null; } ReadAllCache( DatabaseLookupData stepData, Object[][] keys, RowMetaInterface keysMeta, Object[][] data ); @Override Object[] getRowFromCache( RowMetaInterface lookupMeta, Object[] lookupRow ); @Override void storeRowInCache( 
DatabaseLookupMeta meta, RowMetaInterface lookupMeta, Object[] lookupRow, Object[] add ); }
@Test public void testGetRepoList_includeSubfolders() throws KettleException { init( repo, "/", true, ".*", "", All, 4 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_excludeSubfolders() throws KettleException { init( repo, "/", false, ".*", "", All, 0 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_transOnly() throws KettleException { init( repo, "/", true, ".*", "", Transformations, 2 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_jobsOnly() throws KettleException { init( repo, "/", true, ".*", "", Jobs, 2 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testDataEventSerializer() throws Exception { Operation operation = mock( Operation.class ); when( operation.getId() ).thenReturn( "foo" ); Date date = new Date(); URI uri = new URI( "http: List<Object> objects = new ArrayList<>(); objects.add( 100 ); objects.add( 100.50 ); objects.add( new BigDecimal( "10000000000000000000.50" ) ); objects.add( true ); objects.add( date ); objects.add( "A String" ); objects.add( uri ); List<String> names = new ArrayList<>(); names.add( "some int" ); names.add( "some Double" ); names.add( "some Decimal" ); names.add( "some Boolean" ); names.add( "some Date" ); names.add( "some String" ); names.add( "some Serializable" ); List<Class> classes = Arrays .asList( Integer.class, Double.class, BigDecimal.class, Boolean.class, Date.class, String.class, Object.class ); Row row = new DeserializedRow( names, classes, objects ); List<Row> rowsList = Collections.singletonList( row ); Rows rows = new Rows( rowsList, Rows.TYPE.OUT, Rows.STATE.ACTIVE ); DataEvent<Operation> dataEvent = new DataEvent<>( operation, rows ); DataEventSerializer serializer = new DataEventSerializer(); String serialized = serializer.serialize( dataEvent ); System.out.println( serialized ); DataEvent deserialized = serializer.deserialize( serialized ); assertTrue( serializer.getSupportedClasses().contains( DataEvent.class ) ); assertEquals( dataEvent, deserialized ); }
public DataEventSerializer() { super( DataEvent.class ); SimpleModule module = new SimpleModule(); module.addSerializer( DataEvent.class, new JsonSerializer<DataEvent>() { @Override public void serialize( DataEvent dataEvent, JsonGenerator jsonGenerator, SerializerProvider serializerProvider ) throws IOException, JsonProcessingException { jsonGenerator.writeStartObject(); Rows rows = (Rows) dataEvent.getData(); jsonGenerator.writeStringField( "model-id", dataEvent.getSource().getId() ); jsonGenerator.writeStringField( "type", rows.getType().toString() ); jsonGenerator.writeStringField( "state", rows.getState().toString() ); jsonGenerator.writeArrayFieldStart( "rows" ); for ( Row row : rows ) { jsonGenerator.writeStartObject(); jsonGenerator.writeArrayFieldStart( "names" ); for ( String name : row.getColumnNames() ) { jsonGenerator.writeString( name ); } jsonGenerator.writeEndArray(); jsonGenerator.writeArrayFieldStart( "objects" ); for ( Object obj : row.getObjects() ) { jsonGenerator.writeStartObject(); if ( obj == null ) { jsonGenerator.writeStringField( "type", "Null" ); jsonGenerator.writeEndObject(); continue; } switch ( obj.getClass().getSimpleName() ) { case "String": jsonGenerator.writeStringField( "type", "String" ); jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Date": jsonGenerator.writeStringField( "type", "Date" ); jsonGenerator.writeStringField( "obj", DATE_TIME_INSTANCE.format( (Date) obj ) ); break; case "Integer": jsonGenerator.writeStringField( "type", "Integer" ); jsonGenerator.writeNumberField( "obj", (Integer) obj ); break; case "Long": jsonGenerator.writeStringField( "type", "Long" ); jsonGenerator.writeNumberField( "obj", (Long) obj ); break; case "Double": jsonGenerator.writeStringField( "type", "Double" ); jsonGenerator.writeNumberField( "obj", (Double) obj ); break; case "BigDecimal": jsonGenerator.writeStringField( "type", "BigDecimal" ); jsonGenerator.writeStringField( "obj", obj.toString() ); break; case 
"Boolean": jsonGenerator.writeStringField( "type", "Boolean" ); jsonGenerator.writeBooleanField( "obj", (Boolean) obj ); break; case "byte[]": jsonGenerator.writeStringField( "type", "byte[]" ); jsonGenerator.writeStringField( "obj", new String( ( (byte[]) obj ), "UTF-8" ) ); break; default: if ( obj instanceof Serializable ) { jsonGenerator.writeStringField( "type", "Object" ); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream( outputStream ); objectOutputStream.writeObject( obj ); objectOutputStream.close(); outputStream.close(); byte[] bytes = outputStream.toByteArray(); jsonGenerator.writeStringField( "obj", Base64.encodeBase64String( bytes ) ); } } jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } } ); module.addDeserializer( DataEvent.class, new StdNodeBasedDeserializer<DataEvent>( DataEvent.class ) { @Override public DataEvent convert( JsonNode jsonNode, DeserializationContext deserializationContext ) throws IOException { Rows.TYPE type = Rows.TYPE.valueOf( jsonNode.get( "type" ).asText() ); Rows.STATE state = Rows.STATE.valueOf( jsonNode.get( "state" ).asText() ); List<Row> rows = new ArrayList<>(); JsonNode json_rows = jsonNode.get( "rows" ); for ( JsonNode row : json_rows ) { List<Class> types = new ArrayList<>(); List<String> names = new ArrayList<>(); for ( JsonNode name : row.get( "names" ) ) { names.add( name.asText() ); } List<Object> objects = new ArrayList<>(); for ( JsonNode obj : row.get( "objects" ) ) { JsonNode t = obj.get( "type" ); JsonNode rawObject = obj.get( "obj" ); Object object = null; String objType = t.asText(); switch ( objType ) { case "Null": types.add( Void.class ); break; case "String": types.add( String.class ); object = rawObject.asText(); break; case "Integer": types.add( Integer.class ); object = rawObject.asInt(); break; case "Long": 
types.add( Long.class ); object = rawObject.asLong(); break; case "Date": types.add( Date.class ); try { object = DATE_TIME_INSTANCE.parse( rawObject.asText() ); } catch ( ParseException e ) { e.printStackTrace(); } break; case "Double": types.add( Double.class ); object = rawObject.asDouble(); break; case "BigDecimal": types.add( BigDecimal.class ); object = new BigDecimal( rawObject.asText() ); break; case "Boolean": types.add( Boolean.class ); object = rawObject.asBoolean(); break; case "byte[]": types.add( byte[].class ); object = rawObject.asText().getBytes( "UTF-8" ); break; case "Object": try { types.add( Object.class ); object = new ObjectInputStream( new ByteArrayInputStream( Base64.decodeBase64( rawObject.asText() ) ) ).readObject(); } catch ( ClassNotFoundException e ) { e.printStackTrace(); } break; } objects.add( object ); } Row r = new DeserializedRow( names, types, objects ); rows.add( r ); } Rows rowsObj = new Rows( rows, type, state ); return new DataEvent( new RemoteSource( jsonNode.get( "model-id" ).asText() ), rowsObj ); } } ); mapper.registerModule( module ); }
DataEventSerializer extends BaseSerializer<DataEvent> { public DataEventSerializer() { super( DataEvent.class ); SimpleModule module = new SimpleModule(); module.addSerializer( DataEvent.class, new JsonSerializer<DataEvent>() { @Override public void serialize( DataEvent dataEvent, JsonGenerator jsonGenerator, SerializerProvider serializerProvider ) throws IOException, JsonProcessingException { jsonGenerator.writeStartObject(); Rows rows = (Rows) dataEvent.getData(); jsonGenerator.writeStringField( "model-id", dataEvent.getSource().getId() ); jsonGenerator.writeStringField( "type", rows.getType().toString() ); jsonGenerator.writeStringField( "state", rows.getState().toString() ); jsonGenerator.writeArrayFieldStart( "rows" ); for ( Row row : rows ) { jsonGenerator.writeStartObject(); jsonGenerator.writeArrayFieldStart( "names" ); for ( String name : row.getColumnNames() ) { jsonGenerator.writeString( name ); } jsonGenerator.writeEndArray(); jsonGenerator.writeArrayFieldStart( "objects" ); for ( Object obj : row.getObjects() ) { jsonGenerator.writeStartObject(); if ( obj == null ) { jsonGenerator.writeStringField( "type", "Null" ); jsonGenerator.writeEndObject(); continue; } switch ( obj.getClass().getSimpleName() ) { case "String": jsonGenerator.writeStringField( "type", "String" ); jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Date": jsonGenerator.writeStringField( "type", "Date" ); jsonGenerator.writeStringField( "obj", DATE_TIME_INSTANCE.format( (Date) obj ) ); break; case "Integer": jsonGenerator.writeStringField( "type", "Integer" ); jsonGenerator.writeNumberField( "obj", (Integer) obj ); break; case "Long": jsonGenerator.writeStringField( "type", "Long" ); jsonGenerator.writeNumberField( "obj", (Long) obj ); break; case "Double": jsonGenerator.writeStringField( "type", "Double" ); jsonGenerator.writeNumberField( "obj", (Double) obj ); break; case "BigDecimal": jsonGenerator.writeStringField( "type", "BigDecimal" ); 
jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Boolean": jsonGenerator.writeStringField( "type", "Boolean" ); jsonGenerator.writeBooleanField( "obj", (Boolean) obj ); break; case "byte[]": jsonGenerator.writeStringField( "type", "byte[]" ); jsonGenerator.writeStringField( "obj", new String( ( (byte[]) obj ), "UTF-8" ) ); break; default: if ( obj instanceof Serializable ) { jsonGenerator.writeStringField( "type", "Object" ); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream( outputStream ); objectOutputStream.writeObject( obj ); objectOutputStream.close(); outputStream.close(); byte[] bytes = outputStream.toByteArray(); jsonGenerator.writeStringField( "obj", Base64.encodeBase64String( bytes ) ); } } jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } } ); module.addDeserializer( DataEvent.class, new StdNodeBasedDeserializer<DataEvent>( DataEvent.class ) { @Override public DataEvent convert( JsonNode jsonNode, DeserializationContext deserializationContext ) throws IOException { Rows.TYPE type = Rows.TYPE.valueOf( jsonNode.get( "type" ).asText() ); Rows.STATE state = Rows.STATE.valueOf( jsonNode.get( "state" ).asText() ); List<Row> rows = new ArrayList<>(); JsonNode json_rows = jsonNode.get( "rows" ); for ( JsonNode row : json_rows ) { List<Class> types = new ArrayList<>(); List<String> names = new ArrayList<>(); for ( JsonNode name : row.get( "names" ) ) { names.add( name.asText() ); } List<Object> objects = new ArrayList<>(); for ( JsonNode obj : row.get( "objects" ) ) { JsonNode t = obj.get( "type" ); JsonNode rawObject = obj.get( "obj" ); Object object = null; String objType = t.asText(); switch ( objType ) { case "Null": types.add( Void.class ); break; case "String": types.add( String.class ); object = rawObject.asText(); break; case "Integer": 
types.add( Integer.class ); object = rawObject.asInt(); break; case "Long": types.add( Long.class ); object = rawObject.asLong(); break; case "Date": types.add( Date.class ); try { object = DATE_TIME_INSTANCE.parse( rawObject.asText() ); } catch ( ParseException e ) { e.printStackTrace(); } break; case "Double": types.add( Double.class ); object = rawObject.asDouble(); break; case "BigDecimal": types.add( BigDecimal.class ); object = new BigDecimal( rawObject.asText() ); break; case "Boolean": types.add( Boolean.class ); object = rawObject.asBoolean(); break; case "byte[]": types.add( byte[].class ); object = rawObject.asText().getBytes( "UTF-8" ); break; case "Object": try { types.add( Object.class ); object = new ObjectInputStream( new ByteArrayInputStream( Base64.decodeBase64( rawObject.asText() ) ) ).readObject(); } catch ( ClassNotFoundException e ) { e.printStackTrace(); } break; } objects.add( object ); } Row r = new DeserializedRow( names, types, objects ); rows.add( r ); } Rows rowsObj = new Rows( rows, type, state ); return new DataEvent( new RemoteSource( jsonNode.get( "model-id" ).asText() ), rowsObj ); } } ); mapper.registerModule( module ); } }
DataEventSerializer extends BaseSerializer<DataEvent> { public DataEventSerializer() { super( DataEvent.class ); SimpleModule module = new SimpleModule(); module.addSerializer( DataEvent.class, new JsonSerializer<DataEvent>() { @Override public void serialize( DataEvent dataEvent, JsonGenerator jsonGenerator, SerializerProvider serializerProvider ) throws IOException, JsonProcessingException { jsonGenerator.writeStartObject(); Rows rows = (Rows) dataEvent.getData(); jsonGenerator.writeStringField( "model-id", dataEvent.getSource().getId() ); jsonGenerator.writeStringField( "type", rows.getType().toString() ); jsonGenerator.writeStringField( "state", rows.getState().toString() ); jsonGenerator.writeArrayFieldStart( "rows" ); for ( Row row : rows ) { jsonGenerator.writeStartObject(); jsonGenerator.writeArrayFieldStart( "names" ); for ( String name : row.getColumnNames() ) { jsonGenerator.writeString( name ); } jsonGenerator.writeEndArray(); jsonGenerator.writeArrayFieldStart( "objects" ); for ( Object obj : row.getObjects() ) { jsonGenerator.writeStartObject(); if ( obj == null ) { jsonGenerator.writeStringField( "type", "Null" ); jsonGenerator.writeEndObject(); continue; } switch ( obj.getClass().getSimpleName() ) { case "String": jsonGenerator.writeStringField( "type", "String" ); jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Date": jsonGenerator.writeStringField( "type", "Date" ); jsonGenerator.writeStringField( "obj", DATE_TIME_INSTANCE.format( (Date) obj ) ); break; case "Integer": jsonGenerator.writeStringField( "type", "Integer" ); jsonGenerator.writeNumberField( "obj", (Integer) obj ); break; case "Long": jsonGenerator.writeStringField( "type", "Long" ); jsonGenerator.writeNumberField( "obj", (Long) obj ); break; case "Double": jsonGenerator.writeStringField( "type", "Double" ); jsonGenerator.writeNumberField( "obj", (Double) obj ); break; case "BigDecimal": jsonGenerator.writeStringField( "type", "BigDecimal" ); 
jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Boolean": jsonGenerator.writeStringField( "type", "Boolean" ); jsonGenerator.writeBooleanField( "obj", (Boolean) obj ); break; case "byte[]": jsonGenerator.writeStringField( "type", "byte[]" ); jsonGenerator.writeStringField( "obj", new String( ( (byte[]) obj ), "UTF-8" ) ); break; default: if ( obj instanceof Serializable ) { jsonGenerator.writeStringField( "type", "Object" ); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream( outputStream ); objectOutputStream.writeObject( obj ); objectOutputStream.close(); outputStream.close(); byte[] bytes = outputStream.toByteArray(); jsonGenerator.writeStringField( "obj", Base64.encodeBase64String( bytes ) ); } } jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } } ); module.addDeserializer( DataEvent.class, new StdNodeBasedDeserializer<DataEvent>( DataEvent.class ) { @Override public DataEvent convert( JsonNode jsonNode, DeserializationContext deserializationContext ) throws IOException { Rows.TYPE type = Rows.TYPE.valueOf( jsonNode.get( "type" ).asText() ); Rows.STATE state = Rows.STATE.valueOf( jsonNode.get( "state" ).asText() ); List<Row> rows = new ArrayList<>(); JsonNode json_rows = jsonNode.get( "rows" ); for ( JsonNode row : json_rows ) { List<Class> types = new ArrayList<>(); List<String> names = new ArrayList<>(); for ( JsonNode name : row.get( "names" ) ) { names.add( name.asText() ); } List<Object> objects = new ArrayList<>(); for ( JsonNode obj : row.get( "objects" ) ) { JsonNode t = obj.get( "type" ); JsonNode rawObject = obj.get( "obj" ); Object object = null; String objType = t.asText(); switch ( objType ) { case "Null": types.add( Void.class ); break; case "String": types.add( String.class ); object = rawObject.asText(); break; case "Integer": 
types.add( Integer.class ); object = rawObject.asInt(); break; case "Long": types.add( Long.class ); object = rawObject.asLong(); break; case "Date": types.add( Date.class ); try { object = DATE_TIME_INSTANCE.parse( rawObject.asText() ); } catch ( ParseException e ) { e.printStackTrace(); } break; case "Double": types.add( Double.class ); object = rawObject.asDouble(); break; case "BigDecimal": types.add( BigDecimal.class ); object = new BigDecimal( rawObject.asText() ); break; case "Boolean": types.add( Boolean.class ); object = rawObject.asBoolean(); break; case "byte[]": types.add( byte[].class ); object = rawObject.asText().getBytes( "UTF-8" ); break; case "Object": try { types.add( Object.class ); object = new ObjectInputStream( new ByteArrayInputStream( Base64.decodeBase64( rawObject.asText() ) ) ).readObject(); } catch ( ClassNotFoundException e ) { e.printStackTrace(); } break; } objects.add( object ); } Row r = new DeserializedRow( names, types, objects ); rows.add( r ); } Rows rowsObj = new Rows( rows, type, state ); return new DataEvent( new RemoteSource( jsonNode.get( "model-id" ).asText() ), rowsObj ); } } ); mapper.registerModule( module ); } DataEventSerializer(); }
DataEventSerializer extends BaseSerializer<DataEvent> { public DataEventSerializer() { super( DataEvent.class ); SimpleModule module = new SimpleModule(); module.addSerializer( DataEvent.class, new JsonSerializer<DataEvent>() { @Override public void serialize( DataEvent dataEvent, JsonGenerator jsonGenerator, SerializerProvider serializerProvider ) throws IOException, JsonProcessingException { jsonGenerator.writeStartObject(); Rows rows = (Rows) dataEvent.getData(); jsonGenerator.writeStringField( "model-id", dataEvent.getSource().getId() ); jsonGenerator.writeStringField( "type", rows.getType().toString() ); jsonGenerator.writeStringField( "state", rows.getState().toString() ); jsonGenerator.writeArrayFieldStart( "rows" ); for ( Row row : rows ) { jsonGenerator.writeStartObject(); jsonGenerator.writeArrayFieldStart( "names" ); for ( String name : row.getColumnNames() ) { jsonGenerator.writeString( name ); } jsonGenerator.writeEndArray(); jsonGenerator.writeArrayFieldStart( "objects" ); for ( Object obj : row.getObjects() ) { jsonGenerator.writeStartObject(); if ( obj == null ) { jsonGenerator.writeStringField( "type", "Null" ); jsonGenerator.writeEndObject(); continue; } switch ( obj.getClass().getSimpleName() ) { case "String": jsonGenerator.writeStringField( "type", "String" ); jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Date": jsonGenerator.writeStringField( "type", "Date" ); jsonGenerator.writeStringField( "obj", DATE_TIME_INSTANCE.format( (Date) obj ) ); break; case "Integer": jsonGenerator.writeStringField( "type", "Integer" ); jsonGenerator.writeNumberField( "obj", (Integer) obj ); break; case "Long": jsonGenerator.writeStringField( "type", "Long" ); jsonGenerator.writeNumberField( "obj", (Long) obj ); break; case "Double": jsonGenerator.writeStringField( "type", "Double" ); jsonGenerator.writeNumberField( "obj", (Double) obj ); break; case "BigDecimal": jsonGenerator.writeStringField( "type", "BigDecimal" ); 
jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Boolean": jsonGenerator.writeStringField( "type", "Boolean" ); jsonGenerator.writeBooleanField( "obj", (Boolean) obj ); break; case "byte[]": jsonGenerator.writeStringField( "type", "byte[]" ); jsonGenerator.writeStringField( "obj", new String( ( (byte[]) obj ), "UTF-8" ) ); break; default: if ( obj instanceof Serializable ) { jsonGenerator.writeStringField( "type", "Object" ); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream( outputStream ); objectOutputStream.writeObject( obj ); objectOutputStream.close(); outputStream.close(); byte[] bytes = outputStream.toByteArray(); jsonGenerator.writeStringField( "obj", Base64.encodeBase64String( bytes ) ); } } jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } } ); module.addDeserializer( DataEvent.class, new StdNodeBasedDeserializer<DataEvent>( DataEvent.class ) { @Override public DataEvent convert( JsonNode jsonNode, DeserializationContext deserializationContext ) throws IOException { Rows.TYPE type = Rows.TYPE.valueOf( jsonNode.get( "type" ).asText() ); Rows.STATE state = Rows.STATE.valueOf( jsonNode.get( "state" ).asText() ); List<Row> rows = new ArrayList<>(); JsonNode json_rows = jsonNode.get( "rows" ); for ( JsonNode row : json_rows ) { List<Class> types = new ArrayList<>(); List<String> names = new ArrayList<>(); for ( JsonNode name : row.get( "names" ) ) { names.add( name.asText() ); } List<Object> objects = new ArrayList<>(); for ( JsonNode obj : row.get( "objects" ) ) { JsonNode t = obj.get( "type" ); JsonNode rawObject = obj.get( "obj" ); Object object = null; String objType = t.asText(); switch ( objType ) { case "Null": types.add( Void.class ); break; case "String": types.add( String.class ); object = rawObject.asText(); break; case "Integer": 
types.add( Integer.class ); object = rawObject.asInt(); break; case "Long": types.add( Long.class ); object = rawObject.asLong(); break; case "Date": types.add( Date.class ); try { object = DATE_TIME_INSTANCE.parse( rawObject.asText() ); } catch ( ParseException e ) { e.printStackTrace(); } break; case "Double": types.add( Double.class ); object = rawObject.asDouble(); break; case "BigDecimal": types.add( BigDecimal.class ); object = new BigDecimal( rawObject.asText() ); break; case "Boolean": types.add( Boolean.class ); object = rawObject.asBoolean(); break; case "byte[]": types.add( byte[].class ); object = rawObject.asText().getBytes( "UTF-8" ); break; case "Object": try { types.add( Object.class ); object = new ObjectInputStream( new ByteArrayInputStream( Base64.decodeBase64( rawObject.asText() ) ) ).readObject(); } catch ( ClassNotFoundException e ) { e.printStackTrace(); } break; } objects.add( object ); } Row r = new DeserializedRow( names, types, objects ); rows.add( r ); } Rows rowsObj = new Rows( rows, type, state ); return new DataEvent( new RemoteSource( jsonNode.get( "model-id" ).asText() ), rowsObj ); } } ); mapper.registerModule( module ); } DataEventSerializer(); }
DataEventSerializer extends BaseSerializer<DataEvent> { public DataEventSerializer() { super( DataEvent.class ); SimpleModule module = new SimpleModule(); module.addSerializer( DataEvent.class, new JsonSerializer<DataEvent>() { @Override public void serialize( DataEvent dataEvent, JsonGenerator jsonGenerator, SerializerProvider serializerProvider ) throws IOException, JsonProcessingException { jsonGenerator.writeStartObject(); Rows rows = (Rows) dataEvent.getData(); jsonGenerator.writeStringField( "model-id", dataEvent.getSource().getId() ); jsonGenerator.writeStringField( "type", rows.getType().toString() ); jsonGenerator.writeStringField( "state", rows.getState().toString() ); jsonGenerator.writeArrayFieldStart( "rows" ); for ( Row row : rows ) { jsonGenerator.writeStartObject(); jsonGenerator.writeArrayFieldStart( "names" ); for ( String name : row.getColumnNames() ) { jsonGenerator.writeString( name ); } jsonGenerator.writeEndArray(); jsonGenerator.writeArrayFieldStart( "objects" ); for ( Object obj : row.getObjects() ) { jsonGenerator.writeStartObject(); if ( obj == null ) { jsonGenerator.writeStringField( "type", "Null" ); jsonGenerator.writeEndObject(); continue; } switch ( obj.getClass().getSimpleName() ) { case "String": jsonGenerator.writeStringField( "type", "String" ); jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Date": jsonGenerator.writeStringField( "type", "Date" ); jsonGenerator.writeStringField( "obj", DATE_TIME_INSTANCE.format( (Date) obj ) ); break; case "Integer": jsonGenerator.writeStringField( "type", "Integer" ); jsonGenerator.writeNumberField( "obj", (Integer) obj ); break; case "Long": jsonGenerator.writeStringField( "type", "Long" ); jsonGenerator.writeNumberField( "obj", (Long) obj ); break; case "Double": jsonGenerator.writeStringField( "type", "Double" ); jsonGenerator.writeNumberField( "obj", (Double) obj ); break; case "BigDecimal": jsonGenerator.writeStringField( "type", "BigDecimal" ); 
jsonGenerator.writeStringField( "obj", obj.toString() ); break; case "Boolean": jsonGenerator.writeStringField( "type", "Boolean" ); jsonGenerator.writeBooleanField( "obj", (Boolean) obj ); break; case "byte[]": jsonGenerator.writeStringField( "type", "byte[]" ); jsonGenerator.writeStringField( "obj", new String( ( (byte[]) obj ), "UTF-8" ) ); break; default: if ( obj instanceof Serializable ) { jsonGenerator.writeStringField( "type", "Object" ); ByteArrayOutputStream outputStream = new ByteArrayOutputStream(); ObjectOutputStream objectOutputStream = new ObjectOutputStream( outputStream ); objectOutputStream.writeObject( obj ); objectOutputStream.close(); outputStream.close(); byte[] bytes = outputStream.toByteArray(); jsonGenerator.writeStringField( "obj", Base64.encodeBase64String( bytes ) ); } } jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } jsonGenerator.writeEndArray(); jsonGenerator.writeEndObject(); } } ); module.addDeserializer( DataEvent.class, new StdNodeBasedDeserializer<DataEvent>( DataEvent.class ) { @Override public DataEvent convert( JsonNode jsonNode, DeserializationContext deserializationContext ) throws IOException { Rows.TYPE type = Rows.TYPE.valueOf( jsonNode.get( "type" ).asText() ); Rows.STATE state = Rows.STATE.valueOf( jsonNode.get( "state" ).asText() ); List<Row> rows = new ArrayList<>(); JsonNode json_rows = jsonNode.get( "rows" ); for ( JsonNode row : json_rows ) { List<Class> types = new ArrayList<>(); List<String> names = new ArrayList<>(); for ( JsonNode name : row.get( "names" ) ) { names.add( name.asText() ); } List<Object> objects = new ArrayList<>(); for ( JsonNode obj : row.get( "objects" ) ) { JsonNode t = obj.get( "type" ); JsonNode rawObject = obj.get( "obj" ); Object object = null; String objType = t.asText(); switch ( objType ) { case "Null": types.add( Void.class ); break; case "String": types.add( String.class ); object = rawObject.asText(); break; case "Integer": 
types.add( Integer.class ); object = rawObject.asInt(); break; case "Long": types.add( Long.class ); object = rawObject.asLong(); break; case "Date": types.add( Date.class ); try { object = DATE_TIME_INSTANCE.parse( rawObject.asText() ); } catch ( ParseException e ) { e.printStackTrace(); } break; case "Double": types.add( Double.class ); object = rawObject.asDouble(); break; case "BigDecimal": types.add( BigDecimal.class ); object = new BigDecimal( rawObject.asText() ); break; case "Boolean": types.add( Boolean.class ); object = rawObject.asBoolean(); break; case "byte[]": types.add( byte[].class ); object = rawObject.asText().getBytes( "UTF-8" ); break; case "Object": try { types.add( Object.class ); object = new ObjectInputStream( new ByteArrayInputStream( Base64.decodeBase64( rawObject.asText() ) ) ).readObject(); } catch ( ClassNotFoundException e ) { e.printStackTrace(); } break; } objects.add( object ); } Row r = new DeserializedRow( names, types, objects ); rows.add( r ); } Rows rowsObj = new Rows( rows, type, state ); return new DataEvent( new RemoteSource( jsonNode.get( "model-id" ).asText() ), rowsObj ); } } ); mapper.registerModule( module ); } DataEventSerializer(); static final DateFormat DATE_TIME_INSTANCE; }
@Test public void testGetBundle() throws Exception { assertNotNull( Messages.getBundle() ); }
public static ResourceBundle getBundle() { if ( RESOURCE_BUNDLE == null ) { RESOURCE_BUNDLE = ResourceBundle.getBundle( BUNDLE_NAME ); } return RESOURCE_BUNDLE; }
Messages { public static ResourceBundle getBundle() { if ( RESOURCE_BUNDLE == null ) { RESOURCE_BUNDLE = ResourceBundle.getBundle( BUNDLE_NAME ); } return RESOURCE_BUNDLE; } }
Messages { public static ResourceBundle getBundle() { if ( RESOURCE_BUNDLE == null ) { RESOURCE_BUNDLE = ResourceBundle.getBundle( BUNDLE_NAME ); } return RESOURCE_BUNDLE; } private Messages(); }
Messages { public static ResourceBundle getBundle() { if ( RESOURCE_BUNDLE == null ) { RESOURCE_BUNDLE = ResourceBundle.getBundle( BUNDLE_NAME ); } return RESOURCE_BUNDLE; } private Messages(); static ResourceBundle getBundle(); static String getString( String key ); static String getString( String key, String param1 ); static String getString( String key, String param1, String param2 ); static String getString( String key, String param1, String param2, String param3 ); static String getString( String key, String param1, String param2, String param3, String param4 ); }
Messages { public static ResourceBundle getBundle() { if ( RESOURCE_BUNDLE == null ) { RESOURCE_BUNDLE = ResourceBundle.getBundle( BUNDLE_NAME ); } return RESOURCE_BUNDLE; } private Messages(); static ResourceBundle getBundle(); static String getString( String key ); static String getString( String key, String param1 ); static String getString( String key, String param1, String param2 ); static String getString( String key, String param1, String param2, String param3 ); static String getString( String key, String param1, String param2, String param3, String param4 ); }
@Test public void testGetRepoList_nameMask() throws KettleException { init( repo, "/", true, "Trans.*", "", All, 2 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_withoutNameMask() throws KettleException { init( repo, "/", true, "", "", All, 4 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_excludeNameMask() throws KettleException { init( repo, "/", true, ".*", "Trans1.*", All, 3 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_includeSubfolders_Extended() throws KettleException { init( repoExtended, "/", true, ".*", "", All, 4 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_excludeSubfolders_Extended() throws KettleException { init( repoExtended, "/", false, ".*", "", All, 0 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_transOnly_Extended() throws KettleException { init( repoExtended, "/", true, ".*", "", Transformations, 2 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_jobsOnly_Extended() throws KettleException { init( repoExtended, "/", true, ".*", "", Jobs, 2 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_nameMask_Extended() throws KettleException { init( repoExtended, "/", true, "Trans.*", "", All, 2 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_withoutNameMask_Extended() throws KettleException { init( repoExtended, "/", true, "", "", All, 4 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetRepoList_excludeNameMask_Extended() throws KettleException { init( repoExtended, "/", true, ".*", "Trans1.*", All, 3 ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void equals() throws Exception { principal1 = new ActingPrincipal( "suzy" ); principal2 = new ActingPrincipal( "joe" ); assertFalse( principal1.equals( principal2 ) ); assertFalse( principal1.equals( ActingPrincipal.ANONYMOUS ) ); principal2 = new ActingPrincipal( "suzy" ); assertTrue( principal1.equals( principal2 ) ); principal2 = ActingPrincipal.ANONYMOUS; assertTrue( principal2.equals( ActingPrincipal.ANONYMOUS ) ); }
public boolean equals( Object other ) { if ( other instanceof ActingPrincipal ) { ActingPrincipal that = (ActingPrincipal) other; return ( this.isAnonymous() && that.isAnonymous() ) || ( this.getName() != null && this.getName().equals( that.getName() ) ); } else { return false; } }
ActingPrincipal implements Principal, Serializable { public boolean equals( Object other ) { if ( other instanceof ActingPrincipal ) { ActingPrincipal that = (ActingPrincipal) other; return ( this.isAnonymous() && that.isAnonymous() ) || ( this.getName() != null && this.getName().equals( that.getName() ) ); } else { return false; } } }
ActingPrincipal implements Principal, Serializable { public boolean equals( Object other ) { if ( other instanceof ActingPrincipal ) { ActingPrincipal that = (ActingPrincipal) other; return ( this.isAnonymous() && that.isAnonymous() ) || ( this.getName() != null && this.getName().equals( that.getName() ) ); } else { return false; } } ActingPrincipal( String name ); private ActingPrincipal(); }
ActingPrincipal implements Principal, Serializable { public boolean equals( Object other ) { if ( other instanceof ActingPrincipal ) { ActingPrincipal that = (ActingPrincipal) other; return ( this.isAnonymous() && that.isAnonymous() ) || ( this.getName() != null && this.getName().equals( that.getName() ) ); } else { return false; } } ActingPrincipal( String name ); private ActingPrincipal(); @Override String getName(); boolean equals( Object other ); String toString(); int hashCode(); boolean isAnonymous(); }
ActingPrincipal implements Principal, Serializable { public boolean equals( Object other ) { if ( other instanceof ActingPrincipal ) { ActingPrincipal that = (ActingPrincipal) other; return ( this.isAnonymous() && that.isAnonymous() ) || ( this.getName() != null && this.getName().equals( that.getName() ) ); } else { return false; } } ActingPrincipal( String name ); private ActingPrincipal(); @Override String getName(); boolean equals( Object other ); String toString(); int hashCode(); boolean isAnonymous(); static final ActingPrincipal ANONYMOUS; }
@Test public void testShowHidden() throws KettleException { IUser user = Mockito.mock( IUser.class ); Mockito.when( user.isAdmin() ).thenReturn( true ); Mockito.when( repoExtended.getUserInfo() ).thenReturn( user ); init( repoExtended, "/", false, ".*", "", All, 0 ); Mockito.verify( repoExtended, Mockito.never() ) .loadRepositoryDirectoryTree( Mockito.anyString(), Mockito.anyString(), Mockito.anyInt(), Mockito.eq( false ), Mockito.anyBoolean(), anyBoolean() ); Mockito.when( user.isAdmin() ).thenReturn( false ); init( repoExtended, "/", false, ".*", "", All, 0 ); Mockito.verify( repoExtended ) .loadRepositoryDirectoryTree( Mockito.anyString(), Mockito.anyString(), Mockito.anyInt(), Mockito.eq( false ), Mockito.anyBoolean(), Mockito.anyBoolean() ); }
@Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
GetRepositoryNames extends BaseStep implements StepInterface { @Override public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (GetRepositoryNamesMeta) smi; data = (GetRepositoryNamesData) sdi; if ( super.init( smi, sdi ) ) { try { data.list = getRepositoryObjects(); } catch ( Exception e ) { logError( "Error initializing step: ", e ); return false; } data.rownr = 1L; data.filenr = 0; return true; } return false; } GetRepositoryNames( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); }
@Test public void testGetFields() throws Exception { StringOperationsMeta meta = new StringOperationsMeta(); meta.allocate( 1 ); meta.setFieldInStream( new String[] { "field1" } ); RowMetaInterface rowMetaInterface = new RowMeta(); ValueMetaInterface valueMeta = new ValueMetaString( "field1" ); valueMeta.setStorageMetadata( new ValueMetaString( "field1" ) ); valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_BINARY_STRING ); rowMetaInterface.addValueMeta( valueMeta ); VariableSpace space = mock( VariableSpace.class ); meta.getFields( rowMetaInterface, "STRING_OPERATIONS", null, null, space, null, null ); RowMetaInterface expectedRowMeta = new RowMeta(); expectedRowMeta.addValueMeta( new ValueMetaString( "field1" ) ); assertEquals( expectedRowMeta.toString(), rowMetaInterface.toString() ); }
@Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { for ( int i = 0; i < fieldOutStream.length; i++ ) { ValueMetaInterface v; String outputField = space.environmentSubstitute( fieldOutStream[i] ); if ( !Utils.isEmpty( outputField ) ) { v = new ValueMetaString( outputField ); v.setLength( 100, -1 ); v.setOrigin( name ); inputRowMeta.addValueMeta( v ); } else { v = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( v == null ) { continue; } v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); int paddingType = getPaddingType()[i]; if ( paddingType == PADDING_LEFT || paddingType == PADDING_RIGHT ) { int padLen = Const.toInt( space.environmentSubstitute( getPadLen()[i] ), 0 ); if ( padLen > v.getLength() ) { v.setLength( padLen ); } } } } }
StringOperationsMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { for ( int i = 0; i < fieldOutStream.length; i++ ) { ValueMetaInterface v; String outputField = space.environmentSubstitute( fieldOutStream[i] ); if ( !Utils.isEmpty( outputField ) ) { v = new ValueMetaString( outputField ); v.setLength( 100, -1 ); v.setOrigin( name ); inputRowMeta.addValueMeta( v ); } else { v = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( v == null ) { continue; } v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); int paddingType = getPaddingType()[i]; if ( paddingType == PADDING_LEFT || paddingType == PADDING_RIGHT ) { int padLen = Const.toInt( space.environmentSubstitute( getPadLen()[i] ), 0 ); if ( padLen > v.getLength() ) { v.setLength( padLen ); } } } } } }
StringOperationsMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { for ( int i = 0; i < fieldOutStream.length; i++ ) { ValueMetaInterface v; String outputField = space.environmentSubstitute( fieldOutStream[i] ); if ( !Utils.isEmpty( outputField ) ) { v = new ValueMetaString( outputField ); v.setLength( 100, -1 ); v.setOrigin( name ); inputRowMeta.addValueMeta( v ); } else { v = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( v == null ) { continue; } v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); int paddingType = getPaddingType()[i]; if ( paddingType == PADDING_LEFT || paddingType == PADDING_RIGHT ) { int padLen = Const.toInt( space.environmentSubstitute( getPadLen()[i] ), 0 ); if ( padLen > v.getLength() ) { v.setLength( padLen ); } } } } } StringOperationsMeta(); }
StringOperationsMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { for ( int i = 0; i < fieldOutStream.length; i++ ) { ValueMetaInterface v; String outputField = space.environmentSubstitute( fieldOutStream[i] ); if ( !Utils.isEmpty( outputField ) ) { v = new ValueMetaString( outputField ); v.setLength( 100, -1 ); v.setOrigin( name ); inputRowMeta.addValueMeta( v ); } else { v = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( v == null ) { continue; } v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); int paddingType = getPaddingType()[i]; if ( paddingType == PADDING_LEFT || paddingType == PADDING_RIGHT ) { int padLen = Const.toInt( space.environmentSubstitute( getPadLen()[i] ), 0 ); if ( padLen > v.getLength() ) { v.setLength( padLen ); } } } } } StringOperationsMeta(); String[] getFieldInStream(); void setFieldInStream( String[] keyStream ); String[] getFieldOutStream(); void setFieldOutStream( String[] keyStream ); String[] getPadLen(); void setPadLen( String[] value ); String[] getPadChar(); void setPadChar( String[] value ); int[] getTrimType(); void setTrimType( int[] trimType ); int[] getLowerUpper(); void setLowerUpper( int[] lowerUpper ); int[] getInitCap(); void setInitCap( int[] value ); int[] getMaskXML(); void setMaskXML( int[] value ); int[] getDigits(); void setDigits( int[] value ); int[] getRemoveSpecialCharacters(); void setRemoveSpecialCharacters( int[] value ); int[] getPaddingType(); void setPaddingType( int[] value ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrkeys ); @Override Object clone(); @Override void setDefault(); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, 
List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); @Override void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); @Override boolean supportsErrorHandling(); static String getTrimTypeDesc( int i ); static String getLowerUpperDesc( int i ); static String getInitCapDesc( int i ); static String getMaskXMLDesc( int i ); static String getDigitsDesc( int i ); static String getRemoveSpecialCharactersDesc( int i ); static String getPaddingDesc( int i ); static int getTrimTypeByDesc( String tt ); static int getLowerUpperByDesc( String tt ); static int getInitCapByDesc( String tt ); static int getMaskXMLByDesc( String tt ); static int getDigitsByDesc( String tt ); static int getRemoveSpecialCharactersByDesc( String tt ); static int getPaddingByDesc( String tt ); }
StringOperationsMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { for ( int i = 0; i < fieldOutStream.length; i++ ) { ValueMetaInterface v; String outputField = space.environmentSubstitute( fieldOutStream[i] ); if ( !Utils.isEmpty( outputField ) ) { v = new ValueMetaString( outputField ); v.setLength( 100, -1 ); v.setOrigin( name ); inputRowMeta.addValueMeta( v ); } else { v = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( v == null ) { continue; } v.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); int paddingType = getPaddingType()[i]; if ( paddingType == PADDING_LEFT || paddingType == PADDING_RIGHT ) { int padLen = Const.toInt( space.environmentSubstitute( getPadLen()[i] ), 0 ); if ( padLen > v.getLength() ) { v.setLength( padLen ); } } } } } StringOperationsMeta(); String[] getFieldInStream(); void setFieldInStream( String[] keyStream ); String[] getFieldOutStream(); void setFieldOutStream( String[] keyStream ); String[] getPadLen(); void setPadLen( String[] value ); String[] getPadChar(); void setPadChar( String[] value ); int[] getTrimType(); void setTrimType( int[] trimType ); int[] getLowerUpper(); void setLowerUpper( int[] lowerUpper ); int[] getInitCap(); void setInitCap( int[] value ); int[] getMaskXML(); void setMaskXML( int[] value ); int[] getDigits(); void setDigits( int[] value ); int[] getRemoveSpecialCharacters(); void setRemoveSpecialCharacters( int[] value ); int[] getPaddingType(); void setPaddingType( int[] value ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrkeys ); @Override Object clone(); @Override void setDefault(); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, 
List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); @Override void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); @Override boolean supportsErrorHandling(); static String getTrimTypeDesc( int i ); static String getLowerUpperDesc( int i ); static String getInitCapDesc( int i ); static String getMaskXMLDesc( int i ); static String getDigitsDesc( int i ); static String getRemoveSpecialCharactersDesc( int i ); static String getPaddingDesc( int i ); static int getTrimTypeByDesc( String tt ); static int getLowerUpperByDesc( String tt ); static int getInitCapByDesc( String tt ); static int getMaskXMLByDesc( String tt ); static int getDigitsByDesc( String tt ); static int getRemoveSpecialCharactersByDesc( String tt ); static int getPaddingByDesc( String tt ); static final String[] trimTypeCode; static final int TRIM_NONE; static final int TRIM_LEFT; static final int TRIM_RIGHT; static final int TRIM_BOTH; static final String[] trimTypeDesc; static final String[] lowerUpperCode; static final int LOWER_UPPER_NONE; static final int LOWER_UPPER_LOWER; static final int LOWER_UPPER_UPPER; static final String[] lowerUpperDesc; static final String[] initCapDesc; static final String[] initCapCode; static final int INIT_CAP_NO; static final int INIT_CAP_YES; static final String[] digitsCode; static final int DIGITS_NONE; static final int DIGITS_ONLY; static final 
int DIGITS_REMOVE; static final String[] digitsDesc; static final String[] maskXMLDesc; static final String[] maskXMLCode; static final int MASK_NONE; static final int MASK_ESCAPE_XML; static final int MASK_CDATA; static final int MASK_UNESCAPE_XML; static final int MASK_ESCAPE_SQL; static final int MASK_ESCAPE_HTML; static final int MASK_UNESCAPE_HTML; static final String[] removeSpecialCharactersCode; static final int REMOVE_SPECIAL_CHARACTERS_NONE; static final int REMOVE_SPECIAL_CHARACTERS_CR; static final int REMOVE_SPECIAL_CHARACTERS_LF; static final int REMOVE_SPECIAL_CHARACTERS_CRLF; static final int REMOVE_SPECIAL_CHARACTERS_TAB; static final int REMOVE_SPECIAL_CHARACTERS_ESPACE; static final String[] removeSpecialCharactersDesc; static final String[] paddingDesc; static final String[] paddingCode; static final int PADDING_NONE; static final int PADDING_LEFT; static final int PADDING_RIGHT; }
@Test public void testCreateDatabase() throws IOException { assertNull( data.db ); data.createDatabase( mdbFile ); assertNotNull( data.db ); assertTrue( mdbFile.exists() ); assertNull( data.table ); data.truncateTable(); assertNull( data.table ); data.closeDatabase(); }
void createDatabase( File databaseFile ) throws IOException { db = Database.create( databaseFile ); }
AccessOutputData extends BaseStepData implements StepDataInterface { void createDatabase( File databaseFile ) throws IOException { db = Database.create( databaseFile ); } }
AccessOutputData extends BaseStepData implements StepDataInterface { void createDatabase( File databaseFile ) throws IOException { db = Database.create( databaseFile ); } AccessOutputData(); }
AccessOutputData extends BaseStepData implements StepDataInterface { void createDatabase( File databaseFile ) throws IOException { db = Database.create( databaseFile ); } AccessOutputData(); }
AccessOutputData extends BaseStepData implements StepDataInterface { void createDatabase( File databaseFile ) throws IOException { db = Database.create( databaseFile ); } AccessOutputData(); public Database db; public Table table; public List<Object[]> rows; public RowMetaInterface outputRowMeta; public boolean oneFileOpened; }
@Test public void testCreateTable() throws IOException { data.createDatabase( mdbFile ); data.createTable( "thisSampleTable", generateRowMeta() ); assertTrue( data.db.getTableNames().contains( "thisSampleTable" ) ); data.closeDatabase(); }
void createTable( String tableName, RowMetaInterface rowMeta ) throws IOException { List<Column> columns = AccessOutputMeta.getColumns( rowMeta ); db.createTable( tableName, columns ); table = db.getTable( tableName ); }
AccessOutputData extends BaseStepData implements StepDataInterface { void createTable( String tableName, RowMetaInterface rowMeta ) throws IOException { List<Column> columns = AccessOutputMeta.getColumns( rowMeta ); db.createTable( tableName, columns ); table = db.getTable( tableName ); } }
AccessOutputData extends BaseStepData implements StepDataInterface { void createTable( String tableName, RowMetaInterface rowMeta ) throws IOException { List<Column> columns = AccessOutputMeta.getColumns( rowMeta ); db.createTable( tableName, columns ); table = db.getTable( tableName ); } AccessOutputData(); }
AccessOutputData extends BaseStepData implements StepDataInterface { void createTable( String tableName, RowMetaInterface rowMeta ) throws IOException { List<Column> columns = AccessOutputMeta.getColumns( rowMeta ); db.createTable( tableName, columns ); table = db.getTable( tableName ); } AccessOutputData(); }
AccessOutputData extends BaseStepData implements StepDataInterface { void createTable( String tableName, RowMetaInterface rowMeta ) throws IOException { List<Column> columns = AccessOutputMeta.getColumns( rowMeta ); db.createTable( tableName, columns ); table = db.getTable( tableName ); } AccessOutputData(); public Database db; public Table table; public List<Object[]> rows; public RowMetaInterface outputRowMeta; public boolean oneFileOpened; }
@Test public void testTruncateTable() throws IOException { data.createDatabase( mdbFile ); data.createTable( "TruncatingThisTable", generateRowMeta() ); data.addRowsToTable( generateRowData( 10 ) ); assertEquals( 10, data.table.getRowCount() ); data.truncateTable(); assertEquals( 0, data.table.getRowCount() ); data.addRowToTable( generateRowData( 1 ).get( 0 ) ); assertEquals( 1, data.table.getRowCount() ); data.closeDatabase(); }
void truncateTable() throws IOException { if ( table == null ) { return; } Cursor tableRows = Cursor.createCursor( table ); while ( tableRows.moveToNextRow() ) { tableRows.deleteCurrentRow(); } }
AccessOutputData extends BaseStepData implements StepDataInterface { void truncateTable() throws IOException { if ( table == null ) { return; } Cursor tableRows = Cursor.createCursor( table ); while ( tableRows.moveToNextRow() ) { tableRows.deleteCurrentRow(); } } }
AccessOutputData extends BaseStepData implements StepDataInterface { void truncateTable() throws IOException { if ( table == null ) { return; } Cursor tableRows = Cursor.createCursor( table ); while ( tableRows.moveToNextRow() ) { tableRows.deleteCurrentRow(); } } AccessOutputData(); }
AccessOutputData extends BaseStepData implements StepDataInterface { void truncateTable() throws IOException { if ( table == null ) { return; } Cursor tableRows = Cursor.createCursor( table ); while ( tableRows.moveToNextRow() ) { tableRows.deleteCurrentRow(); } } AccessOutputData(); }
AccessOutputData extends BaseStepData implements StepDataInterface { void truncateTable() throws IOException { if ( table == null ) { return; } Cursor tableRows = Cursor.createCursor( table ); while ( tableRows.moveToNextRow() ) { tableRows.deleteCurrentRow(); } } AccessOutputData(); public Database db; public Table table; public List<Object[]> rows; public RowMetaInterface outputRowMeta; public boolean oneFileOpened; }
@Test public void testGetXML() { OraBulkLoaderMeta oraBulkLoaderMeta = new OraBulkLoaderMeta(); oraBulkLoaderMeta.setFieldTable( new String[] { "fieldTable1", "fieldTable2" } ); oraBulkLoaderMeta.setFieldStream( new String[] { "fieldStreamValue1" } ); oraBulkLoaderMeta.setDateMask( new String[] {} ); oraBulkLoaderMeta.afterInjectionSynchronization(); oraBulkLoaderMeta.getXML(); Assert.assertEquals( oraBulkLoaderMeta.getFieldStream().length, oraBulkLoaderMeta.getDateMask().length ); }
public String getXML() { StringBuilder retval = new StringBuilder( 300 ); retval .append( " " ).append( XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "commit", commitSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bind_size", bindSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "read_size", readSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "errors", maxErrors ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_method", loadMethod ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_action", loadAction ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sqlldr", sqlldr ) ); retval.append( " " ).append( XMLHandler.addTagValue( "control_file", controlFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "data_file", dataFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "log_file", logFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_file", badFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "discard_file", discardFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "direct_path", directPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "erase_files", eraseFiles ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dbname_override", dbNameOverride ) ); retval.append( " " ).append( XMLHandler.addTagValue( "character_set", characterSetName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fail_on_warning", failOnWarning ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fail_on_error", failOnError ) ); retval.append( " " ).append( XMLHandler.addTagValue( "parallel", 
parallel ) ); retval.append( " " ).append( XMLHandler.addTagValue( "alt_rec_term", altRecordTerm ) ); for ( int i = 0; i < fieldTable.length; i++ ) { retval.append( " <mapping>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldTable[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_name", fieldStream[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_mask", dateMask[i] ) ); retval.append( " </mapping>" ).append( Const.CR ); } return retval.toString(); }
OraBulkLoaderMeta extends BaseStepMeta implements StepMetaInterface, ProvidesDatabaseConnectionInformation { public String getXML() { StringBuilder retval = new StringBuilder( 300 ); retval .append( " " ).append( XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "commit", commitSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bind_size", bindSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "read_size", readSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "errors", maxErrors ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_method", loadMethod ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_action", loadAction ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sqlldr", sqlldr ) ); retval.append( " " ).append( XMLHandler.addTagValue( "control_file", controlFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "data_file", dataFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "log_file", logFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_file", badFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "discard_file", discardFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "direct_path", directPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "erase_files", eraseFiles ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dbname_override", dbNameOverride ) ); retval.append( " " ).append( XMLHandler.addTagValue( "character_set", characterSetName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fail_on_warning", failOnWarning ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "fail_on_error", failOnError ) ); retval.append( " " ).append( XMLHandler.addTagValue( "parallel", parallel ) ); retval.append( " " ).append( XMLHandler.addTagValue( "alt_rec_term", altRecordTerm ) ); for ( int i = 0; i < fieldTable.length; i++ ) { retval.append( " <mapping>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldTable[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_name", fieldStream[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_mask", dateMask[i] ) ); retval.append( " </mapping>" ).append( Const.CR ); } return retval.toString(); } }
OraBulkLoaderMeta extends BaseStepMeta implements StepMetaInterface, ProvidesDatabaseConnectionInformation { public String getXML() { StringBuilder retval = new StringBuilder( 300 ); retval .append( " " ).append( XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "commit", commitSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bind_size", bindSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "read_size", readSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "errors", maxErrors ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_method", loadMethod ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_action", loadAction ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sqlldr", sqlldr ) ); retval.append( " " ).append( XMLHandler.addTagValue( "control_file", controlFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "data_file", dataFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "log_file", logFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_file", badFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "discard_file", discardFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "direct_path", directPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "erase_files", eraseFiles ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dbname_override", dbNameOverride ) ); retval.append( " " ).append( XMLHandler.addTagValue( "character_set", characterSetName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fail_on_warning", failOnWarning ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "fail_on_error", failOnError ) ); retval.append( " " ).append( XMLHandler.addTagValue( "parallel", parallel ) ); retval.append( " " ).append( XMLHandler.addTagValue( "alt_rec_term", altRecordTerm ) ); for ( int i = 0; i < fieldTable.length; i++ ) { retval.append( " <mapping>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldTable[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_name", fieldStream[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_mask", dateMask[i] ) ); retval.append( " </mapping>" ).append( Const.CR ); } return retval.toString(); } OraBulkLoaderMeta(); }
OraBulkLoaderMeta extends BaseStepMeta implements StepMetaInterface, ProvidesDatabaseConnectionInformation { public String getXML() { StringBuilder retval = new StringBuilder( 300 ); retval .append( " " ).append( XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "commit", commitSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bind_size", bindSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "read_size", readSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "errors", maxErrors ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_method", loadMethod ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_action", loadAction ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sqlldr", sqlldr ) ); retval.append( " " ).append( XMLHandler.addTagValue( "control_file", controlFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "data_file", dataFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "log_file", logFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_file", badFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "discard_file", discardFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "direct_path", directPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "erase_files", eraseFiles ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dbname_override", dbNameOverride ) ); retval.append( " " ).append( XMLHandler.addTagValue( "character_set", characterSetName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fail_on_warning", failOnWarning ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "fail_on_error", failOnError ) ); retval.append( " " ).append( XMLHandler.addTagValue( "parallel", parallel ) ); retval.append( " " ).append( XMLHandler.addTagValue( "alt_rec_term", altRecordTerm ) ); for ( int i = 0; i < fieldTable.length; i++ ) { retval.append( " <mapping>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldTable[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_name", fieldStream[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_mask", dateMask[i] ) ); retval.append( " </mapping>" ).append( Const.CR ); } return retval.toString(); } OraBulkLoaderMeta(); @Injection( name = "CONNECTION_NAME" ) void setConnection( String connectionName ); int getCommitSizeAsInt( VariableSpace varSpace ); String getCommitSize(); void setCommitSize( String commitSize ); DatabaseMeta getDatabaseMeta(); void setDatabaseMeta( DatabaseMeta database ); String getTableName(); void setTableName( String tableName ); String getSqlldr(); void setSqlldr( String sqlldr ); String[] getFieldTable(); void setFieldTable( String[] updateLookup ); String[] getFieldStream(); void setFieldStream( String[] updateStream ); String[] getDateMask(); void setDateMask( String[] dateMask ); boolean isFailOnWarning(); void setFailOnWarning( boolean failOnWarning ); boolean isFailOnError(); void setFailOnError( boolean failOnError ); String getCharacterSetName(); void setCharacterSetName( String characterSetName ); String getAltRecordTerm(); void setAltRecordTerm( String altRecordTerm ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrvalues ); Object clone(); void setDefault(); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void getFields( RowMetaInterface rowMeta, String 
origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore ); void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); DatabaseMeta[] getUsedDatabaseConnections(); boolean isDirectPath(); void setDirectPath( boolean directPath ); RowMetaInterface getRequiredFields( VariableSpace space ); String getSchemaName(); void setSchemaName( String schemaName ); String getBadFile(); void setBadFile( String badFile ); String getControlFile(); void setControlFile( String controlFile ); String getDataFile(); void setDataFile( String dataFile ); String getDiscardFile(); void setDiscardFile( String discardFile ); String getLogFile(); void setLogFile( String logFile ); void setLoadAction( String action ); String getLoadAction(); void setLoadMethod( String method ); String getLoadMethod(); String getEncoding(); void setEncoding( String encoding ); String getDelimiter(); String getEnclosure(); boolean isEraseFiles(); void setEraseFiles( boolean eraseFiles ); int getBindSizeAsInt( VariableSpace varSpace ); String getBindSize(); void setBindSize( String bindSize ); int getMaxErrorsAsInt( VariableSpace varSpace ); String getMaxErrors(); void setMaxErrors( String maxErrors ); int getReadSizeAsInt( VariableSpace varSpace ); String getReadSize(); void 
setReadSize( String readSize ); String getDbNameOverride(); void setDbNameOverride( String dbNameOverride ); boolean isParallel(); void setParallel( boolean parallel ); @Override String getMissingDatabaseConnectionInformationMessage(); @AfterInjection void afterInjectionSynchronization(); }
OraBulkLoaderMeta extends BaseStepMeta implements StepMetaInterface, ProvidesDatabaseConnectionInformation { public String getXML() { StringBuilder retval = new StringBuilder( 300 ); retval .append( " " ).append( XMLHandler.addTagValue( "connection", databaseMeta == null ? "" : databaseMeta.getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "commit", commitSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bind_size", bindSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "read_size", readSize ) ); retval.append( " " ).append( XMLHandler.addTagValue( "errors", maxErrors ) ); retval.append( " " ).append( XMLHandler.addTagValue( "schema", schemaName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "table", tableName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_method", loadMethod ) ); retval.append( " " ).append( XMLHandler.addTagValue( "load_action", loadAction ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sqlldr", sqlldr ) ); retval.append( " " ).append( XMLHandler.addTagValue( "control_file", controlFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "data_file", dataFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "log_file", logFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_file", badFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "discard_file", discardFile ) ); retval.append( " " ).append( XMLHandler.addTagValue( "direct_path", directPath ) ); retval.append( " " ).append( XMLHandler.addTagValue( "erase_files", eraseFiles ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " ).append( XMLHandler.addTagValue( "dbname_override", dbNameOverride ) ); retval.append( " " ).append( XMLHandler.addTagValue( "character_set", characterSetName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "fail_on_warning", failOnWarning ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "fail_on_error", failOnError ) ); retval.append( " " ).append( XMLHandler.addTagValue( "parallel", parallel ) ); retval.append( " " ).append( XMLHandler.addTagValue( "alt_rec_term", altRecordTerm ) ); for ( int i = 0; i < fieldTable.length; i++ ) { retval.append( " <mapping>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "stream_name", fieldTable[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "field_name", fieldStream[i] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "date_mask", dateMask[i] ) ); retval.append( " </mapping>" ).append( Const.CR ); } return retval.toString(); } OraBulkLoaderMeta(); @Injection( name = "CONNECTION_NAME" ) void setConnection( String connectionName ); int getCommitSizeAsInt( VariableSpace varSpace ); String getCommitSize(); void setCommitSize( String commitSize ); DatabaseMeta getDatabaseMeta(); void setDatabaseMeta( DatabaseMeta database ); String getTableName(); void setTableName( String tableName ); String getSqlldr(); void setSqlldr( String sqlldr ); String[] getFieldTable(); void setFieldTable( String[] updateLookup ); String[] getFieldStream(); void setFieldStream( String[] updateStream ); String[] getDateMask(); void setDateMask( String[] dateMask ); boolean isFailOnWarning(); void setFailOnWarning( boolean failOnWarning ); boolean isFailOnError(); void setFailOnError( boolean failOnError ); String getCharacterSetName(); void setCharacterSetName( String characterSetName ); String getAltRecordTerm(); void setAltRecordTerm( String altRecordTerm ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrvalues ); Object clone(); void setDefault(); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void getFields( RowMetaInterface rowMeta, String 
origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); SQLStatement getSQLStatements( TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository, IMetaStore metaStore ); void analyseImpact( List<DatabaseImpact> impact, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); DatabaseMeta[] getUsedDatabaseConnections(); boolean isDirectPath(); void setDirectPath( boolean directPath ); RowMetaInterface getRequiredFields( VariableSpace space ); String getSchemaName(); void setSchemaName( String schemaName ); String getBadFile(); void setBadFile( String badFile ); String getControlFile(); void setControlFile( String controlFile ); String getDataFile(); void setDataFile( String dataFile ); String getDiscardFile(); void setDiscardFile( String discardFile ); String getLogFile(); void setLogFile( String logFile ); void setLoadAction( String action ); String getLoadAction(); void setLoadMethod( String method ); String getLoadMethod(); String getEncoding(); void setEncoding( String encoding ); String getDelimiter(); String getEnclosure(); boolean isEraseFiles(); void setEraseFiles( boolean eraseFiles ); int getBindSizeAsInt( VariableSpace varSpace ); String getBindSize(); void setBindSize( String bindSize ); int getMaxErrorsAsInt( VariableSpace varSpace ); String getMaxErrors(); void setMaxErrors( String maxErrors ); int getReadSizeAsInt( VariableSpace varSpace ); String getReadSize(); void 
setReadSize( String readSize ); String getDbNameOverride(); void setDbNameOverride( String dbNameOverride ); boolean isParallel(); void setParallel( boolean parallel ); @Override String getMissingDatabaseConnectionInformationMessage(); @AfterInjection void afterInjectionSynchronization(); static final String ACTION_APPEND; static final String ACTION_INSERT; static final String ACTION_REPLACE; static final String ACTION_TRUNCATE; static final String METHOD_AUTO_CONCURRENT; static final String METHOD_AUTO_END; static final String METHOD_MANUAL; static final String DATE_MASK_DATE; static final String DATE_MASK_DATETIME; }
@Test public void testCreateCommandLine() throws Exception { StepMockHelper<OraBulkLoaderMeta, OraBulkLoaderData> stepMockHelper = new StepMockHelper<OraBulkLoaderMeta, OraBulkLoaderData>( "TEST_CREATE_COMMANDLINE", OraBulkLoaderMeta.class, OraBulkLoaderData.class ); when( stepMockHelper.logChannelInterfaceFactory.create( any(), any( LoggingObjectInterface.class ) ) ).thenReturn( stepMockHelper.logChannelInterface ); when( stepMockHelper.trans.isRunning() ).thenReturn( true ); OraBulkLoader oraBulkLoader = new OraBulkLoader( stepMockHelper.stepMeta, stepMockHelper.stepDataInterface, 0, stepMockHelper.transMeta, stepMockHelper.trans ); File tmp = File.createTempFile( "testCreateCOmmandLine", "tmp" ); tmp.deleteOnExit(); OraBulkLoaderMeta meta = new OraBulkLoaderMeta(); meta.setSqlldr( tmp.getAbsolutePath() ); meta.setControlFile( tmp.getAbsolutePath() ); DatabaseMeta dm = mock( DatabaseMeta.class ); when( dm.getUsername() ).thenReturn( "user" ); when( dm.getPassword() ).thenReturn( "Encrypted 2be98afc86aa7f2e4cb298b5eeab387f5" ); meta.setDatabaseMeta( dm ); String cmd = oraBulkLoader.createCommandLine( meta, true ); String expected = tmp.getAbsolutePath() + " control='" + tmp.getAbsolutePath() + "' userid=user/PENTAHO@"; assertEquals( "Comandline for oracle bulkloader is not as expected", expected, cmd ); }
public String createCommandLine( OraBulkLoaderMeta meta, boolean password ) throws KettleException { StringBuilder sb = new StringBuilder( 300 ); if ( meta.getSqlldr() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getSqlldr() ), getTransMeta() ); String sqlldr = KettleVFS.getFilename( fileObject ); sb.append( sqlldr ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving sqlldr string", ex ); } } else { throw new KettleException( "No sqlldr application specified" ); } if ( meta.getControlFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getControlFile() ), getTransMeta() ); sb.append( " control=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving controlfile string", ex ); } } else { throw new KettleException( "No control file specified" ); } if ( OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals( meta.getLoadMethod() ) ) { sb.append( " data=\'-\'" ); } if ( meta.getLogFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getLogFile() ), getTransMeta() ); sb.append( " log=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving logfile string", ex ); } } if ( meta.getBadFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getBadFile() ), getTransMeta() ); sb.append( " bad=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving badfile string", ex ); } } if ( meta.getDiscardFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getDiscardFile() ), getTransMeta() ); sb.append( " discard=\'" ); 
sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving discardfile string", ex ); } } DatabaseMeta dm = meta.getDatabaseMeta(); if ( dm != null ) { String user = Const.NVL( dm.getUsername(), "" ); String pass = Const.NVL( Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( dm.getPassword() ) ), "" ); if ( !password ) { pass = "******"; } String dns = Const.NVL( dm.getDatabaseName(), "" ); sb.append( " userid=" ).append( environmentSubstitute( user ) ).append( "/" ).append( environmentSubstitute( pass ) ).append( "@" ); String overrideName = meta.getDbNameOverride(); if ( Utils.isEmpty( Const.rtrim( overrideName ) ) ) { sb.append( environmentSubstitute( dns ) ); } else { sb.append( environmentSubstitute( overrideName ) ); } } else { throw new KettleException( "No connection specified" ); } if ( meta.isDirectPath() ) { sb.append( " DIRECT=TRUE" ); if ( getStepMeta().getCopies() > 1 || meta.isParallel() ) { sb.append( " PARALLEL=TRUE" ); } } return sb.toString(); }
OraBulkLoader extends BaseStep implements StepInterface { public String createCommandLine( OraBulkLoaderMeta meta, boolean password ) throws KettleException { StringBuilder sb = new StringBuilder( 300 ); if ( meta.getSqlldr() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getSqlldr() ), getTransMeta() ); String sqlldr = KettleVFS.getFilename( fileObject ); sb.append( sqlldr ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving sqlldr string", ex ); } } else { throw new KettleException( "No sqlldr application specified" ); } if ( meta.getControlFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getControlFile() ), getTransMeta() ); sb.append( " control=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving controlfile string", ex ); } } else { throw new KettleException( "No control file specified" ); } if ( OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals( meta.getLoadMethod() ) ) { sb.append( " data=\'-\'" ); } if ( meta.getLogFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getLogFile() ), getTransMeta() ); sb.append( " log=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving logfile string", ex ); } } if ( meta.getBadFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getBadFile() ), getTransMeta() ); sb.append( " bad=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving badfile string", ex ); } } if ( meta.getDiscardFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( 
meta.getDiscardFile() ), getTransMeta() ); sb.append( " discard=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving discardfile string", ex ); } } DatabaseMeta dm = meta.getDatabaseMeta(); if ( dm != null ) { String user = Const.NVL( dm.getUsername(), "" ); String pass = Const.NVL( Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( dm.getPassword() ) ), "" ); if ( !password ) { pass = "******"; } String dns = Const.NVL( dm.getDatabaseName(), "" ); sb.append( " userid=" ).append( environmentSubstitute( user ) ).append( "/" ).append( environmentSubstitute( pass ) ).append( "@" ); String overrideName = meta.getDbNameOverride(); if ( Utils.isEmpty( Const.rtrim( overrideName ) ) ) { sb.append( environmentSubstitute( dns ) ); } else { sb.append( environmentSubstitute( overrideName ) ); } } else { throw new KettleException( "No connection specified" ); } if ( meta.isDirectPath() ) { sb.append( " DIRECT=TRUE" ); if ( getStepMeta().getCopies() > 1 || meta.isParallel() ) { sb.append( " PARALLEL=TRUE" ); } } return sb.toString(); } }
OraBulkLoader extends BaseStep implements StepInterface { public String createCommandLine( OraBulkLoaderMeta meta, boolean password ) throws KettleException { StringBuilder sb = new StringBuilder( 300 ); if ( meta.getSqlldr() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getSqlldr() ), getTransMeta() ); String sqlldr = KettleVFS.getFilename( fileObject ); sb.append( sqlldr ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving sqlldr string", ex ); } } else { throw new KettleException( "No sqlldr application specified" ); } if ( meta.getControlFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getControlFile() ), getTransMeta() ); sb.append( " control=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving controlfile string", ex ); } } else { throw new KettleException( "No control file specified" ); } if ( OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals( meta.getLoadMethod() ) ) { sb.append( " data=\'-\'" ); } if ( meta.getLogFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getLogFile() ), getTransMeta() ); sb.append( " log=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving logfile string", ex ); } } if ( meta.getBadFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getBadFile() ), getTransMeta() ); sb.append( " bad=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving badfile string", ex ); } } if ( meta.getDiscardFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( 
meta.getDiscardFile() ), getTransMeta() ); sb.append( " discard=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving discardfile string", ex ); } } DatabaseMeta dm = meta.getDatabaseMeta(); if ( dm != null ) { String user = Const.NVL( dm.getUsername(), "" ); String pass = Const.NVL( Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( dm.getPassword() ) ), "" ); if ( !password ) { pass = "******"; } String dns = Const.NVL( dm.getDatabaseName(), "" ); sb.append( " userid=" ).append( environmentSubstitute( user ) ).append( "/" ).append( environmentSubstitute( pass ) ).append( "@" ); String overrideName = meta.getDbNameOverride(); if ( Utils.isEmpty( Const.rtrim( overrideName ) ) ) { sb.append( environmentSubstitute( dns ) ); } else { sb.append( environmentSubstitute( overrideName ) ); } } else { throw new KettleException( "No connection specified" ); } if ( meta.isDirectPath() ) { sb.append( " DIRECT=TRUE" ); if ( getStepMeta().getCopies() > 1 || meta.isParallel() ) { sb.append( " PARALLEL=TRUE" ); } } return sb.toString(); } OraBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
OraBulkLoader extends BaseStep implements StepInterface { public String createCommandLine( OraBulkLoaderMeta meta, boolean password ) throws KettleException { StringBuilder sb = new StringBuilder( 300 ); if ( meta.getSqlldr() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getSqlldr() ), getTransMeta() ); String sqlldr = KettleVFS.getFilename( fileObject ); sb.append( sqlldr ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving sqlldr string", ex ); } } else { throw new KettleException( "No sqlldr application specified" ); } if ( meta.getControlFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getControlFile() ), getTransMeta() ); sb.append( " control=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving controlfile string", ex ); } } else { throw new KettleException( "No control file specified" ); } if ( OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals( meta.getLoadMethod() ) ) { sb.append( " data=\'-\'" ); } if ( meta.getLogFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getLogFile() ), getTransMeta() ); sb.append( " log=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving logfile string", ex ); } } if ( meta.getBadFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getBadFile() ), getTransMeta() ); sb.append( " bad=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving badfile string", ex ); } } if ( meta.getDiscardFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( 
meta.getDiscardFile() ), getTransMeta() ); sb.append( " discard=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving discardfile string", ex ); } } DatabaseMeta dm = meta.getDatabaseMeta(); if ( dm != null ) { String user = Const.NVL( dm.getUsername(), "" ); String pass = Const.NVL( Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( dm.getPassword() ) ), "" ); if ( !password ) { pass = "******"; } String dns = Const.NVL( dm.getDatabaseName(), "" ); sb.append( " userid=" ).append( environmentSubstitute( user ) ).append( "/" ).append( environmentSubstitute( pass ) ).append( "@" ); String overrideName = meta.getDbNameOverride(); if ( Utils.isEmpty( Const.rtrim( overrideName ) ) ) { sb.append( environmentSubstitute( dns ) ); } else { sb.append( environmentSubstitute( overrideName ) ); } } else { throw new KettleException( "No connection specified" ); } if ( meta.isDirectPath() ) { sb.append( " DIRECT=TRUE" ); if ( getStepMeta().getCopies() > 1 || meta.isParallel() ) { sb.append( " PARALLEL=TRUE" ); } } return sb.toString(); } OraBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); String getControlFileContents( OraBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ); void createControlFile( String filename, Object[] row, OraBulkLoaderMeta meta ); String createCommandLine( OraBulkLoaderMeta meta, boolean password ); void checkExitVal( int exitVal ); boolean execute( OraBulkLoaderMeta meta, boolean wait ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
OraBulkLoader extends BaseStep implements StepInterface { public String createCommandLine( OraBulkLoaderMeta meta, boolean password ) throws KettleException { StringBuilder sb = new StringBuilder( 300 ); if ( meta.getSqlldr() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getSqlldr() ), getTransMeta() ); String sqlldr = KettleVFS.getFilename( fileObject ); sb.append( sqlldr ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving sqlldr string", ex ); } } else { throw new KettleException( "No sqlldr application specified" ); } if ( meta.getControlFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getControlFile() ), getTransMeta() ); sb.append( " control=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving controlfile string", ex ); } } else { throw new KettleException( "No control file specified" ); } if ( OraBulkLoaderMeta.METHOD_AUTO_CONCURRENT.equals( meta.getLoadMethod() ) ) { sb.append( " data=\'-\'" ); } if ( meta.getLogFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getLogFile() ), getTransMeta() ); sb.append( " log=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving logfile string", ex ); } } if ( meta.getBadFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( meta.getBadFile() ), getTransMeta() ); sb.append( " bad=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving badfile string", ex ); } } if ( meta.getDiscardFile() != null ) { try { FileObject fileObject = KettleVFS.getFileObject( environmentSubstitute( 
meta.getDiscardFile() ), getTransMeta() ); sb.append( " discard=\'" ); sb.append( KettleVFS.getFilename( fileObject ) ); sb.append( "\'" ); } catch ( KettleFileException ex ) { throw new KettleException( "Error retrieving discardfile string", ex ); } } DatabaseMeta dm = meta.getDatabaseMeta(); if ( dm != null ) { String user = Const.NVL( dm.getUsername(), "" ); String pass = Const.NVL( Encr.decryptPasswordOptionallyEncrypted( environmentSubstitute( dm.getPassword() ) ), "" ); if ( !password ) { pass = "******"; } String dns = Const.NVL( dm.getDatabaseName(), "" ); sb.append( " userid=" ).append( environmentSubstitute( user ) ).append( "/" ).append( environmentSubstitute( pass ) ).append( "@" ); String overrideName = meta.getDbNameOverride(); if ( Utils.isEmpty( Const.rtrim( overrideName ) ) ) { sb.append( environmentSubstitute( dns ) ); } else { sb.append( environmentSubstitute( overrideName ) ); } } else { throw new KettleException( "No connection specified" ); } if ( meta.isDirectPath() ) { sb.append( " DIRECT=TRUE" ); if ( getStepMeta().getCopies() > 1 || meta.isParallel() ) { sb.append( " PARALLEL=TRUE" ); } } return sb.toString(); } OraBulkLoader( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); String getControlFileContents( OraBulkLoaderMeta meta, RowMetaInterface rm, Object[] r ); void createControlFile( String filename, Object[] row, OraBulkLoaderMeta meta ); String createCommandLine( OraBulkLoaderMeta meta, boolean password ); void checkExitVal( int exitVal ); boolean execute( OraBulkLoaderMeta meta, boolean wait ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); static final int EX_SUCC; static final int EX_WARN; }
@Test public void testGetFields() throws KettleStepException { ReplaceStringMeta meta = new ReplaceStringMeta(); meta.setFieldInStream( new String[] { FIELD_NAME } ); meta.setFieldOutStream( new String[] { FIELD_NAME } ); ValueMetaInterface inputFieldMeta = mock( ValueMetaInterface.class ); when( inputFieldMeta.getStringEncoding() ).thenReturn( ENCODING_NAME ); RowMetaInterface inputRowMeta = mock( RowMetaInterface.class ); when( inputRowMeta.searchValueMeta( anyString() ) ).thenReturn( inputFieldMeta ); StepMeta nextStep = mock( StepMeta.class ); VariableSpace space = mock( VariableSpace.class ); Repository repository = mock( Repository.class ); IMetaStore metaStore = mock( IMetaStore.class ); meta.getFields( inputRowMeta, "test", null, nextStep, space, repository, metaStore ); ArgumentCaptor<ValueMetaInterface> argument = ArgumentCaptor.forClass( ValueMetaInterface.class ); verify( inputRowMeta ).addValueMeta( argument.capture() ); assertEquals( ENCODING_NAME, argument.getValue().getStringEncoding() ); }
public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int nrFields = fieldInStream == null ? 0 : fieldInStream.length; for ( int i = 0; i < nrFields; i++ ) { String fieldName = space.environmentSubstitute( fieldOutStream[i] ); ValueMetaInterface valueMeta; if ( !Utils.isEmpty( fieldOutStream[i] ) ) { valueMeta = new ValueMetaString( fieldName ); valueMeta.setOrigin( name ); ValueMetaInterface sourceField = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( sourceField != null ) { valueMeta.setStringEncoding( sourceField.getStringEncoding() ); } inputRowMeta.addValueMeta( valueMeta ); } else { valueMeta = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( valueMeta == null ) { continue; } valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); } } }
ReplaceStringMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int nrFields = fieldInStream == null ? 0 : fieldInStream.length; for ( int i = 0; i < nrFields; i++ ) { String fieldName = space.environmentSubstitute( fieldOutStream[i] ); ValueMetaInterface valueMeta; if ( !Utils.isEmpty( fieldOutStream[i] ) ) { valueMeta = new ValueMetaString( fieldName ); valueMeta.setOrigin( name ); ValueMetaInterface sourceField = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( sourceField != null ) { valueMeta.setStringEncoding( sourceField.getStringEncoding() ); } inputRowMeta.addValueMeta( valueMeta ); } else { valueMeta = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( valueMeta == null ) { continue; } valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); } } } }
ReplaceStringMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int nrFields = fieldInStream == null ? 0 : fieldInStream.length; for ( int i = 0; i < nrFields; i++ ) { String fieldName = space.environmentSubstitute( fieldOutStream[i] ); ValueMetaInterface valueMeta; if ( !Utils.isEmpty( fieldOutStream[i] ) ) { valueMeta = new ValueMetaString( fieldName ); valueMeta.setOrigin( name ); ValueMetaInterface sourceField = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( sourceField != null ) { valueMeta.setStringEncoding( sourceField.getStringEncoding() ); } inputRowMeta.addValueMeta( valueMeta ); } else { valueMeta = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( valueMeta == null ) { continue; } valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); } } } ReplaceStringMeta(); }
ReplaceStringMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int nrFields = fieldInStream == null ? 0 : fieldInStream.length; for ( int i = 0; i < nrFields; i++ ) { String fieldName = space.environmentSubstitute( fieldOutStream[i] ); ValueMetaInterface valueMeta; if ( !Utils.isEmpty( fieldOutStream[i] ) ) { valueMeta = new ValueMetaString( fieldName ); valueMeta.setOrigin( name ); ValueMetaInterface sourceField = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( sourceField != null ) { valueMeta.setStringEncoding( sourceField.getStringEncoding() ); } inputRowMeta.addValueMeta( valueMeta ); } else { valueMeta = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( valueMeta == null ) { continue; } valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); } } } ReplaceStringMeta(); String[] getFieldInStream(); void setFieldInStream( String[] keyStream ); int[] getCaseSensitive(); int[] getWholeWord(); void setWholeWord( int[] wholeWord ); int[] getUseRegEx(); void setUseRegEx( int[] useRegEx ); boolean[] isSetEmptyString(); void setEmptyString( boolean[] setEmptyString ); String[] getFieldOutStream(); void setFieldOutStream( String[] keyStream ); String[] getReplaceString(); void setReplaceString( String[] replaceString ); String[] getReplaceByString(); void setReplaceByString( String[] replaceByString ); String[] getFieldReplaceByString(); void setFieldReplaceByString( String[] replaceFieldByString ); void setCaseSensitive( int[] caseSensitive ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrkeys ); Object clone(); void setDefault(); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, 
IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); static String getCaseSensitiveDesc( int i ); static String getWholeWordDesc( int i ); static String getUseRegExDesc( int i ); static int getCaseSensitiveByDesc( String tt ); static int getWholeWordByDesc( String tt ); static int getUseRegExByDesc( String tt ); @AfterInjection void afterInjectionSynchronization(); }
ReplaceStringMeta extends BaseStepMeta implements StepMetaInterface { public void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException { int nrFields = fieldInStream == null ? 0 : fieldInStream.length; for ( int i = 0; i < nrFields; i++ ) { String fieldName = space.environmentSubstitute( fieldOutStream[i] ); ValueMetaInterface valueMeta; if ( !Utils.isEmpty( fieldOutStream[i] ) ) { valueMeta = new ValueMetaString( fieldName ); valueMeta.setOrigin( name ); ValueMetaInterface sourceField = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( sourceField != null ) { valueMeta.setStringEncoding( sourceField.getStringEncoding() ); } inputRowMeta.addValueMeta( valueMeta ); } else { valueMeta = inputRowMeta.searchValueMeta( fieldInStream[i] ); if ( valueMeta == null ) { continue; } valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); } } } ReplaceStringMeta(); String[] getFieldInStream(); void setFieldInStream( String[] keyStream ); int[] getCaseSensitive(); int[] getWholeWord(); void setWholeWord( int[] wholeWord ); int[] getUseRegEx(); void setUseRegEx( int[] useRegEx ); boolean[] isSetEmptyString(); void setEmptyString( boolean[] setEmptyString ); String[] getFieldOutStream(); void setFieldOutStream( String[] keyStream ); String[] getReplaceString(); void setReplaceString( String[] replaceString ); String[] getReplaceByString(); void setReplaceByString( String[] replaceByString ); String[] getFieldReplaceByString(); void setFieldReplaceByString( String[] replaceFieldByString ); void setCaseSensitive( int[] caseSensitive ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int nrkeys ); Object clone(); void setDefault(); String getXML(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, 
IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void getFields( RowMetaInterface inputRowMeta, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepinfo, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); static String getCaseSensitiveDesc( int i ); static String getWholeWordDesc( int i ); static String getUseRegExDesc( int i ); static int getCaseSensitiveByDesc( String tt ); static int getWholeWordByDesc( String tt ); static int getUseRegExByDesc( String tt ); @AfterInjection void afterInjectionSynchronization(); static final String[] caseSensitiveCode; static final String[] caseSensitiveDesc; static final int CASE_SENSITIVE_NO; static final int CASE_SENSITIVE_YES; static final String[] wholeWordDesc; static final String[] wholeWordCode; static final int WHOLE_WORD_NO; static final int WHOLE_WORD_YES; static final String[] useRegExDesc; static final String[] useRegExCode; static final int USE_REGEX_NO; static final int USE_REGEX_YES; }
/**
 * Verifies ReplaceString.getOneRow(): two replacement rules against the same input
 * column — the first replaces in place (empty target field name), the second writes
 * its result into an appended output field.
 */
@Test
public void testGetOneRow() throws Exception {
  ReplaceStringData stepData = new ReplaceStringData();
  ReplaceString step =
      new ReplaceString( stepMockHelper.stepMeta, stepData, 0, stepMockHelper.transMeta, stepMockHelper.trans );

  RowMetaInterface rowMeta = new RowMeta();
  rowMeta.addValueMeta( 0, new ValueMetaString( "SomeDataMeta" ) );
  rowMeta.addValueMeta( 1, new ValueMetaString( "AnotherDataMeta" ) );

  step.init( stepMockHelper.processRowsStepMetaInterface, stepData );
  step.setInputRowMeta( rowMeta );

  // Wire the step data directly instead of going through meta initialization.
  stepData.outputRowMeta = rowMeta;
  stepData.outputRowMeta.addValueMeta( new ValueMetaString( "AnotherDataMeta" ) );
  stepData.inputFieldsNr = 2;
  stepData.numFields = 2;
  stepData.inStreamNrs = new int[] { 1, 1 };
  stepData.patterns = new Pattern[] { Pattern.compile( "a" ), Pattern.compile( "t" ) };
  stepData.replaceFieldIndex = new int[] { -1, -1 };
  stepData.outStreamNrs = new String[] { "", "1" };
  stepData.replaceByString = new String[] { "1", "2" };
  stepData.setEmptyString = new boolean[] { false, false };

  Object[] actual = step.getOneRow( rowMeta, row );
  assertArrayEquals( "Output varies", expectedRow, actual );
}
/**
 * Applies every configured replacement rule to a single row.
 *
 * The row is first widened to the output layout. Rules whose target field name is
 * empty overwrite their source field in place; rules with a target name append their
 * result after the original input fields. When several rules read the same source
 * field, rules after the first in-place replacement must see the transformed value,
 * so the source metadata switches from the input row meta to the output row meta.
 *
 * @param rowMeta row metadata (unused directly; kept for the step API)
 * @param row     incoming row values
 * @return the widened row with all replacements applied
 * @throws KettleException when reading a field value or resolving a replacement fails
 */
synchronized Object[] getOneRow( RowMetaInterface rowMeta, Object[] row ) throws KettleException {
  Object[] outputRow = RowDataUtil.resizeArray( row, data.outputRowMeta.size() );
  // Source-field indexes already replaced in place during this call.
  Set<Integer> replacedInPlace = new HashSet<>();
  int appendIndex = 0;
  for ( int i = 0; i < data.numFields; i++ ) {
    int sourceIndex = data.inStreamNrs[i];
    RowMetaInterface sourceMeta =
        replacedInPlace.contains( sourceIndex ) ? data.outputRowMeta : getInputRowMeta();
    String replaced = replaceString(
        sourceMeta.getString( outputRow, sourceIndex ), data.patterns[i], getResolvedReplaceByString( i, row ) );
    if ( Utils.isEmpty( data.outStreamNrs[i] ) ) {
      // No target field configured: overwrite the source field.
      outputRow[sourceIndex] = replaced;
      replacedInPlace.add( sourceIndex );
    } else {
      // Target field configured: append after the original input fields.
      outputRow[data.inputFieldsNr + appendIndex++] = replaced;
    }
  }
  return outputRow;
}
// NOTE(review): dataset-row fragments — class bodies without the `class` keyword or
// field declarations; not compilable standalone. Code kept byte-identical below.
// The four rows repeat ReplaceString.getOneRow() with a progressively fuller member
// list (constructor, then the public step API). getOneRow(): widens the row to the
// output size, applies each pattern; an empty outStreamNrs[i] means replace the source
// field in place (tracked in the Set so later rules on the same field read the
// already-transformed value via outputRowMeta), otherwise the result is appended
// after the original input fields.
ReplaceString extends BaseStep implements StepInterface { synchronized Object[] getOneRow( RowMetaInterface rowMeta, Object[] row ) throws KettleException { Object[] rowData = RowDataUtil.resizeArray( row, data.outputRowMeta.size() ); int index = 0; Set<Integer> numFieldsAlreadyBeenTransformed = new HashSet<Integer>(); for ( int i = 0; i < data.numFields; i++ ) { RowMetaInterface currentRowMeta = ( numFieldsAlreadyBeenTransformed.contains( data.inStreamNrs[i] ) ) ? data.outputRowMeta : getInputRowMeta(); String value = replaceString( currentRowMeta.getString( rowData, data.inStreamNrs[i] ), data.patterns[i], getResolvedReplaceByString( i, row ) ); if ( Utils.isEmpty( data.outStreamNrs[i] ) ) { rowData[data.inStreamNrs[i]] = value; numFieldsAlreadyBeenTransformed.add( data.inStreamNrs[i] ); } else { rowData[data.inputFieldsNr + index++] = value; } } return rowData; } }
// Row variant: adds the constructor declaration.
ReplaceString extends BaseStep implements StepInterface { synchronized Object[] getOneRow( RowMetaInterface rowMeta, Object[] row ) throws KettleException { Object[] rowData = RowDataUtil.resizeArray( row, data.outputRowMeta.size() ); int index = 0; Set<Integer> numFieldsAlreadyBeenTransformed = new HashSet<Integer>(); for ( int i = 0; i < data.numFields; i++ ) { RowMetaInterface currentRowMeta = ( numFieldsAlreadyBeenTransformed.contains( data.inStreamNrs[i] ) ) ? data.outputRowMeta : getInputRowMeta(); String value = replaceString( currentRowMeta.getString( rowData, data.inStreamNrs[i] ), data.patterns[i], getResolvedReplaceByString( i, row ) ); if ( Utils.isEmpty( data.outStreamNrs[i] ) ) { rowData[data.inStreamNrs[i]] = value; numFieldsAlreadyBeenTransformed.add( data.inStreamNrs[i] ); } else { rowData[data.inputFieldsNr + index++] = value; } } return rowData; } ReplaceString( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
// Row variant: adds constructor plus the public step API declarations.
ReplaceString extends BaseStep implements StepInterface { synchronized Object[] getOneRow( RowMetaInterface rowMeta, Object[] row ) throws KettleException { Object[] rowData = RowDataUtil.resizeArray( row, data.outputRowMeta.size() ); int index = 0; Set<Integer> numFieldsAlreadyBeenTransformed = new HashSet<Integer>(); for ( int i = 0; i < data.numFields; i++ ) { RowMetaInterface currentRowMeta = ( numFieldsAlreadyBeenTransformed.contains( data.inStreamNrs[i] ) ) ? data.outputRowMeta : getInputRowMeta(); String value = replaceString( currentRowMeta.getString( rowData, data.inStreamNrs[i] ), data.patterns[i], getResolvedReplaceByString( i, row ) ); if ( Utils.isEmpty( data.outStreamNrs[i] ) ) { rowData[data.inStreamNrs[i]] = value; numFieldsAlreadyBeenTransformed.add( data.inStreamNrs[i] ); } else { rowData[data.inputFieldsNr + index++] = value; } } return rowData; } ReplaceString( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); static String replaceString( String originalString, Pattern pattern, String replaceByString ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
// Duplicate of the previous row.
ReplaceString extends BaseStep implements StepInterface { synchronized Object[] getOneRow( RowMetaInterface rowMeta, Object[] row ) throws KettleException { Object[] rowData = RowDataUtil.resizeArray( row, data.outputRowMeta.size() ); int index = 0; Set<Integer> numFieldsAlreadyBeenTransformed = new HashSet<Integer>(); for ( int i = 0; i < data.numFields; i++ ) { RowMetaInterface currentRowMeta = ( numFieldsAlreadyBeenTransformed.contains( data.inStreamNrs[i] ) ) ? data.outputRowMeta : getInputRowMeta(); String value = replaceString( currentRowMeta.getString( rowData, data.inStreamNrs[i] ), data.patterns[i], getResolvedReplaceByString( i, row ) ); if ( Utils.isEmpty( data.outStreamNrs[i] ) ) { rowData[data.inStreamNrs[i]] = value; numFieldsAlreadyBeenTransformed.add( data.inStreamNrs[i] ); } else { rowData[data.inputFieldsNr + index++] = value; } } return rowData; } ReplaceString( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); static String replaceString( String originalString, Pattern pattern, String replaceByString ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
/**
 * After metadata injection with mismatched array lengths, afterInjectionSynchronization()
 * must pad every secondary array to the length of fieldInStream, filling new slots with
 * empty strings, zeros, or false.
 */
@Test
public void testSynchronizeDifferentFieldsArraysLengths() throws Exception {
  ReplaceStringData stepData = new ReplaceStringData();
  ReplaceString step =
      new ReplaceString( stepMockHelper.stepMeta, stepData, 0, stepMockHelper.transMeta, stepMockHelper.trans );
  ReplaceStringMeta injectedMeta = new ReplaceStringMeta();
  step.init( injectedMeta, stepData );

  // Two input fields, but only one entry in every other injected array.
  injectedMeta.setFieldInStream( new String[] { "input1", "input2" } );
  injectedMeta.setFieldOutStream( new String[] { "out" } );
  injectedMeta.setUseRegEx( new int[] { 1 } );
  injectedMeta.setCaseSensitive( new int[] { 0 } );
  injectedMeta.setWholeWord( new int[] { 1 } );
  injectedMeta.setReplaceString( new String[] { "string" } );
  injectedMeta.setReplaceByString( new String[] { "string" } );
  injectedMeta.setEmptyString( new boolean[] { true } );
  injectedMeta.setFieldReplaceByString( new String[] { "string" } );

  injectedMeta.afterInjectionSynchronization();

  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getFieldOutStream().length );
  Assert.assertEquals( StringUtils.EMPTY, injectedMeta.getFieldOutStream()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getUseRegEx().length );
  Assert.assertEquals( 0, injectedMeta.getUseRegEx()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getCaseSensitive().length );
  Assert.assertEquals( 0, injectedMeta.getCaseSensitive()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getWholeWord().length );
  Assert.assertEquals( 0, injectedMeta.getWholeWord()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getReplaceString().length );
  Assert.assertEquals( StringUtils.EMPTY, injectedMeta.getReplaceString()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getReplaceByString().length );
  Assert.assertEquals( StringUtils.EMPTY, injectedMeta.getReplaceByString()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.isSetEmptyString().length );
  Assert.assertEquals( false, injectedMeta.isSetEmptyString()[ 1 ] );
  Assert.assertEquals( injectedMeta.getFieldInStream().length, injectedMeta.getFieldReplaceByString().length );
  Assert.assertEquals( StringUtils.EMPTY, injectedMeta.getFieldReplaceByString()[ 1 ] );
}
/**
 * Initializes the step: caches the typed meta/data references, then delegates the
 * actual initialization to BaseStep.
 *
 * @param smi step metadata, expected to be a ReplaceStringMeta
 * @param sdi step data, expected to be a ReplaceStringData
 * @return the result of super.init()
 */
public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
  meta = (ReplaceStringMeta) smi;
  data = (ReplaceStringData) sdi;
  // Simplified from `if ( super.init( smi, sdi ) ) { return true; } return false;` —
  // identical behavior, without the redundant conditional.
  return super.init( smi, sdi );
}
// NOTE(review): dataset-row fragments — class bodies without the `class` keyword or
// field declarations; not compilable standalone. Code kept byte-identical below.
// The four rows repeat ReplaceString.init() with a progressively fuller member list.
// init(): caches the typed meta/data references and delegates to BaseStep.init();
// the `if (x) return true; return false;` is equivalent to `return x;`.
ReplaceString extends BaseStep implements StepInterface { public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (ReplaceStringMeta) smi; data = (ReplaceStringData) sdi; if ( super.init( smi, sdi ) ) { return true; } return false; } }
// Row variant: adds the constructor declaration.
ReplaceString extends BaseStep implements StepInterface { public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (ReplaceStringMeta) smi; data = (ReplaceStringData) sdi; if ( super.init( smi, sdi ) ) { return true; } return false; } ReplaceString( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
// Row variant: adds constructor plus the public step API declarations.
ReplaceString extends BaseStep implements StepInterface { public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (ReplaceStringMeta) smi; data = (ReplaceStringData) sdi; if ( super.init( smi, sdi ) ) { return true; } return false; } ReplaceString( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); static String replaceString( String originalString, Pattern pattern, String replaceByString ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
// Duplicate of the previous row.
ReplaceString extends BaseStep implements StepInterface { public boolean init( StepMetaInterface smi, StepDataInterface sdi ) { meta = (ReplaceStringMeta) smi; data = (ReplaceStringData) sdi; if ( super.init( smi, sdi ) ) { return true; } return false; } ReplaceString( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); static String replaceString( String originalString, Pattern pattern, String replaceByString ); boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); boolean init( StepMetaInterface smi, StepDataInterface sdi ); void dispose( StepMetaInterface smi, StepDataInterface sdi ); }
/**
 * Only the ANONYMOUS singleton reports isAnonymous(); named principals do not,
 * even when the name is the empty string.
 */
@Test
public void isAnonymous() throws Exception {
  assertTrue( ActingPrincipal.ANONYMOUS.isAnonymous() );
  ActingPrincipal named = new ActingPrincipal( "harold" );
  assertFalse( named.isAnonymous() );
  ActingPrincipal emptyName = new ActingPrincipal( "" );
  assertFalse( emptyName.isAnonymous() );
}
/**
 * @return true when this principal carries the anonymous flag
 */
public boolean isAnonymous() {
  return this.anonymous;
}
// NOTE(review): dataset-row fragments of ActingPrincipal — class bodies without the
// `class` keyword or field declarations; not compilable standalone. Code kept
// byte-identical below. isAnonymous(): plain accessor for the `anonymous` flag.
ActingPrincipal implements Principal, Serializable { public boolean isAnonymous() { return anonymous; } }
// Row variant: adds the two constructors (the private no-arg one presumably builds
// the anonymous instance — TODO confirm against the full class source).
ActingPrincipal implements Principal, Serializable { public boolean isAnonymous() { return anonymous; } ActingPrincipal( String name ); private ActingPrincipal(); }
// Row variant: adds the Principal/equality API declarations.
ActingPrincipal implements Principal, Serializable { public boolean isAnonymous() { return anonymous; } ActingPrincipal( String name ); private ActingPrincipal(); @Override String getName(); boolean equals( Object other ); String toString(); int hashCode(); boolean isAnonymous(); }
// Row variant: additionally declares the ANONYMOUS singleton constant.
ActingPrincipal implements Principal, Serializable { public boolean isAnonymous() { return anonymous; } ActingPrincipal( String name ); private ActingPrincipal(); @Override String getName(); boolean equals( Object other ); String toString(); int hashCode(); boolean isAnonymous(); static final ActingPrincipal ANONYMOUS; }
/**
 * setDefault() must leave non-null, zero-length fields/valueTypes arrays and clear
 * both selection flags.
 */
@Test
public void testSetDefault() throws Exception {
  IfNullMeta meta = new IfNullMeta();
  meta.setDefault();
  assertTrue( ( meta.getValueTypes() != null ) && ( meta.getValueTypes().length == 0 ) );
  assertTrue( ( meta.getFields() != null ) && ( meta.getFields().length == 0 ) );
  assertFalse( meta.isSelectFields() );
  assertFalse( meta.isSelectValuesType() );
}
/**
 * Resets this step metadata to its defaults: no replacement value or mask, all
 * selection flags off, and empty (zero-length) field/type arrays so the array
 * getters never return null.
 */
public void setDefault() {
  replaceAllByValue = null;
  replaceAllMask = null;
  selectFields = false;
  selectValuesType = false;
  setEmptyStringAll = false;
  // Was: int nrfields = 0; int nrtypes = 0; allocate( nrtypes, nrfields );
  // The throwaway locals added nothing — allocate empty arrays directly.
  allocate( 0, 0 );
}
// NOTE(review): dataset-row fragments of IfNullMeta — class bodies without the `class`
// keyword or field declarations; not compilable standalone. Code kept byte-identical.
// setDefault(): clears the replacement value/mask, turns off both selection flags and
// the empty-string-all flag, then allocates zero-length type/field arrays via
// allocate(0, 0) so the getters return empty arrays rather than null.
IfNullMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { replaceAllByValue = null; replaceAllMask = null; selectFields = false; selectValuesType = false; setEmptyStringAll = false; int nrfields = 0; int nrtypes = 0; allocate( nrtypes, nrfields ); } }
// Row variant: adds the constructor declaration.
IfNullMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { replaceAllByValue = null; replaceAllMask = null; selectFields = false; selectValuesType = false; setEmptyStringAll = false; int nrfields = 0; int nrtypes = 0; allocate( nrtypes, nrfields ); } IfNullMeta(); }
// Row variant: adds the full public step-meta API declarations.
IfNullMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { replaceAllByValue = null; replaceAllMask = null; selectFields = false; selectValuesType = false; setEmptyStringAll = false; int nrfields = 0; int nrtypes = 0; allocate( nrtypes, nrfields ); } IfNullMeta(); boolean isSetEmptyStringAll(); void setEmptyStringAll( boolean setEmptyStringAll ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); void allocate( int nrtypes, int nrfields ); boolean isSelectFields(); void setSelectFields( boolean selectFields ); void setSelectValuesType( boolean selectValuesType ); boolean isSelectValuesType(); void setReplaceAllByValue( String replaceValue ); String getReplaceAllByValue(); void setReplaceAllMask( String replaceAllMask ); String getReplaceAllMask(); Fields[] getFields(); void setFields( Fields[] fields ); ValueTypes[] getValueTypes(); void setValueTypes( ValueTypes[] valueTypes ); String getXML(); void setDefault(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
// Duplicate of the previous row.
IfNullMeta extends BaseStepMeta implements StepMetaInterface { public void setDefault() { replaceAllByValue = null; replaceAllMask = null; selectFields = false; selectValuesType = false; setEmptyStringAll = false; int nrfields = 0; int nrtypes = 0; allocate( nrtypes, nrfields ); } IfNullMeta(); boolean isSetEmptyStringAll(); void setEmptyStringAll( boolean setEmptyStringAll ); void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); Object clone(); void allocate( int nrtypes, int nrfields ); boolean isSelectFields(); void setSelectFields( boolean selectFields ); void setSelectValuesType( boolean selectValuesType ); boolean isSelectValuesType(); void setReplaceAllByValue( String replaceValue ); String getReplaceAllByValue(); void setReplaceAllMask( String replaceAllMask ); String getReplaceAllMask(); Fields[] getFields(); void setFields( Fields[] fields ); ValueTypes[] getValueTypes(); void setValueTypes( ValueTypes[] valueTypes ); String getXML(); void setDefault(); void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta tr, Trans trans ); StepDataInterface getStepData(); boolean supportsErrorHandling(); }
/**
 * Password authentication succeeds: OpenConnection returns a non-null connection
 * and calls connect() and authenticateWithPassword() exactly once each.
 */
@Test
public void testOpenConnection_1() throws Exception {
  when( connection.authenticateWithPassword( username, password ) ).thenReturn( true );
  Object conn =
      SSHData.OpenConnection( server, port, username, password, false, null, null, 0, null, null, 0, null, null );
  assertNotNull( conn );
  verify( connection ).connect();
  verify( connection ).authenticateWithPassword( username, password );
}
/**
 * Opens and authenticates an SSH connection, optionally through an HTTP proxy.
 *
 * @param serveur       host to connect to
 * @param port          SSH port
 * @param username      user to authenticate as
 * @param password      password (used only when {@code useKey} is false)
 * @param useKey        true to authenticate with the private key in {@code keyFilename}
 * @param keyFilename   VFS path of the private key file; required when {@code useKey}
 * @param passPhrase    key pass phrase; variables are resolved against {@code space}
 * @param timeOut       connect timeout in seconds; 0 means connect without a timeout
 * @param space         variable space used to resolve the pass phrase (read only when {@code useKey})
 * @param proxyhost     optional HTTP proxy host; proxy setup is skipped when empty
 * @param proxyport     HTTP proxy port
 * @param proxyusername optional proxy user; when empty the proxy is used without credentials
 * @param proxypassword proxy password
 * @return an open, authenticated Connection
 * @throws KettleException when the key file is missing or absent, connecting fails, or
 *         authentication is refused; any partially opened connection is closed first
 */
public static Connection OpenConnection( String serveur, int port, String username, String password,
    boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost,
    int proxyport, String proxyusername, String proxypassword ) throws KettleException {
  Connection conn = null;
  char[] content = null;
  boolean isAuthenticated = false;
  try {
    if ( useKey ) {
      if ( Utils.isEmpty( keyFilename ) ) {
        throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) );
      }
      FileObject keyFileObject = KettleVFS.getFileObject( keyFilename );
      if ( !keyFileObject.exists() ) {
        throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) );
      }
      FileContent keyFileContent = keyFileObject.getContent();
      // Read the whole key file into memory; the stream is closed by
      // try-with-resources, CharArrayWriter needs no closing.
      CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() );
      try ( InputStream in = keyFileContent.getInputStream() ) {
        IOUtils.copy( in, charArrayWriter );
      }
      content = charArrayWriter.toCharArray();
    }
    conn = createConnection( serveur, port );
    if ( !Utils.isEmpty( proxyhost ) ) {
      if ( !Utils.isEmpty( proxyusername ) ) {
        conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) );
      } else {
        conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) );
      }
    }
    if ( timeOut == 0 ) {
      conn.connect();
    } else {
      // The connect API takes milliseconds; timeOut is configured in seconds.
      conn.connect( null, 0, timeOut * 1000 );
    }
    if ( useKey ) {
      isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) );
    } else {
      isAuthenticated = conn.authenticateWithPassword( username, password );
    }
    // Was: `if ( isAuthenticated == false )` — same behavior, idiomatic negation.
    if ( !isAuthenticated ) {
      throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) );
    }
  } catch ( Exception e ) {
    // Close the half-open connection before wrapping and rethrowing with context.
    if ( conn != null ) {
      conn.close();
    }
    throw new KettleException(
      BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", serveur, username ), e );
  }
  return conn;
}
// NOTE(review): dataset-row fragment of SSHData — a class body without the `class`
// keyword or field declarations; the row wraps across two source lines. Code kept
// byte-identical; comments only.
// OpenConnection(): static factory that (1) when useKey, reads the private key file
// through VFS into a char[], (2) creates the connection, optionally behind an HTTP
// proxy, (3) connects with an optional timeout (seconds converted to ms), (4)
// authenticates by public key (pass phrase resolved via space) or password, and
// (5) on any failure closes the connection and rethrows wrapped in a KettleException.
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } }
// NOTE(review): three dataset-row duplicates of SSHData.OpenConnection, each wrapping
// across two source lines, with progressively fuller member lists (constructor; then
// the static factory declaration; then the public step-data fields). Code kept
// byte-identical; comments only.
// OpenConnection(): reads the private key via VFS when useKey, connects (optionally
// through an HTTP proxy, timeOut seconds converted to ms), authenticates by key or
// password, and on any failure closes the connection and wraps it in a KettleException.
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); }
// Row variant: adds the static factory declaration.
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); }
// Row variant: additionally declares the public step-data fields.
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); public int indexOfCommand; public Connection conn; public boolean wroteOneRow; public String commands; public int nrInputFields; public int nrOutputFields; public String stdOutField; public String stdTypeField; public RowMetaInterface outputRowMeta; }
/**
 * Password authentication refused: OpenConnection must throw a KettleException.
 */
@Test( expected = KettleException.class )
public void testOpenConnection_2() throws Exception {
  when( connection.authenticateWithPassword( username, password ) ).thenReturn( false );
  SSHData.OpenConnection( server, port, username, password, false, null, null, 0, null, null, 0, null, null );
  // NOTE(review): the verifications below sit after the call expected to throw,
  // so they cannot run — kept for parity with the original test.
  verify( connection ).connect();
  verify( connection ).authenticateWithPassword( username, password );
}
public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", serveur, username ), e ); } return conn; }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); public int indexOfCommand; public Connection conn; public boolean wroteOneRow; public String commands; public int nrInputFields; public int nrOutputFields; public String stdOutField; public String stdTypeField; public RowMetaInterface outputRowMeta; }
@Test( expected = KettleException.class ) public void testOpenConnectionUseKey_1() throws Exception { when( fileObject.exists() ).thenReturn( false ); SSHData.OpenConnection( server, port, null, null, true, null, null, 0, null, null, 0, null, null ); verify( fileObject ).exists(); }
public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", serveur, username ), e ); } return conn; }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); public int indexOfCommand; public Connection conn; public boolean wroteOneRow; public String commands; public int nrInputFields; public int nrOutputFields; public String stdOutField; public String stdTypeField; public RowMetaInterface outputRowMeta; }
@Test public void testOpenConnectionUseKey_2() throws Exception { when( fileObject.exists() ).thenReturn( true ); when( fileObject.getContent() ).thenReturn( fileContent ); when( fileContent.getSize() ).thenReturn( 1000L ); when( fileContent.getInputStream() ).thenReturn( new ByteArrayInputStream( new byte[] { 1, 2, 3, 4, 5 } ) ); when( variableSpace.environmentSubstitute( passPhrase ) ).thenReturn( passPhrase ); when( connection.authenticateWithPublicKey( eq( username ), Matchers.<char[]>any(), eq( passPhrase ) ) ).thenReturn( true ); SSHData.OpenConnection( server, port, username, null, true, keyFilePath, passPhrase, 0, variableSpace, null, 0, null, null ); verify( connection ).connect(); verify( connection ).authenticateWithPublicKey( eq( username ), Matchers.<char[]>any(), eq( passPhrase ) ); }
public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", serveur, username ), e ); } return conn; }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); public int indexOfCommand; public Connection conn; public boolean wroteOneRow; public String commands; public int nrInputFields; public int nrOutputFields; public String stdOutField; public String stdTypeField; public RowMetaInterface outputRowMeta; }
@Test public void testOpenConnectionProxy() throws Exception { when( connection.authenticateWithPassword( username, password ) ).thenReturn( true ); assertNotNull( SSHData.OpenConnection( server, port, username, password, false, null, null, 0, null, proxyHost, proxyPort, proxyUsername, proxyPassword ) ); verify( connection ).connect(); verify( connection ).authenticateWithPassword( username, password ); verify( connection ).setProxyData( any( HTTPProxyData.class ) ); }
public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", serveur, username ), e ); } return conn; }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); public int indexOfCommand; public Connection conn; public boolean wroteOneRow; public String commands; public int nrInputFields; public int nrOutputFields; public String stdOutField; public String stdTypeField; public RowMetaInterface outputRowMeta; }
@Test public void testOpenConnectionTimeOut() throws Exception { when( connection.authenticateWithPassword( username, password ) ).thenReturn( true ); assertNotNull( SSHData.OpenConnection( server, port, username, password, false, null, null, 100, null, null, proxyPort, proxyUsername, proxyPassword ) ); verify( connection ).connect( isNull( ServerHostKeyVerifier.class ), eq( 0 ), eq( 100 * 1000 ) ); }
public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", serveur, username ), e ); } return conn; }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); }
SSHData extends BaseStepData implements StepDataInterface { public static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ) throws KettleException { Connection conn = null; char[] content = null; boolean isAuthenticated = false; try { if ( useKey ) { if ( Utils.isEmpty( keyFilename ) ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyFileMissing" ) ); } FileObject keyFileObject = KettleVFS.getFileObject( keyFilename ); if ( !keyFileObject.exists() ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.PrivateKeyNotExist", keyFilename ) ); } FileContent keyFileContent = keyFileObject.getContent(); CharArrayWriter charArrayWriter = new CharArrayWriter( (int) keyFileContent.getSize() ); try ( InputStream in = keyFileContent.getInputStream() ) { IOUtils.copy( in, charArrayWriter ); } content = charArrayWriter.toCharArray(); } conn = createConnection( serveur, port ); if ( !Utils.isEmpty( proxyhost ) ) { if ( !Utils.isEmpty( proxyusername ) ) { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport, proxyusername, proxypassword ) ); } else { conn.setProxyData( new HTTPProxyData( proxyhost, proxyport ) ); } } if ( timeOut == 0 ) { conn.connect(); } else { conn.connect( null, 0, timeOut * 1000 ); } if ( useKey ) { isAuthenticated = conn.authenticateWithPublicKey( username, content, space.environmentSubstitute( passPhrase ) ); } else { isAuthenticated = conn.authenticateWithPassword( username, password ); } if ( isAuthenticated == false ) { throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.AuthenticationFailed", username ) ); } } catch ( Exception e ) { if ( conn != null ) { conn.close(); } throw new KettleException( BaseMessages.getString( SSHMeta.PKG, "SSH.Error.ErrorConnecting", 
serveur, username ), e ); } return conn; } SSHData(); static Connection OpenConnection( String serveur, int port, String username, String password, boolean useKey, String keyFilename, String passPhrase, int timeOut, VariableSpace space, String proxyhost, int proxyport, String proxyusername, String proxypassword ); public int indexOfCommand; public Connection conn; public boolean wroteOneRow; public String commands; public int nrInputFields; public int nrOutputFields; public String stdOutField; public String stdTypeField; public RowMetaInterface outputRowMeta; }
@Test public void testGetFields() { final String stepName = "this step name"; MemoryGroupByMeta meta = new MemoryGroupByMeta(); meta.setDefault(); meta.allocate( 1, 17 ); RowMetaInterface rm = getInputRowMeta(); String[] groupFields = new String[2]; groupFields[0] = "myGroupField1"; groupFields[1] = "myGroupField2"; String[] aggregateFields = new String[24]; String[] subjectFields = new String[24]; int[] aggregateTypes = new int[24]; String[] valueFields = new String[24]; subjectFields[0] = "myString"; aggregateTypes[0] = MemoryGroupByMeta.TYPE_GROUP_CONCAT_COMMA; aggregateFields[0] = "ConcatComma"; valueFields[0] = null; subjectFields[1] = "myString"; aggregateTypes[1] = MemoryGroupByMeta.TYPE_GROUP_CONCAT_STRING; aggregateFields[1] = "ConcatString"; valueFields[1] = "|"; subjectFields[2] = "myString"; aggregateTypes[2] = MemoryGroupByMeta.TYPE_GROUP_COUNT_ALL; aggregateFields[2] = "CountAll"; valueFields[2] = null; subjectFields[3] = "myString"; aggregateTypes[3] = MemoryGroupByMeta.TYPE_GROUP_COUNT_ANY; aggregateFields[3] = "CountAny"; valueFields[3] = null; subjectFields[4] = "myString"; aggregateTypes[4] = MemoryGroupByMeta.TYPE_GROUP_COUNT_DISTINCT; aggregateFields[4] = "CountDistinct"; valueFields[4] = null; subjectFields[5] = "myString"; aggregateTypes[5] = MemoryGroupByMeta.TYPE_GROUP_FIRST; aggregateFields[5] = "First(String)"; valueFields[5] = null; subjectFields[6] = "myInteger"; aggregateTypes[6] = MemoryGroupByMeta.TYPE_GROUP_FIRST; aggregateFields[6] = "First(Integer)"; valueFields[6] = null; subjectFields[7] = "myNumber"; aggregateTypes[7] = MemoryGroupByMeta.TYPE_GROUP_FIRST_INCL_NULL; aggregateFields[7] = "FirstInclNull(Number)"; valueFields[7] = null; subjectFields[8] = "myBigNumber"; aggregateTypes[8] = MemoryGroupByMeta.TYPE_GROUP_FIRST_INCL_NULL; aggregateFields[8] = "FirstInclNull(BigNumber)"; valueFields[8] = null; subjectFields[9] = "myBinary"; aggregateTypes[9] = MemoryGroupByMeta.TYPE_GROUP_LAST; aggregateFields[9] = "Last(Binary)"; 
valueFields[9] = null; subjectFields[10] = "myBoolean"; aggregateTypes[10] = MemoryGroupByMeta.TYPE_GROUP_LAST; aggregateFields[10] = "Last(Boolean)"; valueFields[10] = null; subjectFields[11] = "myDate"; aggregateTypes[11] = MemoryGroupByMeta.TYPE_GROUP_LAST_INCL_NULL; aggregateFields[11] = "LastInclNull(Date)"; valueFields[11] = null; subjectFields[12] = "myTimestamp"; aggregateTypes[12] = MemoryGroupByMeta.TYPE_GROUP_LAST_INCL_NULL; aggregateFields[12] = "LastInclNull(Timestamp)"; valueFields[12] = null; subjectFields[13] = "myInternetAddress"; aggregateTypes[13] = MemoryGroupByMeta.TYPE_GROUP_MAX; aggregateFields[13] = "Max(InternetAddress)"; valueFields[13] = null; subjectFields[14] = "myString"; aggregateTypes[14] = MemoryGroupByMeta.TYPE_GROUP_MAX; aggregateFields[14] = "Max(String)"; valueFields[14] = null; subjectFields[15] = "myInteger"; aggregateTypes[15] = MemoryGroupByMeta.TYPE_GROUP_MEDIAN; aggregateFields[15] = "Median(Integer)"; valueFields[15] = null; subjectFields[16] = "myNumber"; aggregateTypes[16] = MemoryGroupByMeta.TYPE_GROUP_MIN; aggregateFields[16] = "Min(Number)"; valueFields[16] = null; subjectFields[17] = "myBigNumber"; aggregateTypes[17] = MemoryGroupByMeta.TYPE_GROUP_MIN; aggregateFields[17] = "Min(BigNumber)"; valueFields[17] = null; subjectFields[18] = "myBinary"; aggregateTypes[18] = MemoryGroupByMeta.TYPE_GROUP_PERCENTILE; aggregateFields[18] = "Percentile(Binary)"; valueFields[18] = "0.5"; subjectFields[19] = "myBoolean"; aggregateTypes[19] = MemoryGroupByMeta.TYPE_GROUP_STANDARD_DEVIATION; aggregateFields[19] = "StandardDeviation(Boolean)"; valueFields[19] = null; subjectFields[20] = "myDate"; aggregateTypes[20] = MemoryGroupByMeta.TYPE_GROUP_SUM; aggregateFields[20] = "Sum(Date)"; valueFields[20] = null; subjectFields[21] = "myInteger"; aggregateTypes[21] = MemoryGroupByMeta.TYPE_GROUP_SUM; aggregateFields[21] = "Sum(Integer)"; valueFields[21] = null; subjectFields[22] = "myInteger"; aggregateTypes[22] = 
MemoryGroupByMeta.TYPE_GROUP_AVERAGE; aggregateFields[22] = "Average(Integer)"; valueFields[22] = null; subjectFields[23] = "myDate"; aggregateTypes[23] = MemoryGroupByMeta.TYPE_GROUP_AVERAGE; aggregateFields[23] = "Average(Date)"; valueFields[23] = null; meta.setGroupField( groupFields ); meta.setSubjectField( subjectFields ); meta.setAggregateType( aggregateTypes ); meta.setAggregateField( aggregateFields ); meta.setValueField( valueFields ); Variables vars = new Variables(); meta.getFields( rm, stepName, null, null, vars, null, null ); assertNotNull( rm ); assertEquals( 26, rm.size() ); assertTrue( rm.indexOfValue( "myGroupField1" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( rm.indexOfValue( "myGroupField1" ) ).getType() ); assertTrue( rm.indexOfValue( "myGroupField2" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( rm.indexOfValue( "myGroupField2" ) ).getType() ); assertTrue( rm.indexOfValue( "myGroupField2" ) > rm.indexOfValue( "myGroupField1" ) ); assertTrue( rm.indexOfValue( "ConcatComma" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( rm.indexOfValue( "ConcatComma" ) ).getType() ); assertTrue( rm.indexOfValue( "ConcatString" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( rm.indexOfValue( "ConcatString" ) ).getType() ); assertTrue( rm.indexOfValue( "CountAll" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INTEGER, rm.getValueMeta( rm.indexOfValue( "CountAll" ) ).getType() ); assertTrue( rm.indexOfValue( "CountAny" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INTEGER, rm.getValueMeta( rm.indexOfValue( "CountAny" ) ).getType() ); assertTrue( rm.indexOfValue( "CountDistinct" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INTEGER, rm.getValueMeta( rm.indexOfValue( "CountDistinct" ) ).getType() ); assertTrue( rm.indexOfValue( "First(String)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( rm.indexOfValue( "First(String)" ) 
).getType() ); assertTrue( rm.indexOfValue( "First(Integer)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INTEGER, rm.getValueMeta( rm.indexOfValue( "First(Integer)" ) ).getType() ); assertTrue( rm.indexOfValue( "FirstInclNull(Number)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "FirstInclNull(Number)" ) ).getType() ); assertTrue( rm.indexOfValue( "FirstInclNull(BigNumber)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_BIGNUMBER, rm.getValueMeta( rm.indexOfValue( "FirstInclNull(BigNumber)" ) ).getType() ); assertTrue( rm.indexOfValue( "Last(Binary)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_BINARY, rm.getValueMeta( rm.indexOfValue( "Last(Binary)" ) ).getType() ); assertTrue( rm.indexOfValue( "Last(Boolean)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_BOOLEAN, rm.getValueMeta( rm.indexOfValue( "Last(Boolean)" ) ).getType() ); assertTrue( rm.indexOfValue( "LastInclNull(Date)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_DATE, rm.getValueMeta( rm.indexOfValue( "LastInclNull(Date)" ) ).getType() ); assertTrue( rm.indexOfValue( "LastInclNull(Timestamp)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_TIMESTAMP, rm.getValueMeta( rm.indexOfValue( "LastInclNull(Timestamp)" ) ).getType() ); assertTrue( rm.indexOfValue( "Max(InternetAddress)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INET, rm.getValueMeta( rm.indexOfValue( "Max(InternetAddress)" ) ).getType() ); assertTrue( rm.indexOfValue( "Max(String)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_STRING, rm.getValueMeta( rm.indexOfValue( "Max(String)" ) ).getType() ); assertTrue( rm.indexOfValue( "Median(Integer)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "Median(Integer)" ) ).getType() ); assertTrue( rm.indexOfValue( "Min(Number)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "Min(Number)" ) ).getType() ); assertTrue( rm.indexOfValue( "Min(BigNumber)" ) >= 0 
); assertEquals( ValueMetaInterface.TYPE_BIGNUMBER, rm.getValueMeta( rm.indexOfValue( "Min(BigNumber)" ) ).getType() ); assertTrue( rm.indexOfValue( "Percentile(Binary)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "Percentile(Binary)" ) ).getType() ); assertTrue( rm.indexOfValue( "StandardDeviation(Boolean)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "StandardDeviation(Boolean)" ) ).getType() ); assertTrue( rm.indexOfValue( "Sum(Date)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "Sum(Date)" ) ).getType() ); assertTrue( rm.indexOfValue( "Sum(Integer)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INTEGER, rm.getValueMeta( rm.indexOfValue( "Sum(Integer)" ) ).getType() ); assertTrue( rm.indexOfValue( "Average(Integer)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_INTEGER, rm.getValueMeta( rm.indexOfValue( "Average(Integer)" ) ).getType() ); assertTrue( rm.indexOfValue( "Average(Date)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "Average(Date)" ) ).getType() ); rm = getInputRowMeta(); vars.setVariable( Const.KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE, "Y" ); meta.getFields( rm, stepName, null, null, vars, null, null ); assertNotNull( rm ); assertEquals( 26, rm.size() ); assertTrue( rm.indexOfValue( "Average(Integer)" ) >= 0 ); assertEquals( ValueMetaInterface.TYPE_NUMBER, rm.getValueMeta( rm.indexOfValue( "Average(Integer)" ) ).getType() ); }
@Override public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean compatibilityMode = ValueMetaBase.convertStringToBoolean( space.getVariable( Const.KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE, "N" ) ); RowMetaInterface fields = new RowMeta(); for ( int i = 0; i < groupField.length; i++ ) { ValueMetaInterface valueMeta = r.searchValueMeta( groupField[i] ); if ( valueMeta != null ) { valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); fields.addValueMeta( valueMeta ); } } for ( int i = 0; i < subjectField.length; i++ ) { ValueMetaInterface subj = r.searchValueMeta( subjectField[i] ); if ( subj != null || aggregateType[i] == TYPE_GROUP_COUNT_ANY ) { String value_name = aggregateField[i]; int value_type = ValueMetaInterface.TYPE_NONE; int length = -1; int precision = -1; switch ( aggregateType[i] ) { case TYPE_GROUP_FIRST: case TYPE_GROUP_LAST: case TYPE_GROUP_FIRST_INCL_NULL: case TYPE_GROUP_LAST_INCL_NULL: case TYPE_GROUP_MIN: case TYPE_GROUP_MAX: value_type = subj.getType(); break; case TYPE_GROUP_COUNT_DISTINCT: case TYPE_GROUP_COUNT_ALL: case TYPE_GROUP_COUNT_ANY: value_type = ValueMetaInterface.TYPE_INTEGER; break; case TYPE_GROUP_CONCAT_COMMA: value_type = ValueMetaInterface.TYPE_STRING; break; case TYPE_GROUP_SUM: case TYPE_GROUP_AVERAGE: if ( !compatibilityMode && subj.isNumeric() ) { value_type = subj.getType(); } else { value_type = ValueMetaInterface.TYPE_NUMBER; } break; case TYPE_GROUP_MEDIAN: case TYPE_GROUP_PERCENTILE: case TYPE_GROUP_STANDARD_DEVIATION: value_type = ValueMetaInterface.TYPE_NUMBER; break; case TYPE_GROUP_CONCAT_STRING: value_type = ValueMetaInterface.TYPE_STRING; break; default: break; } if ( aggregateType[i] == TYPE_GROUP_COUNT_ALL || aggregateType[i] == TYPE_GROUP_COUNT_DISTINCT || aggregateType[i] == TYPE_GROUP_COUNT_ANY ) { length = 
ValueMetaInterface.DEFAULT_INTEGER_LENGTH; precision = 0; } else if ( aggregateType[i] == TYPE_GROUP_SUM && value_type != ValueMetaInterface.TYPE_INTEGER && value_type != ValueMetaInterface.TYPE_NUMBER && value_type != ValueMetaInterface.TYPE_BIGNUMBER ) { value_type = ValueMetaInterface.TYPE_NUMBER; precision = -1; length = -1; } if ( value_type != ValueMetaInterface.TYPE_NONE ) { ValueMetaInterface v; try { v = ValueMetaFactory.createValueMeta( value_name, value_type ); } catch ( KettlePluginException e ) { log.logError( BaseMessages.getString( PKG, "MemoryGroupByMeta.Exception.UnknownValueMetaType" ), value_type, e ); v = new ValueMetaNone( value_name ); } v.setOrigin( origin ); v.setLength( length, precision ); fields.addValueMeta( v ); } } } r.clear(); r.addRowMeta( fields ); }
MemoryGroupByMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean compatibilityMode = ValueMetaBase.convertStringToBoolean( space.getVariable( Const.KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE, "N" ) ); RowMetaInterface fields = new RowMeta(); for ( int i = 0; i < groupField.length; i++ ) { ValueMetaInterface valueMeta = r.searchValueMeta( groupField[i] ); if ( valueMeta != null ) { valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); fields.addValueMeta( valueMeta ); } } for ( int i = 0; i < subjectField.length; i++ ) { ValueMetaInterface subj = r.searchValueMeta( subjectField[i] ); if ( subj != null || aggregateType[i] == TYPE_GROUP_COUNT_ANY ) { String value_name = aggregateField[i]; int value_type = ValueMetaInterface.TYPE_NONE; int length = -1; int precision = -1; switch ( aggregateType[i] ) { case TYPE_GROUP_FIRST: case TYPE_GROUP_LAST: case TYPE_GROUP_FIRST_INCL_NULL: case TYPE_GROUP_LAST_INCL_NULL: case TYPE_GROUP_MIN: case TYPE_GROUP_MAX: value_type = subj.getType(); break; case TYPE_GROUP_COUNT_DISTINCT: case TYPE_GROUP_COUNT_ALL: case TYPE_GROUP_COUNT_ANY: value_type = ValueMetaInterface.TYPE_INTEGER; break; case TYPE_GROUP_CONCAT_COMMA: value_type = ValueMetaInterface.TYPE_STRING; break; case TYPE_GROUP_SUM: case TYPE_GROUP_AVERAGE: if ( !compatibilityMode && subj.isNumeric() ) { value_type = subj.getType(); } else { value_type = ValueMetaInterface.TYPE_NUMBER; } break; case TYPE_GROUP_MEDIAN: case TYPE_GROUP_PERCENTILE: case TYPE_GROUP_STANDARD_DEVIATION: value_type = ValueMetaInterface.TYPE_NUMBER; break; case TYPE_GROUP_CONCAT_STRING: value_type = ValueMetaInterface.TYPE_STRING; break; default: break; } if ( aggregateType[i] == TYPE_GROUP_COUNT_ALL || aggregateType[i] == TYPE_GROUP_COUNT_DISTINCT || aggregateType[i] == 
TYPE_GROUP_COUNT_ANY ) { length = ValueMetaInterface.DEFAULT_INTEGER_LENGTH; precision = 0; } else if ( aggregateType[i] == TYPE_GROUP_SUM && value_type != ValueMetaInterface.TYPE_INTEGER && value_type != ValueMetaInterface.TYPE_NUMBER && value_type != ValueMetaInterface.TYPE_BIGNUMBER ) { value_type = ValueMetaInterface.TYPE_NUMBER; precision = -1; length = -1; } if ( value_type != ValueMetaInterface.TYPE_NONE ) { ValueMetaInterface v; try { v = ValueMetaFactory.createValueMeta( value_name, value_type ); } catch ( KettlePluginException e ) { log.logError( BaseMessages.getString( PKG, "MemoryGroupByMeta.Exception.UnknownValueMetaType" ), value_type, e ); v = new ValueMetaNone( value_name ); } v.setOrigin( origin ); v.setLength( length, precision ); fields.addValueMeta( v ); } } } r.clear(); r.addRowMeta( fields ); } }
MemoryGroupByMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean compatibilityMode = ValueMetaBase.convertStringToBoolean( space.getVariable( Const.KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE, "N" ) ); RowMetaInterface fields = new RowMeta(); for ( int i = 0; i < groupField.length; i++ ) { ValueMetaInterface valueMeta = r.searchValueMeta( groupField[i] ); if ( valueMeta != null ) { valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); fields.addValueMeta( valueMeta ); } } for ( int i = 0; i < subjectField.length; i++ ) { ValueMetaInterface subj = r.searchValueMeta( subjectField[i] ); if ( subj != null || aggregateType[i] == TYPE_GROUP_COUNT_ANY ) { String value_name = aggregateField[i]; int value_type = ValueMetaInterface.TYPE_NONE; int length = -1; int precision = -1; switch ( aggregateType[i] ) { case TYPE_GROUP_FIRST: case TYPE_GROUP_LAST: case TYPE_GROUP_FIRST_INCL_NULL: case TYPE_GROUP_LAST_INCL_NULL: case TYPE_GROUP_MIN: case TYPE_GROUP_MAX: value_type = subj.getType(); break; case TYPE_GROUP_COUNT_DISTINCT: case TYPE_GROUP_COUNT_ALL: case TYPE_GROUP_COUNT_ANY: value_type = ValueMetaInterface.TYPE_INTEGER; break; case TYPE_GROUP_CONCAT_COMMA: value_type = ValueMetaInterface.TYPE_STRING; break; case TYPE_GROUP_SUM: case TYPE_GROUP_AVERAGE: if ( !compatibilityMode && subj.isNumeric() ) { value_type = subj.getType(); } else { value_type = ValueMetaInterface.TYPE_NUMBER; } break; case TYPE_GROUP_MEDIAN: case TYPE_GROUP_PERCENTILE: case TYPE_GROUP_STANDARD_DEVIATION: value_type = ValueMetaInterface.TYPE_NUMBER; break; case TYPE_GROUP_CONCAT_STRING: value_type = ValueMetaInterface.TYPE_STRING; break; default: break; } if ( aggregateType[i] == TYPE_GROUP_COUNT_ALL || aggregateType[i] == TYPE_GROUP_COUNT_DISTINCT || aggregateType[i] == 
TYPE_GROUP_COUNT_ANY ) { length = ValueMetaInterface.DEFAULT_INTEGER_LENGTH; precision = 0; } else if ( aggregateType[i] == TYPE_GROUP_SUM && value_type != ValueMetaInterface.TYPE_INTEGER && value_type != ValueMetaInterface.TYPE_NUMBER && value_type != ValueMetaInterface.TYPE_BIGNUMBER ) { value_type = ValueMetaInterface.TYPE_NUMBER; precision = -1; length = -1; } if ( value_type != ValueMetaInterface.TYPE_NONE ) { ValueMetaInterface v; try { v = ValueMetaFactory.createValueMeta( value_name, value_type ); } catch ( KettlePluginException e ) { log.logError( BaseMessages.getString( PKG, "MemoryGroupByMeta.Exception.UnknownValueMetaType" ), value_type, e ); v = new ValueMetaNone( value_name ); } v.setOrigin( origin ); v.setLength( length, precision ); fields.addValueMeta( v ); } } } r.clear(); r.addRowMeta( fields ); } MemoryGroupByMeta(); }
MemoryGroupByMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean compatibilityMode = ValueMetaBase.convertStringToBoolean( space.getVariable( Const.KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE, "N" ) ); RowMetaInterface fields = new RowMeta(); for ( int i = 0; i < groupField.length; i++ ) { ValueMetaInterface valueMeta = r.searchValueMeta( groupField[i] ); if ( valueMeta != null ) { valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); fields.addValueMeta( valueMeta ); } } for ( int i = 0; i < subjectField.length; i++ ) { ValueMetaInterface subj = r.searchValueMeta( subjectField[i] ); if ( subj != null || aggregateType[i] == TYPE_GROUP_COUNT_ANY ) { String value_name = aggregateField[i]; int value_type = ValueMetaInterface.TYPE_NONE; int length = -1; int precision = -1; switch ( aggregateType[i] ) { case TYPE_GROUP_FIRST: case TYPE_GROUP_LAST: case TYPE_GROUP_FIRST_INCL_NULL: case TYPE_GROUP_LAST_INCL_NULL: case TYPE_GROUP_MIN: case TYPE_GROUP_MAX: value_type = subj.getType(); break; case TYPE_GROUP_COUNT_DISTINCT: case TYPE_GROUP_COUNT_ALL: case TYPE_GROUP_COUNT_ANY: value_type = ValueMetaInterface.TYPE_INTEGER; break; case TYPE_GROUP_CONCAT_COMMA: value_type = ValueMetaInterface.TYPE_STRING; break; case TYPE_GROUP_SUM: case TYPE_GROUP_AVERAGE: if ( !compatibilityMode && subj.isNumeric() ) { value_type = subj.getType(); } else { value_type = ValueMetaInterface.TYPE_NUMBER; } break; case TYPE_GROUP_MEDIAN: case TYPE_GROUP_PERCENTILE: case TYPE_GROUP_STANDARD_DEVIATION: value_type = ValueMetaInterface.TYPE_NUMBER; break; case TYPE_GROUP_CONCAT_STRING: value_type = ValueMetaInterface.TYPE_STRING; break; default: break; } if ( aggregateType[i] == TYPE_GROUP_COUNT_ALL || aggregateType[i] == TYPE_GROUP_COUNT_DISTINCT || aggregateType[i] == 
TYPE_GROUP_COUNT_ANY ) { length = ValueMetaInterface.DEFAULT_INTEGER_LENGTH; precision = 0; } else if ( aggregateType[i] == TYPE_GROUP_SUM && value_type != ValueMetaInterface.TYPE_INTEGER && value_type != ValueMetaInterface.TYPE_NUMBER && value_type != ValueMetaInterface.TYPE_BIGNUMBER ) { value_type = ValueMetaInterface.TYPE_NUMBER; precision = -1; length = -1; } if ( value_type != ValueMetaInterface.TYPE_NONE ) { ValueMetaInterface v; try { v = ValueMetaFactory.createValueMeta( value_name, value_type ); } catch ( KettlePluginException e ) { log.logError( BaseMessages.getString( PKG, "MemoryGroupByMeta.Exception.UnknownValueMetaType" ), value_type, e ); v = new ValueMetaNone( value_name ); } v.setOrigin( origin ); v.setLength( length, precision ); fields.addValueMeta( v ); } } } r.clear(); r.addRowMeta( fields ); } MemoryGroupByMeta(); String[] getAggregateField(); void setAggregateField( String[] aggregateField ); int[] getAggregateType(); void setAggregateType( int[] aggregateType ); String[] getGroupField(); void setGroupField( String[] groupField ); String[] getSubjectField(); void setSubjectField( String[] subjectField ); String[] getValueField(); void setValueField( String[] valueField ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int sizegroup, int nrfields ); @Override Object clone(); static final int getType( String desc ); static final String getTypeDesc( int i ); static final String getTypeDescLong( int i ); @Override void setDefault(); @Override void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); @Override void check( 
List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); boolean isAlwaysGivingBackOneRow(); void setAlwaysGivingBackOneRow( boolean alwaysGivingBackOneRow ); @AfterInjection void afterInjectionSynchronization(); }
MemoryGroupByMeta extends BaseStepMeta implements StepMetaInterface { @Override public void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ) { boolean compatibilityMode = ValueMetaBase.convertStringToBoolean( space.getVariable( Const.KETTLE_COMPATIBILITY_MEMORY_GROUP_BY_SUM_AVERAGE_RETURN_NUMBER_TYPE, "N" ) ); RowMetaInterface fields = new RowMeta(); for ( int i = 0; i < groupField.length; i++ ) { ValueMetaInterface valueMeta = r.searchValueMeta( groupField[i] ); if ( valueMeta != null ) { valueMeta.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL ); fields.addValueMeta( valueMeta ); } } for ( int i = 0; i < subjectField.length; i++ ) { ValueMetaInterface subj = r.searchValueMeta( subjectField[i] ); if ( subj != null || aggregateType[i] == TYPE_GROUP_COUNT_ANY ) { String value_name = aggregateField[i]; int value_type = ValueMetaInterface.TYPE_NONE; int length = -1; int precision = -1; switch ( aggregateType[i] ) { case TYPE_GROUP_FIRST: case TYPE_GROUP_LAST: case TYPE_GROUP_FIRST_INCL_NULL: case TYPE_GROUP_LAST_INCL_NULL: case TYPE_GROUP_MIN: case TYPE_GROUP_MAX: value_type = subj.getType(); break; case TYPE_GROUP_COUNT_DISTINCT: case TYPE_GROUP_COUNT_ALL: case TYPE_GROUP_COUNT_ANY: value_type = ValueMetaInterface.TYPE_INTEGER; break; case TYPE_GROUP_CONCAT_COMMA: value_type = ValueMetaInterface.TYPE_STRING; break; case TYPE_GROUP_SUM: case TYPE_GROUP_AVERAGE: if ( !compatibilityMode && subj.isNumeric() ) { value_type = subj.getType(); } else { value_type = ValueMetaInterface.TYPE_NUMBER; } break; case TYPE_GROUP_MEDIAN: case TYPE_GROUP_PERCENTILE: case TYPE_GROUP_STANDARD_DEVIATION: value_type = ValueMetaInterface.TYPE_NUMBER; break; case TYPE_GROUP_CONCAT_STRING: value_type = ValueMetaInterface.TYPE_STRING; break; default: break; } if ( aggregateType[i] == TYPE_GROUP_COUNT_ALL || aggregateType[i] == TYPE_GROUP_COUNT_DISTINCT || aggregateType[i] == 
TYPE_GROUP_COUNT_ANY ) { length = ValueMetaInterface.DEFAULT_INTEGER_LENGTH; precision = 0; } else if ( aggregateType[i] == TYPE_GROUP_SUM && value_type != ValueMetaInterface.TYPE_INTEGER && value_type != ValueMetaInterface.TYPE_NUMBER && value_type != ValueMetaInterface.TYPE_BIGNUMBER ) { value_type = ValueMetaInterface.TYPE_NUMBER; precision = -1; length = -1; } if ( value_type != ValueMetaInterface.TYPE_NONE ) { ValueMetaInterface v; try { v = ValueMetaFactory.createValueMeta( value_name, value_type ); } catch ( KettlePluginException e ) { log.logError( BaseMessages.getString( PKG, "MemoryGroupByMeta.Exception.UnknownValueMetaType" ), value_type, e ); v = new ValueMetaNone( value_name ); } v.setOrigin( origin ); v.setLength( length, precision ); fields.addValueMeta( v ); } } } r.clear(); r.addRowMeta( fields ); } MemoryGroupByMeta(); String[] getAggregateField(); void setAggregateField( String[] aggregateField ); int[] getAggregateType(); void setAggregateType( int[] aggregateType ); String[] getGroupField(); void setGroupField( String[] groupField ); String[] getSubjectField(); void setSubjectField( String[] subjectField ); String[] getValueField(); void setValueField( String[] valueField ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); void allocate( int sizegroup, int nrfields ); @Override Object clone(); static final int getType( String desc ); static final String getTypeDesc( int i ); static final String getTypeDescLong( int i ); @Override void setDefault(); @Override void getFields( RowMetaInterface r, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); @Override void check( 
List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); boolean isAlwaysGivingBackOneRow(); void setAlwaysGivingBackOneRow( boolean alwaysGivingBackOneRow ); @AfterInjection void afterInjectionSynchronization(); static final int TYPE_GROUP_NONE; static final int TYPE_GROUP_SUM; static final int TYPE_GROUP_AVERAGE; static final int TYPE_GROUP_MEDIAN; static final int TYPE_GROUP_PERCENTILE; static final int TYPE_GROUP_MIN; static final int TYPE_GROUP_MAX; static final int TYPE_GROUP_COUNT_ALL; static final int TYPE_GROUP_CONCAT_COMMA; static final int TYPE_GROUP_FIRST; static final int TYPE_GROUP_LAST; static final int TYPE_GROUP_FIRST_INCL_NULL; static final int TYPE_GROUP_LAST_INCL_NULL; static final int TYPE_GROUP_STANDARD_DEVIATION; static final int TYPE_GROUP_CONCAT_STRING; static final int TYPE_GROUP_COUNT_DISTINCT; static final int TYPE_GROUP_COUNT_ANY; static final String[] typeGroupCode; static final String[] typeGroupLongDesc; }
@Test public void hashEntryTest() { HashMap<MemoryGroupByData.HashEntry, String> map = new HashMap<>(); byte[] byteValue1 = "key".getBytes(); Object[] groupData1 = new Object[1]; groupData1[0] = byteValue1; MemoryGroupByData.HashEntry hashEntry1 = data.getHashEntry( groupData1 ); map.put( hashEntry1, "value" ); byte[] byteValue2 = "key".getBytes(); Object[] groupData2 = new Object[1]; groupData2[0] = byteValue2; MemoryGroupByData.HashEntry hashEntry2 = data.getHashEntry( groupData2 ); String value = map.get( hashEntry2 ); assertEquals( "value", value ); }
public HashEntry getHashEntry( Object[] groupData ) { return new HashEntry( groupData ); }
MemoryGroupByData extends BaseStepData implements StepDataInterface { public HashEntry getHashEntry( Object[] groupData ) { return new HashEntry( groupData ); } }
MemoryGroupByData extends BaseStepData implements StepDataInterface { public HashEntry getHashEntry( Object[] groupData ) { return new HashEntry( groupData ); } MemoryGroupByData(); }
MemoryGroupByData extends BaseStepData implements StepDataInterface { public HashEntry getHashEntry( Object[] groupData ) { return new HashEntry( groupData ); } MemoryGroupByData(); HashEntry getHashEntry( Object[] groupData ); void clear(); }
MemoryGroupByData extends BaseStepData implements StepDataInterface { public HashEntry getHashEntry( Object[] groupData ) { return new HashEntry( groupData ); } MemoryGroupByData(); HashEntry getHashEntry( Object[] groupData ); void clear(); public HashMap<HashEntry, Aggregate> map; public RowMetaInterface aggMeta; public RowMetaInterface groupMeta; public RowMetaInterface entryMeta; public RowMetaInterface groupAggMeta; public int[] groupnrs; public int[] subjectnrs; public boolean firstRead; public Object[] groupResult; public boolean hasOutput; public RowMetaInterface inputRowMeta; public RowMetaInterface outputRowMeta; public ValueMetaInterface valueMetaInteger; public ValueMetaInterface valueMetaNumber; public boolean newBatch; }
@Test public void testRegisterUrlNc() throws Exception { namedClusterEmbedManager.registerUrl( "hc: verify( mockMetaStoreFactory ).saveElement( mockNamedCluster1 ); }
public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void getHostServerSocketPorts() { transformationMap.allocateServerSocketPort( 1, TEST_HOST, CLUSTERED_RUN_ID, TEST_TRANSFORMATION_NAME, TEST_SOURCE_SLAVE_NAME, TEST_SOURCE_STEP_NAME, TEST_SOURCE_STEP_COPY, TEST_TARGET_SLAVE_NAME, TEST_TARGET_STEP_NAME, TEST_TARGET_STEP_COPY ); List<SocketPortAllocation> actualResult = transformationMap.getHostServerSocketPorts( TEST_HOST ); assertNotNull( actualResult ); assertEquals( 1, actualResult.size() ); }
public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); }
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } }
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } TransformationMap(); }
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } TransformationMap(); void addTransformation( String transformationName, String containerObjectId, Trans trans, TransConfiguration transConfiguration ); void registerTransformation( Trans trans, TransConfiguration transConfiguration ); Trans getTransformation( String transformationName ); Trans getTransformation( CarteObjectEntry entry ); TransConfiguration getConfiguration( String transformationName ); TransConfiguration getConfiguration( CarteObjectEntry entry ); void removeTransformation( CarteObjectEntry entry ); List<CarteObjectEntry> getTransformationObjects(); SocketPortAllocation allocateServerSocketPort( int portRangeStart, String hostname, String clusteredRunId, String transformationName, String sourceSlaveName, String sourceStepName, String sourceStepCopy, String targetSlaveName, String targetStepName, String targetStepCopy ); void deallocateServerSocketPorts( String transName, String carteObjectId ); void deallocateServerSocketPorts( CarteObjectEntry entry ); void deallocateServerSocketPort( int port, String hostname ); CarteObjectEntry getFirstCarteObjectEntry( String transName ); SlaveServerConfig getSlaveServerConfig(); void setSlaveServerConfig( SlaveServerConfig slaveServerConfig ); List<SocketPortAllocation> getHostServerSocketPorts( String hostname ); SlaveSequence getSlaveSequence( String name ); boolean isAutomaticSlaveSequenceCreationAllowed(); SlaveSequence createSlaveSequence( String name ); }
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } TransformationMap(); void addTransformation( String transformationName, String containerObjectId, Trans trans, TransConfiguration transConfiguration ); void registerTransformation( Trans trans, TransConfiguration transConfiguration ); Trans getTransformation( String transformationName ); Trans getTransformation( CarteObjectEntry entry ); TransConfiguration getConfiguration( String transformationName ); TransConfiguration getConfiguration( CarteObjectEntry entry ); void removeTransformation( CarteObjectEntry entry ); List<CarteObjectEntry> getTransformationObjects(); SocketPortAllocation allocateServerSocketPort( int portRangeStart, String hostname, String clusteredRunId, String transformationName, String sourceSlaveName, String sourceStepName, String sourceStepCopy, String targetSlaveName, String targetStepName, String targetStepCopy ); void deallocateServerSocketPorts( String transName, String carteObjectId ); void deallocateServerSocketPorts( CarteObjectEntry entry ); void deallocateServerSocketPort( int port, String hostname ); CarteObjectEntry getFirstCarteObjectEntry( String transName ); SlaveServerConfig getSlaveServerConfig(); void setSlaveServerConfig( SlaveServerConfig slaveServerConfig ); List<SocketPortAllocation> getHostServerSocketPorts( String hostname ); SlaveSequence getSlaveSequence( String name ); boolean isAutomaticSlaveSequenceCreationAllowed(); SlaveSequence createSlaveSequence( String name ); }
@Test public void testRegisterUrlNotNc() throws Exception { namedClusterEmbedManager.registerUrl( "hdfs: verify( mockMetaStoreFactory, never() ).saveElement( any() ); }
public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testRegisterUrlRegularFile() throws Exception { namedClusterEmbedManager.registerUrl( "/" + CLUSTER1_NAME + "/dir1/dir2" ); verify( mockMetaStoreFactory, never() ).saveElement( any() ); }
public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testRegisterUrlFullVariable() throws Exception { when( mockNamedClusterService.listNames( mockMeta.getMetaStore() ) ) .thenReturn( Arrays.asList( new String[] { CLUSTER1_NAME, CLUSTER2_NAME } ) ); namedClusterEmbedManager.registerUrl( "${variable)" ); verify( mockMetaStoreFactory ).saveElement( mockNamedCluster1 ); verify( mockMetaStoreFactory ).saveElement( mockNamedCluster2 ); }
public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testRegisterUrlClusterVariable() throws Exception { when( mockNamedClusterService.listNames( mockMeta.getMetaStore() ) ) .thenReturn( Arrays.asList( new String[] { CLUSTER1_NAME, CLUSTER2_NAME } ) ); namedClusterEmbedManager.registerUrl( "hc: verify( mockMetaStoreFactory ).saveElement( mockNamedCluster1 ); verify( mockMetaStoreFactory ).saveElement( mockNamedCluster2 ); }
public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testRegisterUrlAlreadyRegistered() throws Exception { when( mockMetaStoreFactory.loadElement( CLUSTER1_NAME ) ).thenReturn( mockNamedCluster1 ); namedClusterEmbedManager.registerUrl( "hc: verify( mockMetaStoreFactory, times(0 ) ).saveElement( mockNamedCluster1 ); }
public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void registerUrl( String urlString ) { if ( urlString == null || addedAllClusters == true ) { return; } if ( urlString.startsWith( VARIABLE_START ) ) { addAllClusters(); } Pattern r = Pattern.compile( URL_PATTERN ); Matcher m = r.matcher( urlString ); if ( m.find() ) { String protocol = m.group( PARSE_URL_SCHEME ); String clusterName = m.group( PARSE_URL_AUTHORITY ); if ( "hc".equals( protocol ) ) { if ( clusterName.startsWith( VARIABLE_START ) ) { addAllClusters(); } addClusterToMeta( clusterName ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testClear() throws Exception { when( mockMetaStoreFactory.getElements() ) .thenReturn( Arrays.asList( new NamedClusterOsgi[] { mockNamedCluster1, mockNamedCluster2 } ) ); namedClusterEmbedManager.clear( ); verify( mockMetaStoreFactory ).deleteElement( CLUSTER1_NAME ); verify( mockMetaStoreFactory ).deleteElement( CLUSTER2_NAME ); }
public void clear() { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { addedAllClusters = false; addedAnyClusters = false; if ( embeddedMetaStoreFactory != null ) { try { List<NamedClusterOsgi> list = embeddedMetaStoreFactory.getElements(); for ( NamedClusterOsgi nc : list ) { namedClusterPool.put( nc.getName(), nc ); embeddedMetaStoreFactory.deleteElement( nc.getName() ); } } catch ( MetaStoreException e ) { logMetaStoreException( e ); } } } }
NamedClusterEmbedManager { public void clear() { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { addedAllClusters = false; addedAnyClusters = false; if ( embeddedMetaStoreFactory != null ) { try { List<NamedClusterOsgi> list = embeddedMetaStoreFactory.getElements(); for ( NamedClusterOsgi nc : list ) { namedClusterPool.put( nc.getName(), nc ); embeddedMetaStoreFactory.deleteElement( nc.getName() ); } } catch ( MetaStoreException e ) { logMetaStoreException( e ); } } } } }
NamedClusterEmbedManager { public void clear() { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { addedAllClusters = false; addedAnyClusters = false; if ( embeddedMetaStoreFactory != null ) { try { List<NamedClusterOsgi> list = embeddedMetaStoreFactory.getElements(); for ( NamedClusterOsgi nc : list ) { namedClusterPool.put( nc.getName(), nc ); embeddedMetaStoreFactory.deleteElement( nc.getName() ); } } catch ( MetaStoreException e ) { logMetaStoreException( e ); } } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void clear() { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { addedAllClusters = false; addedAnyClusters = false; if ( embeddedMetaStoreFactory != null ) { try { List<NamedClusterOsgi> list = embeddedMetaStoreFactory.getElements(); for ( NamedClusterOsgi nc : list ) { namedClusterPool.put( nc.getName(), nc ); embeddedMetaStoreFactory.deleteElement( nc.getName() ); } } catch ( MetaStoreException e ) { logMetaStoreException( e ); } } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void clear() { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { addedAllClusters = false; addedAnyClusters = false; if ( embeddedMetaStoreFactory != null ) { try { List<NamedClusterOsgi> list = embeddedMetaStoreFactory.getElements(); for ( NamedClusterOsgi nc : list ) { namedClusterPool.put( nc.getName(), nc ); embeddedMetaStoreFactory.deleteElement( nc.getName() ); } } catch ( MetaStoreException e ) { logMetaStoreException( e ); } } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testPassEmbeddedMetastoreKey() { Variables mockVariables = mock( Variables.class ); namedClusterEmbedManager.passEmbeddedMetastoreKey( mockVariables, "key" ); verify( mockVariables ).setVariable( anyString(), anyString() ); }
public void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ) { if ( nameSpace != null ) { if ( embeddedMetastoreProviderKey != null ) { nameSpace.setVariable( "vfs.hc.embeddedMetastoreKey", embeddedMetastoreProviderKey ); } } }
NamedClusterEmbedManager { public void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ) { if ( nameSpace != null ) { if ( embeddedMetastoreProviderKey != null ) { nameSpace.setVariable( "vfs.hc.embeddedMetastoreKey", embeddedMetastoreProviderKey ); } } } }
NamedClusterEmbedManager { public void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ) { if ( nameSpace != null ) { if ( embeddedMetastoreProviderKey != null ) { nameSpace.setVariable( "vfs.hc.embeddedMetastoreKey", embeddedMetastoreProviderKey ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ) { if ( nameSpace != null ) { if ( embeddedMetastoreProviderKey != null ) { nameSpace.setVariable( "vfs.hc.embeddedMetastoreKey", embeddedMetastoreProviderKey ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ) { if ( nameSpace != null ) { if ( embeddedMetastoreProviderKey != null ) { nameSpace.setVariable( "vfs.hc.embeddedMetastoreKey", embeddedMetastoreProviderKey ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testAddingClusterToMetaData() throws MetaStoreException { namedClusterEmbedManager.addClusterToMeta( CLUSTER1_NAME ); verify( mockMetaStoreFactory ).saveElement( mockNamedCluster1 ); }
public void addClusterToMeta( String clusterName ) { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { NamedClusterOsgi nc = ncso.getNamedClusterByName( clusterName, meta.getMetaStore() ); if ( nc == null ) { nc = namedClusterPool.get( clusterName ); } if ( nc != null ) { addClusterToMeta( nc ); } } }
NamedClusterEmbedManager { public void addClusterToMeta( String clusterName ) { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { NamedClusterOsgi nc = ncso.getNamedClusterByName( clusterName, meta.getMetaStore() ); if ( nc == null ) { nc = namedClusterPool.get( clusterName ); } if ( nc != null ) { addClusterToMeta( nc ); } } } }
NamedClusterEmbedManager { public void addClusterToMeta( String clusterName ) { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { NamedClusterOsgi nc = ncso.getNamedClusterByName( clusterName, meta.getMetaStore() ); if ( nc == null ) { nc = namedClusterPool.get( clusterName ); } if ( nc != null ) { addClusterToMeta( nc ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); }
NamedClusterEmbedManager { public void addClusterToMeta( String clusterName ) { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { NamedClusterOsgi nc = ncso.getNamedClusterByName( clusterName, meta.getMetaStore() ); if ( nc == null ) { nc = namedClusterPool.get( clusterName ); } if ( nc != null ) { addClusterToMeta( nc ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); }
NamedClusterEmbedManager { public void addClusterToMeta( String clusterName ) { NamedClusterServiceOsgi ncso = meta.getNamedClusterServiceOsgi(); if ( ncso != null ) { NamedClusterOsgi nc = ncso.getNamedClusterByName( clusterName, meta.getMetaStore() ); if ( nc == null ) { nc = namedClusterPool.get( clusterName ); } if ( nc != null ) { addClusterToMeta( nc ); } } } NamedClusterEmbedManager( AbstractMeta meta, LogChannelInterface log ); void registerUrl( String urlString ); void clear(); boolean isAddedAnyClusters(); void addClusterToMeta( String clusterName ); void passEmbeddedMetastoreKey( VariableSpace nameSpace, String embeddedMetastoreProviderKey ); static final String NAMESPACE; }
@Test public void testProcessRow() throws KettleException { GroupByMeta groupByMeta = mock( GroupByMeta.class ); GroupByData groupByData = mock( GroupByData.class ); GroupBy groupBySpy = Mockito.spy( new GroupBy( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta, mockHelper.trans ) ); doReturn( null ).when( groupBySpy ).getRow(); doReturn( null ).when( groupBySpy ).getInputRowMeta(); RowMetaInterface rowMeta = new RowMeta(); rowMeta.addValueMeta( new ValueMetaInteger( "ROWNR" ) ); List<RowSet> outputRowSets = new ArrayList<RowSet>(); BlockingRowSet rowSet = new BlockingRowSet( 1 ); rowSet.putRow( rowMeta, new Object[] { new Long( 0 ) } ); outputRowSets.add( rowSet ); groupBySpy.setOutputRowSets( outputRowSets ); final String[] sub = { "b" }; doReturn( sub ).when( groupByMeta ).getSubjectField(); final String[] groupField = { "a" }; doReturn( groupField ).when( groupByMeta ).getGroupField(); final String[] aggFields = { "b_g" }; doReturn( aggFields ).when( groupByMeta ).getAggregateField(); final int[] aggType = { GroupByMeta.TYPE_GROUP_CONCAT_COMMA }; doReturn( aggType ).when( groupByMeta ).getAggregateType(); when( mockHelper.transMeta.getPrevStepFields( mockHelper.stepMeta ) ).thenReturn( new RowMeta() ); groupBySpy.processRow( groupByMeta, groupByData ); assertTrue( groupBySpy.getOutputRowSets().get( 0 ).isDone() ); }
/*
 * Dataset record: GroupBy#processRow — the step's per-row driver.
 * First call: resolves KETTLE_AGGREGATION_* variables, builds input/output row
 * metadata, maps subject and group field names to column indexes (COUNT_ANY uses
 * index 0 since it needs no subject column), and records cumulative sum/average
 * source/target indexes. A null row triggers handleLastOfGroup() + setOutputDone().
 * On a group-key change: either flushes the buffered rows (passAllRows, appending
 * group result, optional line number, cumulative sums/averages) or emits a single
 * aggregate row; then starts a new aggregate. Statement order is load-bearing
 * (first/newBatch flags, data.previous cloning) — kept byte-identical.
 */
@Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (GroupByMeta) smi; data = (GroupByData) sdi; Object[] r = getRow(); if ( first ) { String val = getVariable( Const.KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" ); allNullsAreZero = ValueMetaBase.convertStringToBoolean( val ); val = getVariable( Const.KETTLE_AGGREGATION_MIN_NULL_IS_VALUED, "N" ); minNullIsValued = ValueMetaBase.convertStringToBoolean( val ); data.inputRowMeta = getInputRowMeta(); if ( data.inputRowMeta == null ) { data.inputRowMeta = getTransMeta().getPrevStepFields( getStepMeta() ); } data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); data.counts = new long[ meta.getSubjectField().length ]; data.subjectnrs = new int[ meta.getSubjectField().length ]; data.cumulativeSumSourceIndexes = new ArrayList<>(); data.cumulativeSumTargetIndexes = new ArrayList<>(); data.cumulativeAvgSourceIndexes = new ArrayList<>(); data.cumulativeAvgTargetIndexes = new ArrayList<>(); for ( int i = 0; i < meta.getSubjectField().length; i++ ) { if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_COUNT_ANY ) { data.subjectnrs[ i ] = 0; } else { data.subjectnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getSubjectField()[i] ); } if ( ( r != null ) && ( data.subjectnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.AggregateSubjectFieldCouldNotFound", meta.getSubjectField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM ) { data.cumulativeSumSourceIndexes.add( data.subjectnrs[ i ] ); data.cumulativeSumTargetIndexes.add( data.inputRowMeta.size() + i ); } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE ) { data.cumulativeAvgSourceIndexes.add( data.subjectnrs[ i ] ); data.cumulativeAvgTargetIndexes.add( data.inputRowMeta.size() + i ); } }
data.previousSums = new Object[ data.cumulativeSumTargetIndexes.size() ]; data.previousAvgSum = new Object[ data.cumulativeAvgTargetIndexes.size() ]; data.previousAvgCount = new long[ data.cumulativeAvgTargetIndexes.size() ]; data.groupnrs = new int[ meta.getGroupField().length ]; for ( int i = 0; i < meta.getGroupField().length; i++ ) { data.groupnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getGroupField()[i] ); if ( ( r != null ) && ( data.groupnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.GroupFieldCouldNotFound", meta.getGroupField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } } data.valueMetaInteger = new ValueMetaInteger( "count" ); data.valueMetaNumber = new ValueMetaNumber( "sum" ); initGroupMeta( data.inputRowMeta ); } if ( first || data.newBatch ) { newAggregate( r ); } if ( first ) { data.groupAggMeta = new RowMeta(); data.groupAggMeta.addRowMeta( data.groupMeta ); data.groupAggMeta.addRowMeta( data.aggMeta ); } if ( r == null ) { handleLastOfGroup(); setOutputDone(); return false; } if ( first || data.newBatch ) { first = false; data.newBatch = false; data.previous = data.inputRowMeta.cloneRow( r ); } else { calcAggregate( data.previous ); if ( meta.passAllRows() ) { addToBuffer( data.previous ); } } if ( !sameGroup( data.previous, r ) ) { if ( meta.passAllRows() ) { closeOutput(); data.groupResult = getAggregateResult(); Object[] row = getRowFromBuffer(); long lineNr = 0; while ( row != null ) { int size = data.inputRowMeta.size(); row = RowDataUtil.addRowData( row, size, data.groupResult ); size += data.groupResult.length; lineNr++; if ( meta.isAddingLineNrInGroup() && !Utils.isEmpty( meta.getLineNrInGroupField() ) ) { Object lineNrValue = new Long( lineNr ); row = RowDataUtil.addValueData( row, size, lineNrValue ); size++; } addCumulativeSums( row ); addCumulativeAverages( row ); putRow( data.outputRowMeta, row ); row = getRowFromBuffer(); } closeInput(); } else { Object[] result = buildResult( data.previous ); if
( result != null ) { putRow( data.groupAggMeta, result ); } } newAggregate( r ); } data.previous = data.inputRowMeta.cloneRow( r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "GroupBy.LineNumber" ) + getLinesRead() ); } } return true; }
/*
 * Dataset record: GroupBy class context, variant 1 — class header plus the full
 * processRow body only (no constructor/member summary). Same logic as the
 * standalone processRow record above; kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (GroupByMeta) smi; data = (GroupByData) sdi; Object[] r = getRow(); if ( first ) { String val = getVariable( Const.KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" ); allNullsAreZero = ValueMetaBase.convertStringToBoolean( val ); val = getVariable( Const.KETTLE_AGGREGATION_MIN_NULL_IS_VALUED, "N" ); minNullIsValued = ValueMetaBase.convertStringToBoolean( val ); data.inputRowMeta = getInputRowMeta(); if ( data.inputRowMeta == null ) { data.inputRowMeta = getTransMeta().getPrevStepFields( getStepMeta() ); } data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); data.counts = new long[ meta.getSubjectField().length ]; data.subjectnrs = new int[ meta.getSubjectField().length ]; data.cumulativeSumSourceIndexes = new ArrayList<>(); data.cumulativeSumTargetIndexes = new ArrayList<>(); data.cumulativeAvgSourceIndexes = new ArrayList<>(); data.cumulativeAvgTargetIndexes = new ArrayList<>(); for ( int i = 0; i < meta.getSubjectField().length; i++ ) { if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_COUNT_ANY ) { data.subjectnrs[ i ] = 0; } else { data.subjectnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getSubjectField()[i] ); } if ( ( r != null ) && ( data.subjectnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.AggregateSubjectFieldCouldNotFound", meta.getSubjectField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM ) { data.cumulativeSumSourceIndexes.add( data.subjectnrs[ i ] ); data.cumulativeSumTargetIndexes.add( data.inputRowMeta.size() + i ); } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE ) { data.cumulativeAvgSourceIndexes.add( data.subjectnrs[ i ] );
data.cumulativeAvgTargetIndexes.add( data.inputRowMeta.size() + i ); } } data.previousSums = new Object[ data.cumulativeSumTargetIndexes.size() ]; data.previousAvgSum = new Object[ data.cumulativeAvgTargetIndexes.size() ]; data.previousAvgCount = new long[ data.cumulativeAvgTargetIndexes.size() ]; data.groupnrs = new int[ meta.getGroupField().length ]; for ( int i = 0; i < meta.getGroupField().length; i++ ) { data.groupnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getGroupField()[i] ); if ( ( r != null ) && ( data.groupnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.GroupFieldCouldNotFound", meta.getGroupField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } } data.valueMetaInteger = new ValueMetaInteger( "count" ); data.valueMetaNumber = new ValueMetaNumber( "sum" ); initGroupMeta( data.inputRowMeta ); } if ( first || data.newBatch ) { newAggregate( r ); } if ( first ) { data.groupAggMeta = new RowMeta(); data.groupAggMeta.addRowMeta( data.groupMeta ); data.groupAggMeta.addRowMeta( data.aggMeta ); } if ( r == null ) { handleLastOfGroup(); setOutputDone(); return false; } if ( first || data.newBatch ) { first = false; data.newBatch = false; data.previous = data.inputRowMeta.cloneRow( r ); } else { calcAggregate( data.previous ); if ( meta.passAllRows() ) { addToBuffer( data.previous ); } } if ( !sameGroup( data.previous, r ) ) { if ( meta.passAllRows() ) { closeOutput(); data.groupResult = getAggregateResult(); Object[] row = getRowFromBuffer(); long lineNr = 0; while ( row != null ) { int size = data.inputRowMeta.size(); row = RowDataUtil.addRowData( row, size, data.groupResult ); size += data.groupResult.length; lineNr++; if ( meta.isAddingLineNrInGroup() && !Utils.isEmpty( meta.getLineNrInGroupField() ) ) { Object lineNrValue = new Long( lineNr ); row = RowDataUtil.addValueData( row, size, lineNrValue ); size++; } addCumulativeSums( row ); addCumulativeAverages( row ); putRow( data.outputRowMeta, row ); row = getRowFromBuffer(); }
closeInput(); } else { Object[] result = buildResult( data.previous ); if ( result != null ) { putRow( data.groupAggMeta, result ); } } newAggregate( r ); } data.previous = data.inputRowMeta.cloneRow( r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "GroupBy.LineNumber" ) + getLinesRead() ); } } return true; } }
/*
 * Dataset record: GroupBy class context, variant 2 — processRow body plus the
 * constructor signature (no body; not compilable Java). Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (GroupByMeta) smi; data = (GroupByData) sdi; Object[] r = getRow(); if ( first ) { String val = getVariable( Const.KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" ); allNullsAreZero = ValueMetaBase.convertStringToBoolean( val ); val = getVariable( Const.KETTLE_AGGREGATION_MIN_NULL_IS_VALUED, "N" ); minNullIsValued = ValueMetaBase.convertStringToBoolean( val ); data.inputRowMeta = getInputRowMeta(); if ( data.inputRowMeta == null ) { data.inputRowMeta = getTransMeta().getPrevStepFields( getStepMeta() ); } data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); data.counts = new long[ meta.getSubjectField().length ]; data.subjectnrs = new int[ meta.getSubjectField().length ]; data.cumulativeSumSourceIndexes = new ArrayList<>(); data.cumulativeSumTargetIndexes = new ArrayList<>(); data.cumulativeAvgSourceIndexes = new ArrayList<>(); data.cumulativeAvgTargetIndexes = new ArrayList<>(); for ( int i = 0; i < meta.getSubjectField().length; i++ ) { if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_COUNT_ANY ) { data.subjectnrs[ i ] = 0; } else { data.subjectnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getSubjectField()[i] ); } if ( ( r != null ) && ( data.subjectnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.AggregateSubjectFieldCouldNotFound", meta.getSubjectField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM ) { data.cumulativeSumSourceIndexes.add( data.subjectnrs[ i ] ); data.cumulativeSumTargetIndexes.add( data.inputRowMeta.size() + i ); } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE ) { data.cumulativeAvgSourceIndexes.add( data.subjectnrs[ i ] );
data.cumulativeAvgTargetIndexes.add( data.inputRowMeta.size() + i ); } } data.previousSums = new Object[ data.cumulativeSumTargetIndexes.size() ]; data.previousAvgSum = new Object[ data.cumulativeAvgTargetIndexes.size() ]; data.previousAvgCount = new long[ data.cumulativeAvgTargetIndexes.size() ]; data.groupnrs = new int[ meta.getGroupField().length ]; for ( int i = 0; i < meta.getGroupField().length; i++ ) { data.groupnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getGroupField()[i] ); if ( ( r != null ) && ( data.groupnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.GroupFieldCouldNotFound", meta.getGroupField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } } data.valueMetaInteger = new ValueMetaInteger( "count" ); data.valueMetaNumber = new ValueMetaNumber( "sum" ); initGroupMeta( data.inputRowMeta ); } if ( first || data.newBatch ) { newAggregate( r ); } if ( first ) { data.groupAggMeta = new RowMeta(); data.groupAggMeta.addRowMeta( data.groupMeta ); data.groupAggMeta.addRowMeta( data.aggMeta ); } if ( r == null ) { handleLastOfGroup(); setOutputDone(); return false; } if ( first || data.newBatch ) { first = false; data.newBatch = false; data.previous = data.inputRowMeta.cloneRow( r ); } else { calcAggregate( data.previous ); if ( meta.passAllRows() ) { addToBuffer( data.previous ); } } if ( !sameGroup( data.previous, r ) ) { if ( meta.passAllRows() ) { closeOutput(); data.groupResult = getAggregateResult(); Object[] row = getRowFromBuffer(); long lineNr = 0; while ( row != null ) { int size = data.inputRowMeta.size(); row = RowDataUtil.addRowData( row, size, data.groupResult ); size += data.groupResult.length; lineNr++; if ( meta.isAddingLineNrInGroup() && !Utils.isEmpty( meta.getLineNrInGroupField() ) ) { Object lineNrValue = new Long( lineNr ); row = RowDataUtil.addValueData( row, size, lineNrValue ); size++; } addCumulativeSums( row ); addCumulativeAverages( row ); putRow( data.outputRowMeta, row ); row = getRowFromBuffer(); }
closeInput(); } else { Object[] result = buildResult( data.previous ); if ( result != null ) { putRow( data.groupAggMeta, result ); } } newAggregate( r ); } data.previous = data.inputRowMeta.cloneRow( r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "GroupBy.LineNumber" ) + getLinesRead() ); } } return true; } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
/*
 * Dataset record: GroupBy class context, variant 3 — processRow body plus the
 * constructor and full public-method signature summary (no bodies). Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (GroupByMeta) smi; data = (GroupByData) sdi; Object[] r = getRow(); if ( first ) { String val = getVariable( Const.KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" ); allNullsAreZero = ValueMetaBase.convertStringToBoolean( val ); val = getVariable( Const.KETTLE_AGGREGATION_MIN_NULL_IS_VALUED, "N" ); minNullIsValued = ValueMetaBase.convertStringToBoolean( val ); data.inputRowMeta = getInputRowMeta(); if ( data.inputRowMeta == null ) { data.inputRowMeta = getTransMeta().getPrevStepFields( getStepMeta() ); } data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); data.counts = new long[ meta.getSubjectField().length ]; data.subjectnrs = new int[ meta.getSubjectField().length ]; data.cumulativeSumSourceIndexes = new ArrayList<>(); data.cumulativeSumTargetIndexes = new ArrayList<>(); data.cumulativeAvgSourceIndexes = new ArrayList<>(); data.cumulativeAvgTargetIndexes = new ArrayList<>(); for ( int i = 0; i < meta.getSubjectField().length; i++ ) { if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_COUNT_ANY ) { data.subjectnrs[ i ] = 0; } else { data.subjectnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getSubjectField()[i] ); } if ( ( r != null ) && ( data.subjectnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.AggregateSubjectFieldCouldNotFound", meta.getSubjectField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM ) { data.cumulativeSumSourceIndexes.add( data.subjectnrs[ i ] ); data.cumulativeSumTargetIndexes.add( data.inputRowMeta.size() + i ); } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE ) { data.cumulativeAvgSourceIndexes.add( data.subjectnrs[ i ] );
data.cumulativeAvgTargetIndexes.add( data.inputRowMeta.size() + i ); } } data.previousSums = new Object[ data.cumulativeSumTargetIndexes.size() ]; data.previousAvgSum = new Object[ data.cumulativeAvgTargetIndexes.size() ]; data.previousAvgCount = new long[ data.cumulativeAvgTargetIndexes.size() ]; data.groupnrs = new int[ meta.getGroupField().length ]; for ( int i = 0; i < meta.getGroupField().length; i++ ) { data.groupnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getGroupField()[i] ); if ( ( r != null ) && ( data.groupnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.GroupFieldCouldNotFound", meta.getGroupField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } } data.valueMetaInteger = new ValueMetaInteger( "count" ); data.valueMetaNumber = new ValueMetaNumber( "sum" ); initGroupMeta( data.inputRowMeta ); } if ( first || data.newBatch ) { newAggregate( r ); } if ( first ) { data.groupAggMeta = new RowMeta(); data.groupAggMeta.addRowMeta( data.groupMeta ); data.groupAggMeta.addRowMeta( data.aggMeta ); } if ( r == null ) { handleLastOfGroup(); setOutputDone(); return false; } if ( first || data.newBatch ) { first = false; data.newBatch = false; data.previous = data.inputRowMeta.cloneRow( r ); } else { calcAggregate( data.previous ); if ( meta.passAllRows() ) { addToBuffer( data.previous ); } } if ( !sameGroup( data.previous, r ) ) { if ( meta.passAllRows() ) { closeOutput(); data.groupResult = getAggregateResult(); Object[] row = getRowFromBuffer(); long lineNr = 0; while ( row != null ) { int size = data.inputRowMeta.size(); row = RowDataUtil.addRowData( row, size, data.groupResult ); size += data.groupResult.length; lineNr++; if ( meta.isAddingLineNrInGroup() && !Utils.isEmpty( meta.getLineNrInGroupField() ) ) { Object lineNrValue = new Long( lineNr ); row = RowDataUtil.addValueData( row, size, lineNrValue ); size++; } addCumulativeSums( row ); addCumulativeAverages( row ); putRow( data.outputRowMeta, row ); row = getRowFromBuffer(); }
closeInput(); } else { Object[] result = buildResult( data.previous ); if ( result != null ) { putRow( data.groupAggMeta, result ); } } newAggregate( r ); } data.previous = data.inputRowMeta.cloneRow( r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "GroupBy.LineNumber" ) + getLinesRead() ); } } return true; } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void batchComplete(); GroupByMeta getMeta(); }
/*
 * Dataset record: GroupBy class context, variant 4 — duplicate of variant 3
 * (processRow body plus constructor and method-signature summary). Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException { meta = (GroupByMeta) smi; data = (GroupByData) sdi; Object[] r = getRow(); if ( first ) { String val = getVariable( Const.KETTLE_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" ); allNullsAreZero = ValueMetaBase.convertStringToBoolean( val ); val = getVariable( Const.KETTLE_AGGREGATION_MIN_NULL_IS_VALUED, "N" ); minNullIsValued = ValueMetaBase.convertStringToBoolean( val ); data.inputRowMeta = getInputRowMeta(); if ( data.inputRowMeta == null ) { data.inputRowMeta = getTransMeta().getPrevStepFields( getStepMeta() ); } data.outputRowMeta = data.inputRowMeta.clone(); meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore ); data.counts = new long[ meta.getSubjectField().length ]; data.subjectnrs = new int[ meta.getSubjectField().length ]; data.cumulativeSumSourceIndexes = new ArrayList<>(); data.cumulativeSumTargetIndexes = new ArrayList<>(); data.cumulativeAvgSourceIndexes = new ArrayList<>(); data.cumulativeAvgTargetIndexes = new ArrayList<>(); for ( int i = 0; i < meta.getSubjectField().length; i++ ) { if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_COUNT_ANY ) { data.subjectnrs[ i ] = 0; } else { data.subjectnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getSubjectField()[i] ); } if ( ( r != null ) && ( data.subjectnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.AggregateSubjectFieldCouldNotFound", meta.getSubjectField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_SUM ) { data.cumulativeSumSourceIndexes.add( data.subjectnrs[ i ] ); data.cumulativeSumTargetIndexes.add( data.inputRowMeta.size() + i ); } if ( meta.getAggregateType()[ i ] == GroupByMeta.TYPE_GROUP_CUMULATIVE_AVERAGE ) { data.cumulativeAvgSourceIndexes.add( data.subjectnrs[ i ] );
data.cumulativeAvgTargetIndexes.add( data.inputRowMeta.size() + i ); } } data.previousSums = new Object[ data.cumulativeSumTargetIndexes.size() ]; data.previousAvgSum = new Object[ data.cumulativeAvgTargetIndexes.size() ]; data.previousAvgCount = new long[ data.cumulativeAvgTargetIndexes.size() ]; data.groupnrs = new int[ meta.getGroupField().length ]; for ( int i = 0; i < meta.getGroupField().length; i++ ) { data.groupnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getGroupField()[i] ); if ( ( r != null ) && ( data.groupnrs[ i ] < 0 ) ) { logError( BaseMessages.getString( PKG, "GroupBy.Log.GroupFieldCouldNotFound", meta.getGroupField()[ i ] ) ); setErrors( 1 ); stopAll(); return false; } } data.valueMetaInteger = new ValueMetaInteger( "count" ); data.valueMetaNumber = new ValueMetaNumber( "sum" ); initGroupMeta( data.inputRowMeta ); } if ( first || data.newBatch ) { newAggregate( r ); } if ( first ) { data.groupAggMeta = new RowMeta(); data.groupAggMeta.addRowMeta( data.groupMeta ); data.groupAggMeta.addRowMeta( data.aggMeta ); } if ( r == null ) { handleLastOfGroup(); setOutputDone(); return false; } if ( first || data.newBatch ) { first = false; data.newBatch = false; data.previous = data.inputRowMeta.cloneRow( r ); } else { calcAggregate( data.previous ); if ( meta.passAllRows() ) { addToBuffer( data.previous ); } } if ( !sameGroup( data.previous, r ) ) { if ( meta.passAllRows() ) { closeOutput(); data.groupResult = getAggregateResult(); Object[] row = getRowFromBuffer(); long lineNr = 0; while ( row != null ) { int size = data.inputRowMeta.size(); row = RowDataUtil.addRowData( row, size, data.groupResult ); size += data.groupResult.length; lineNr++; if ( meta.isAddingLineNrInGroup() && !Utils.isEmpty( meta.getLineNrInGroupField() ) ) { Object lineNrValue = new Long( lineNr ); row = RowDataUtil.addValueData( row, size, lineNrValue ); size++; } addCumulativeSums( row ); addCumulativeAverages( row ); putRow( data.outputRowMeta, row ); row = getRowFromBuffer(); }
closeInput(); } else { Object[] result = buildResult( data.previous ); if ( result != null ) { putRow( data.groupAggMeta, result ); } } newAggregate( r ); } data.previous = data.inputRowMeta.cloneRow( r ); if ( checkFeedback( getLinesRead() ) ) { if ( log.isBasic() ) { logBasic( BaseMessages.getString( PKG, "GroupBy.LineNumber" ) + getLinesRead() ); } } return true; } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void batchComplete(); GroupByMeta getMeta(); }
/**
 * Verifies that GroupBy#dispose deletes the step's temporary spill file.
 * A real temp file (with open input/output streams, mirroring a spill in
 * progress) is attached to the step data; after dispose runs, the file
 * must no longer exist on disk.
 */
@Test
public void testTempFileIsDeleted_AfterCallingDisposeMethod() throws Exception {
  GroupByData stepData = new GroupByData();
  stepData.tempFile = File.createTempFile( "test", ".txt" );
  stepData.fosToTempFile = new FileOutputStream( stepData.tempFile );
  stepData.fisToTmpFile = new FileInputStream( stepData.tempFile );

  GroupBy step =
    Mockito.spy( new GroupBy( mockHelper.stepMeta, stepData, 0, mockHelper.transMeta, mockHelper.trans ) );

  // Sanity: the file exists before dispose is called.
  assertTrue( stepData.tempFile.exists() );

  step.dispose( mock( StepMetaInterface.class ), stepData );

  assertFalse( stepData.tempFile.exists() );
}
/**
 * Releases step resources. When a temporary spill file was created, its
 * streams are closed (stream-close failures are logged, not rethrown, so
 * disposal still proceeds) and the file itself is deleted; a failed delete
 * is reported at detailed log level only. Finally delegates to the base step.
 */
@Override
public void dispose( StepMetaInterface smi, StepDataInterface sdi ) {
  if ( data.tempFile != null ) {
    try {
      closeInput();
      closeOutput();
    } catch ( KettleFileException e ) {
      log.logError( e.getLocalizedMessage() );
    }
    if ( !data.tempFile.delete() && log.isDetailed() ) {
      log.logDetailed( BaseMessages.getString(
        PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath() ) );
    }
  }
  super.dispose( smi, sdi );
}
/*
 * Dataset record: GroupBy class context — dispose body only. Closes spill-file
 * streams (logging close failures), deletes the temp file (reporting a failed
 * delete at detailed level), then delegates to the base step. Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { if ( data.tempFile != null ) { try { closeInput(); closeOutput(); } catch ( KettleFileException e ) { log.logError( e.getLocalizedMessage() ); } boolean tempFileDeleted = data.tempFile.delete(); if ( !tempFileDeleted && log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath() ) ); } } super.dispose( smi, sdi ); } }
/*
 * Dataset record: GroupBy class context — dispose body plus the constructor
 * signature (no body; not compilable Java). Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { if ( data.tempFile != null ) { try { closeInput(); closeOutput(); } catch ( KettleFileException e ) { log.logError( e.getLocalizedMessage() ); } boolean tempFileDeleted = data.tempFile.delete(); if ( !tempFileDeleted && log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath() ) ); } } super.dispose( smi, sdi ); } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
/*
 * Dataset record: GroupBy class context — dispose body plus constructor and
 * public-method signature summary (no bodies). Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { if ( data.tempFile != null ) { try { closeInput(); closeOutput(); } catch ( KettleFileException e ) { log.logError( e.getLocalizedMessage() ); } boolean tempFileDeleted = data.tempFile.delete(); if ( !tempFileDeleted && log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath() ) ); } } super.dispose( smi, sdi ); } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void batchComplete(); GroupByMeta getMeta(); }
/*
 * Dataset record: duplicate of the previous GroupBy dispose class context
 * (dispose body plus constructor and method-signature summary). Kept byte-identical.
 */
GroupBy extends BaseStep implements StepInterface { @Override public void dispose( StepMetaInterface smi, StepDataInterface sdi ) { if ( data.tempFile != null ) { try { closeInput(); closeOutput(); } catch ( KettleFileException e ) { log.logError( e.getLocalizedMessage() ); } boolean tempFileDeleted = data.tempFile.delete(); if ( !tempFileDeleted && log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath() ) ); } } super.dispose( smi, sdi ); } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void batchComplete(); GroupByMeta getMeta(); }
/**
 * When no ports have been allocated for a host, getHostServerSocketPorts
 * must return an empty list — never null.
 */
@Test
public void getHostServerSocketPortsWithoutAllocatedPorts() {
  List<SocketPortAllocation> ports = transformationMap.getHostServerSocketPorts( TEST_HOST );
  assertNotNull( ports );
  assertTrue( ports.isEmpty() );
}
/**
 * Returns the server socket port allocations registered for the given host.
 *
 * @param hostname host to look up in the per-host allocation map
 * @return an unmodifiable view of the host's allocations, or an empty list
 *         when the host has none (never null)
 */
public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) {
  List<SocketPortAllocation> allocations = hostServerSocketPortsMap.get( hostname );
  if ( allocations == null ) {
    return Collections.emptyList();
  }
  return Collections.unmodifiableList( allocations );
}
/*
 * Dataset record: TransformationMap class context — getHostServerSocketPorts body
 * only. Null-safe lookup returning an empty or unmodifiable list. Kept byte-identical.
 */
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } }
/*
 * Dataset record: TransformationMap class context — getHostServerSocketPorts body
 * plus the constructor signature (no body). Kept byte-identical.
 */
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } TransformationMap(); }
/*
 * Dataset record: TransformationMap class context — getHostServerSocketPorts body
 * plus the full public-method signature summary (no bodies). Kept byte-identical.
 */
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } TransformationMap(); void addTransformation( String transformationName, String containerObjectId, Trans trans, TransConfiguration transConfiguration ); void registerTransformation( Trans trans, TransConfiguration transConfiguration ); Trans getTransformation( String transformationName ); Trans getTransformation( CarteObjectEntry entry ); TransConfiguration getConfiguration( String transformationName ); TransConfiguration getConfiguration( CarteObjectEntry entry ); void removeTransformation( CarteObjectEntry entry ); List<CarteObjectEntry> getTransformationObjects(); SocketPortAllocation allocateServerSocketPort( int portRangeStart, String hostname, String clusteredRunId, String transformationName, String sourceSlaveName, String sourceStepName, String sourceStepCopy, String targetSlaveName, String targetStepName, String targetStepCopy ); void deallocateServerSocketPorts( String transName, String carteObjectId ); void deallocateServerSocketPorts( CarteObjectEntry entry ); void deallocateServerSocketPort( int port, String hostname ); CarteObjectEntry getFirstCarteObjectEntry( String transName ); SlaveServerConfig getSlaveServerConfig(); void setSlaveServerConfig( SlaveServerConfig slaveServerConfig ); List<SocketPortAllocation> getHostServerSocketPorts( String hostname ); SlaveSequence getSlaveSequence( String name ); boolean isAutomaticSlaveSequenceCreationAllowed(); SlaveSequence createSlaveSequence( String name ); }
/*
 * Dataset record: duplicate of the previous TransformationMap class context
 * (getHostServerSocketPorts body plus method-signature summary). Kept byte-identical.
 */
TransformationMap { public List<SocketPortAllocation> getHostServerSocketPorts( String hostname ) { List<SocketPortAllocation> ports = hostServerSocketPortsMap.get( hostname ); return ports == null ? Collections.emptyList() : Collections.unmodifiableList( ports ); } TransformationMap(); void addTransformation( String transformationName, String containerObjectId, Trans trans, TransConfiguration transConfiguration ); void registerTransformation( Trans trans, TransConfiguration transConfiguration ); Trans getTransformation( String transformationName ); Trans getTransformation( CarteObjectEntry entry ); TransConfiguration getConfiguration( String transformationName ); TransConfiguration getConfiguration( CarteObjectEntry entry ); void removeTransformation( CarteObjectEntry entry ); List<CarteObjectEntry> getTransformationObjects(); SocketPortAllocation allocateServerSocketPort( int portRangeStart, String hostname, String clusteredRunId, String transformationName, String sourceSlaveName, String sourceStepName, String sourceStepCopy, String targetSlaveName, String targetStepName, String targetStepCopy ); void deallocateServerSocketPorts( String transName, String carteObjectId ); void deallocateServerSocketPorts( CarteObjectEntry entry ); void deallocateServerSocketPort( int port, String hostname ); CarteObjectEntry getFirstCarteObjectEntry( String transName ); SlaveServerConfig getSlaveServerConfig(); void setSlaveServerConfig( SlaveServerConfig slaveServerConfig ); List<SocketPortAllocation> getHostServerSocketPorts( String hostname ); SlaveSequence getSlaveSequence( String name ); boolean isAutomaticSlaveSequenceCreationAllowed(); SlaveSequence createSlaveSequence( String name ); }
@Test public void testAddToBuffer() throws KettleException, FileSystemException { GroupByData groupByData = new GroupByData(); ArrayList listMock = mock( ArrayList.class ); when( listMock.size() ).thenReturn( 5001 ); groupByData.bufferList = listMock; groupByData.rowsOnFile = 0; RowMetaInterface inputRowMetaMock = mock( RowMetaInterface.class ); groupByData.inputRowMeta = inputRowMetaMock; GroupBy groupBySpy = Mockito.spy( new GroupBy( mockHelper.stepMeta, groupByData, 0, mockHelper.transMeta, mockHelper.trans ) ); GroupByMeta groupByMetaMock = mock( GroupByMeta.class ); when( groupByMetaMock.getPrefix() ).thenReturn( "group-by-test-temp-file-" ); when( groupBySpy.getMeta() ).thenReturn( groupByMetaMock ); String userDir = System.getProperty( "user.dir" ); String vfsFilePath = "file: when( groupBySpy.environmentSubstitute( anyString() ) ).thenReturn( vfsFilePath ); Object[] row = { "abc" }; groupBySpy.addToBuffer( row ); assertTrue( groupByData.tempFile.exists() ); groupBySpy.dispose( groupByMetaMock, groupByData ); assertFalse( groupByData.tempFile.exists() ); verify( groupBySpy, times( 1 ) ).retrieveVfsPath( anyString() ); }
/**
 * Appends one row to the in-memory group buffer and, on first overflow past 5000
 * buffered rows, opens a temporary spill file and moves the oldest buffered row to it.
 *
 * NOTE(review): the whole spill — including the per-row write at the bottom — is
 * guarded by {@code rowsOnFile == 0}, so only the very first overflow row is written
 * to disk by this method; confirm against the rest of the step that later overflow
 * rows are spilled elsewhere.
 *
 * @param row the row to buffer
 * @throws KettleFileException when the temporary file cannot be created
 */
void addToBuffer( Object[] row ) throws KettleFileException {
  data.bufferList.add( row );

  boolean firstOverflow = data.bufferList.size() > 5000 && data.rowsOnFile == 0;
  if ( !firstOverflow ) {
    return;
  }

  String tmpDir = environmentSubstitute( getMeta().getDirectory() );
  try {
    // If the configured directory is not a plain local path, resolve it through VFS.
    if ( !new File( tmpDir ).exists() ) {
      tmpDir = retrieveVfsPath( tmpDir );
    }
    data.tempFile = File.createTempFile( getMeta().getPrefix(), ".tmp", new File( tmpDir ) );
    data.fosToTempFile = new FileOutputStream( data.tempFile );
    data.dosToTempFile = new DataOutputStream( data.fosToTempFile );
    data.firstRead = true;
  } catch ( IOException e ) {
    throw new KettleFileException(
      BaseMessages.getString( PKG, "GroupBy.Exception.UnableToCreateTemporaryFile" ), e );
  }

  // Move the oldest buffered row from memory onto the spill file.
  Object[] head = data.bufferList.get( 0 );
  data.inputRowMeta.writeData( data.dosToTempFile, head );
  data.bufferList.remove( 0 );
  data.rowsOnFile++;
}
// NOTE(review): dataset rows — four extraction summaries of the GroupBy step class at
// increasing context levels (the `class` keyword and non-focal method bodies were stripped;
// rows 3 and 4 are byte-identical duplicates). Each repeats the focal addToBuffer method
// shown above; they are kept verbatim as dataset content.
GroupBy extends BaseStep implements StepInterface { void addToBuffer( Object[] row ) throws KettleFileException { data.bufferList.add( row ); if ( data.bufferList.size() > 5000 && data.rowsOnFile == 0 ) { String pathToTmp = environmentSubstitute( getMeta().getDirectory() ); try { File ioFile = new File( pathToTmp ); if ( !ioFile.exists() ) { pathToTmp = retrieveVfsPath( pathToTmp ); } data.tempFile = File.createTempFile( getMeta().getPrefix(), ".tmp", new File( pathToTmp ) ); data.fosToTempFile = new FileOutputStream( data.tempFile ); data.dosToTempFile = new DataOutputStream( data.fosToTempFile ); data.firstRead = true; } catch ( IOException e ) { throw new KettleFileException( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToCreateTemporaryFile" ), e ); } Object[] oldest = data.bufferList.get( 0 ); data.inputRowMeta.writeData( data.dosToTempFile, oldest ); data.bufferList.remove( 0 ); data.rowsOnFile++; } } }
GroupBy extends BaseStep implements StepInterface { void addToBuffer( Object[] row ) throws KettleFileException { data.bufferList.add( row ); if ( data.bufferList.size() > 5000 && data.rowsOnFile == 0 ) { String pathToTmp = environmentSubstitute( getMeta().getDirectory() ); try { File ioFile = new File( pathToTmp ); if ( !ioFile.exists() ) { pathToTmp = retrieveVfsPath( pathToTmp ); } data.tempFile = File.createTempFile( getMeta().getPrefix(), ".tmp", new File( pathToTmp ) ); data.fosToTempFile = new FileOutputStream( data.tempFile ); data.dosToTempFile = new DataOutputStream( data.fosToTempFile ); data.firstRead = true; } catch ( IOException e ) { throw new KettleFileException( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToCreateTemporaryFile" ), e ); } Object[] oldest = data.bufferList.get( 0 ); data.inputRowMeta.writeData( data.dosToTempFile, oldest ); data.bufferList.remove( 0 ); data.rowsOnFile++; } } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
GroupBy extends BaseStep implements StepInterface { void addToBuffer( Object[] row ) throws KettleFileException { data.bufferList.add( row ); if ( data.bufferList.size() > 5000 && data.rowsOnFile == 0 ) { String pathToTmp = environmentSubstitute( getMeta().getDirectory() ); try { File ioFile = new File( pathToTmp ); if ( !ioFile.exists() ) { pathToTmp = retrieveVfsPath( pathToTmp ); } data.tempFile = File.createTempFile( getMeta().getPrefix(), ".tmp", new File( pathToTmp ) ); data.fosToTempFile = new FileOutputStream( data.tempFile ); data.dosToTempFile = new DataOutputStream( data.fosToTempFile ); data.firstRead = true; } catch ( IOException e ) { throw new KettleFileException( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToCreateTemporaryFile" ), e ); } Object[] oldest = data.bufferList.get( 0 ); data.inputRowMeta.writeData( data.dosToTempFile, oldest ); data.bufferList.remove( 0 ); data.rowsOnFile++; } } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void batchComplete(); GroupByMeta getMeta(); }
GroupBy extends BaseStep implements StepInterface { void addToBuffer( Object[] row ) throws KettleFileException { data.bufferList.add( row ); if ( data.bufferList.size() > 5000 && data.rowsOnFile == 0 ) { String pathToTmp = environmentSubstitute( getMeta().getDirectory() ); try { File ioFile = new File( pathToTmp ); if ( !ioFile.exists() ) { pathToTmp = retrieveVfsPath( pathToTmp ); } data.tempFile = File.createTempFile( getMeta().getPrefix(), ".tmp", new File( pathToTmp ) ); data.fosToTempFile = new FileOutputStream( data.tempFile ); data.dosToTempFile = new DataOutputStream( data.fosToTempFile ); data.firstRead = true; } catch ( IOException e ) { throw new KettleFileException( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToCreateTemporaryFile" ), e ); } Object[] oldest = data.bufferList.get( 0 ); data.inputRowMeta.writeData( data.dosToTempFile, oldest ); data.bufferList.remove( 0 ); data.rowsOnFile++; } } GroupBy( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void batchComplete(); GroupByMeta getMeta(); }
/**
 * Verifies that normilizeAllocation() sizes every dependent array to its primary:
 * file-level arrays follow the 3 file names, sheet-level arrays follow the 4 sheet
 * names; the primaries keep their values while padded secondaries are null / zero filled.
 */
@Test
public void testNormilizeAllocation() throws KettleException {
  // File-level arrays must all match the 3 configured file names.
  Assert.assertEquals( 3, meta.getFileName().length );
  Assert.assertEquals( 3, meta.getFileMask().length );
  Assert.assertEquals( 3, meta.getExcludeFileMask().length );
  Assert.assertEquals( 3, meta.getFileRequired().length );
  Assert.assertEquals( 3, meta.getIncludeSubFolders().length );
  // Sheet-level arrays must all match the 4 configured sheet names.
  Assert.assertEquals( 4, meta.getSheetName().length );
  Assert.assertEquals( 4, meta.getStartRow().length );
  Assert.assertEquals( 4, meta.getStartColumn().length );
  // Primary arrays keep their original values.
  Assert.assertArrayEquals( new String[] { "1", "2", "3" }, meta.getFileName() );
  Assert.assertArrayEquals( new String[] { "1", "2", "3", "4" }, meta.getSheetName() );
  // Padded string arrays are filled with nulls...
  String[][] paddedStringArrays = {
    meta.getFileMask(), meta.getExcludeFileMask(), meta.getFileRequired(), meta.getIncludeSubFolders() };
  for ( String[] padded : paddedStringArrays ) {
    for ( int i = 0; i < padded.length; i++ ) {
      Assert.assertEquals( null, padded[ i ] );
    }
  }
  // ...and padded int arrays with zeros.
  int[][] paddedIntArrays = { meta.getStartRow(), meta.getStartColumn() };
  for ( int[] padded : paddedIntArrays ) {
    for ( int i = 0; i < padded.length; i++ ) {
      Assert.assertEquals( 0, padded[ i ] );
    }
  }
}
/**
 * Normalizes the parallel configuration arrays: guarantees the primary arrays
 * (fileName, sheetName, field) are non-null, then resizes every dependent array —
 * file-level arrays to fileName's length, sheet-level arrays to sheetName's length —
 * via normilizeArray, padding with defaults where needed.
 */
public void normilizeAllocation() {
  // Replace null primaries with empty arrays so the lengths below are well-defined.
  if ( fileName == null ) {
    fileName = new String[ 0 ];
  }
  if ( sheetName == null ) {
    sheetName = new String[ 0 ];
  }
  if ( field == null ) {
    field = new ExcelInputField[ 0 ];
  }

  final int fileCount = fileName.length;
  final int sheetCount = sheetName.length;

  // Every per-file array tracks the number of file names.
  fileMask = normilizeArray( fileMask, fileCount );
  excludeFileMask = normilizeArray( excludeFileMask, fileCount );
  fileRequired = normilizeArray( fileRequired, fileCount );
  includeSubFolders = normilizeArray( includeSubFolders, fileCount );

  // Every per-sheet array tracks the number of sheet names.
  startRow = normilizeArray( startRow, sheetCount );
  startColumn = normilizeArray( startColumn, sheetCount );
}
// NOTE(review): dataset rows — extraction summaries of ExcelInputMeta at increasing context
// levels (focal method normilizeAllocation plus growing signature/field lists; the `class`
// keyword and non-focal bodies were stripped, and some rows are split mid-token across
// physical lines by the extractor). Kept verbatim as dataset content.
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { public void normilizeAllocation() { int nrfiles = 0; int nrsheets = 0; if ( fileName != null ) { nrfiles = fileName.length; } else { fileName = new String[ 0 ]; } if ( sheetName != null ) { nrsheets = sheetName.length; } else { sheetName = new String[ 0 ]; } if ( field == null ) { field = new ExcelInputField[ 0 ]; } fileMask = normilizeArray( fileMask, nrfiles ); excludeFileMask = normilizeArray( excludeFileMask, nrfiles ); fileRequired = normilizeArray( fileRequired, nrfiles ); includeSubFolders = normilizeArray( includeSubFolders, nrfiles ); startRow = normilizeArray( startRow, nrsheets ); startColumn = normilizeArray( startColumn, nrsheets ); } }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { public void normilizeAllocation() { int nrfiles = 0; int nrsheets = 0; if ( fileName != null ) { nrfiles = fileName.length; } else { fileName = new String[ 0 ]; } if ( sheetName != null ) { nrsheets = sheetName.length; } else { sheetName = new String[ 0 ]; } if ( field == null ) { field = new ExcelInputField[ 0 ]; } fileMask = normilizeArray( fileMask, nrfiles ); excludeFileMask = normilizeArray( excludeFileMask, nrfiles ); fileRequired = normilizeArray( fileRequired, nrfiles ); includeSubFolders = normilizeArray( includeSubFolders, nrfiles ); startRow = normilizeArray( startRow, nrsheets ); startColumn = normilizeArray( startColumn, nrsheets ); } ExcelInputMeta(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { public void normilizeAllocation() { int nrfiles = 0; int nrsheets = 0; if ( fileName != null ) { nrfiles = fileName.length; } else { fileName = new String[ 0 ]; } if ( sheetName != null ) { nrsheets = sheetName.length; } else { sheetName = new String[ 0 ]; } if ( field == null ) { field = new ExcelInputField[ 0 ]; } fileMask = normilizeArray( fileMask, nrfiles ); excludeFileMask = normilizeArray( excludeFileMask, nrfiles ); fileRequired = normilizeArray( fileRequired, nrfiles ); includeSubFolders = normilizeArray( includeSubFolders, nrfiles ); startRow = normilizeArray( startRow, nrsheets ); startColumn = normilizeArray( startColumn, nrsheets ); } ExcelInputMeta(); String getShortFileNameField(); void setShortFileNameField( String field ); String getPathField(); void setPathField( String field ); String isHiddenField(); void setIsHiddenField( String field ); String getLastModificationDateField(); void setLastModificationDateField( String field ); String getUriField(); void setUriField( String field ); String getRootUriField(); void setRootUriField( String field ); String getExtensionField(); void setExtensionField( String field ); String getSizeField(); void setSizeField( String field ); ExcelInputField[] getField(); void setField( ExcelInputField[] fields ); String getFileField(); void setFileField( String fileField ); String[] getFileMask(); void setFileMask( String[] fileMask ); @Deprecated String[] getExludeFileMask(); String[] getExcludeFileMask(); void setExcludeFileMask( String[] excludeFileMask ); String[] getIncludeSubFolders(); void setIncludeSubFolders( String[] includeSubFoldersin ); String getRequiredFilesCode( String tt ); String getRequiredFilesDesc( String tt ); String[] getFileName(); void setFileName( String[] fileName ); boolean ignoreEmptyRows(); void setIgnoreEmptyRows( boolean ignoreEmptyRows ); long getRowLimit(); void setRowLimit( long rowLimit ); String getRowNumberField(); 
void setRowNumberField( String rowNumberField ); String getSheetRowNumberField(); void setSheetRowNumberField( String rowNumberField ); String getSheetField(); void setSheetField( String sheetField ); String[] getSheetName(); void setSheetName( String[] sheetName ); int[] getStartColumn(); void setStartColumn( int[] startColumn ); int[] getStartRow(); void setStartRow( int[] startRow ); boolean startsWithHeader(); void setStartsWithHeader( boolean startsWithHeader ); boolean stopOnEmpty(); void setStopOnEmpty( boolean stopOnEmpty ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); @Override Object clone(); String[] normilizeArray( String[] array, int length ); int[] normilizeArray( int[] array, int length ); void normilizeAllocation(); void allocate( int nrfiles, int nrsheets, int nrfields ); void allocateFiles( int nrfiles ); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); static final int getTrimTypeByCode( String tt ); static final int getTrimTypeByDesc( String tt ); static final String getTrimTypeCode( int i ); static final String getTrimTypeDesc( int i ); String[] getFilePaths( VariableSpace space ); FileInputList getFileList( VariableSpace space ); String getLookupStepname(); @Override void searchInfoAndTargetSteps( List<StepMeta> steps ); String[] getInfoSteps(); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface 
getEmptyFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); String getWarningFilesDestinationDirectory(); void setWarningFilesDestinationDirectory( String badLineFilesDestinationDirectory ); String getBadLineFilesExtension(); void setBadLineFilesExtension( String badLineFilesExtension ); boolean isErrorIgnored(); void setErrorIgnored( boolean errorIgnored ); String getErrorFilesDestinationDirectory(); void setErrorFilesDestinationDirectory( String errorLineFilesDestinationDirectory ); String getErrorFilesExtension(); void setErrorFilesExtension( String errorLineFilesExtension ); String getLineNumberFilesDestinationDirectory(); void setLineNumberFilesDestinationDirectory( String lineNumberFilesDestinationDirectory ); String getLineNumberFilesExtension(); void setLineNumberFilesExtension( String lineNumberFilesExtension ); boolean isErrorLineSkipped(); void setErrorLineSkipped( boolean errorLineSkipped ); boolean isStrictTypes(); void setStrictTypes( boolean strictTypes ); String[] getFileRequired(); void setFileRequired( String[] fileRequiredin ); String getAcceptingField(); void setAcceptingField( String acceptingField ); boolean isAcceptingFilenames(); void setAcceptingFilenames( boolean acceptingFilenames ); StepMeta getAcceptingStep(); void setAcceptingStep( StepMeta acceptingStep ); String getAcceptingStepName(); void setAcceptingStepName( String acceptingStepName ); @Override String[] getUsedLibraries(); String getEncoding(); void setEncoding( String encoding ); void setAddResultFile( boolean isaddresult ); boolean isAddResultFile(); boolean readAllSheets(); @Override String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); SpreadSheetType getSpreadSheetType(); void setSpreadSheetType( SpreadSheetType 
spreadSheetType ); @AfterInjection void afterInjectionSynchronization(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { public void normilizeAllocation() { int nrfiles = 0; int nrsheets = 0; if ( fileName != null ) { nrfiles = fileName.length; } else { fileName = new String[ 0 ]; } if ( sheetName != null ) { nrsheets = sheetName.length; } else { sheetName = new String[ 0 ]; } if ( field == null ) { field = new ExcelInputField[ 0 ]; } fileMask = normilizeArray( fileMask, nrfiles ); excludeFileMask = normilizeArray( excludeFileMask, nrfiles ); fileRequired = normilizeArray( fileRequired, nrfiles ); includeSubFolders = normilizeArray( includeSubFolders, nrfiles ); startRow = normilizeArray( startRow, nrsheets ); startColumn = normilizeArray( startColumn, nrsheets ); } ExcelInputMeta(); String getShortFileNameField(); void setShortFileNameField( String field ); String getPathField(); void setPathField( String field ); String isHiddenField(); void setIsHiddenField( String field ); String getLastModificationDateField(); void setLastModificationDateField( String field ); String getUriField(); void setUriField( String field ); String getRootUriField(); void setRootUriField( String field ); String getExtensionField(); void setExtensionField( String field ); String getSizeField(); void setSizeField( String field ); ExcelInputField[] getField(); void setField( ExcelInputField[] fields ); String getFileField(); void setFileField( String fileField ); String[] getFileMask(); void setFileMask( String[] fileMask ); @Deprecated String[] getExludeFileMask(); String[] getExcludeFileMask(); void setExcludeFileMask( String[] excludeFileMask ); String[] getIncludeSubFolders(); void setIncludeSubFolders( String[] includeSubFoldersin ); String getRequiredFilesCode( String tt ); String getRequiredFilesDesc( String tt ); String[] getFileName(); void setFileName( String[] fileName ); boolean ignoreEmptyRows(); void setIgnoreEmptyRows( boolean ignoreEmptyRows ); long getRowLimit(); void setRowLimit( long rowLimit ); String getRowNumberField(); 
void setRowNumberField( String rowNumberField ); String getSheetRowNumberField(); void setSheetRowNumberField( String rowNumberField ); String getSheetField(); void setSheetField( String sheetField ); String[] getSheetName(); void setSheetName( String[] sheetName ); int[] getStartColumn(); void setStartColumn( int[] startColumn ); int[] getStartRow(); void setStartRow( int[] startRow ); boolean startsWithHeader(); void setStartsWithHeader( boolean startsWithHeader ); boolean stopOnEmpty(); void setStopOnEmpty( boolean stopOnEmpty ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); @Override Object clone(); String[] normilizeArray( String[] array, int length ); int[] normilizeArray( int[] array, int length ); void normilizeAllocation(); void allocate( int nrfiles, int nrsheets, int nrfields ); void allocateFiles( int nrfiles ); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); static final int getTrimTypeByCode( String tt ); static final int getTrimTypeByDesc( String tt ); static final String getTrimTypeCode( int i ); static final String getTrimTypeDesc( int i ); String[] getFilePaths( VariableSpace space ); FileInputList getFileList( VariableSpace space ); String getLookupStepname(); @Override void searchInfoAndTargetSteps( List<StepMeta> steps ); String[] getInfoSteps(); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface 
getEmptyFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); String getWarningFilesDestinationDirectory(); void setWarningFilesDestinationDirectory( String badLineFilesDestinationDirectory ); String getBadLineFilesExtension(); void setBadLineFilesExtension( String badLineFilesExtension ); boolean isErrorIgnored(); void setErrorIgnored( boolean errorIgnored ); String getErrorFilesDestinationDirectory(); void setErrorFilesDestinationDirectory( String errorLineFilesDestinationDirectory ); String getErrorFilesExtension(); void setErrorFilesExtension( String errorLineFilesExtension ); String getLineNumberFilesDestinationDirectory(); void setLineNumberFilesDestinationDirectory( String lineNumberFilesDestinationDirectory ); String getLineNumberFilesExtension(); void setLineNumberFilesExtension( String lineNumberFilesExtension ); boolean isErrorLineSkipped(); void setErrorLineSkipped( boolean errorLineSkipped ); boolean isStrictTypes(); void setStrictTypes( boolean strictTypes ); String[] getFileRequired(); void setFileRequired( String[] fileRequiredin ); String getAcceptingField(); void setAcceptingField( String acceptingField ); boolean isAcceptingFilenames(); void setAcceptingFilenames( boolean acceptingFilenames ); StepMeta getAcceptingStep(); void setAcceptingStep( StepMeta acceptingStep ); String getAcceptingStepName(); void setAcceptingStepName( String acceptingStepName ); @Override String[] getUsedLibraries(); String getEncoding(); void setEncoding( String encoding ); void setAddResultFile( boolean isaddresult ); boolean isAddResultFile(); boolean readAllSheets(); @Override String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); SpreadSheetType getSpreadSheetType(); void setSpreadSheetType( SpreadSheetType 
spreadSheetType ); @AfterInjection void afterInjectionSynchronization(); static final String[] RequiredFilesDesc; static final String[] RequiredFilesCode; static final int TYPE_TRIM_NONE; static final int TYPE_TRIM_LEFT; static final int TYPE_TRIM_RIGHT; static final int TYPE_TRIM_BOTH; static final String[] type_trim_code; static final String[] type_trim_desc; static final String STRING_SEPARATOR; }
// Golden-string test: asserts ExcelInputMeta.getXML() serializes the test fixture exactly
// (header flags, file/field/sheet sections, error-handling and additional-field tags).
// NOTE(review): the expected-XML literal is one huge concatenation that the extractor split
// mid-string across physical lines; left byte-identical — any change to the literal would
// change what the test asserts.
@Test public void testGetXML() throws KettleException { Assert.assertEquals( " <header>N</header>" + SystemUtils.LINE_SEPARATOR + " <noempty>N</noempty>" + SystemUtils.LINE_SEPARATOR + " <stoponempty>N</stoponempty>" + SystemUtils.LINE_SEPARATOR + " <filefield/>" + SystemUtils.LINE_SEPARATOR + " <sheetfield/>" + SystemUtils.LINE_SEPARATOR + " <sheetrownumfield/>" + SystemUtils.LINE_SEPARATOR + " <rownumfield/>" + SystemUtils.LINE_SEPARATOR + " <sheetfield/>" + SystemUtils.LINE_SEPARATOR + " <filefield/>" + SystemUtils.LINE_SEPARATOR + " <limit>0</limit>" + SystemUtils.LINE_SEPARATOR + " <encoding/>" + SystemUtils.LINE_SEPARATOR + " <add_to_result_filenames>N</add_to_result_filenames>" + SystemUtils.LINE_SEPARATOR + " <accept_filenames>N</accept_filenames>" + SystemUtils.LINE_SEPARATOR + " <accept_field/>" + SystemUtils.LINE_SEPARATOR + " <accept_stepname/>" + SystemUtils.LINE_SEPARATOR + " <file>" + SystemUtils.LINE_SEPARATOR + " <name>1</name>" + SystemUtils.LINE_SEPARATOR + " <filemask/>" + SystemUtils.LINE_SEPARATOR + " <exclude_filemask/>" + SystemUtils.LINE_SEPARATOR + " <file_required/>" + SystemUtils.LINE_SEPARATOR + " <include_subfolders/>" + SystemUtils.LINE_SEPARATOR + " <name>2</name>" + SystemUtils.LINE_SEPARATOR + " <filemask/>" + SystemUtils.LINE_SEPARATOR + " <exclude_filemask/>" + SystemUtils.LINE_SEPARATOR + " <file_required/>" + SystemUtils.LINE_SEPARATOR + " <include_subfolders/>" + SystemUtils.LINE_SEPARATOR + " <name>3</name>" + SystemUtils.LINE_SEPARATOR + " <filemask/>" + SystemUtils.LINE_SEPARATOR + " <exclude_filemask/>" + SystemUtils.LINE_SEPARATOR + " <file_required/>" + SystemUtils.LINE_SEPARATOR + " <include_subfolders/>" + SystemUtils.LINE_SEPARATOR + " </file>" + SystemUtils.LINE_SEPARATOR + " <fields>" + SystemUtils.LINE_SEPARATOR + " <field>" + SystemUtils.LINE_SEPARATOR + " <name>1</name>" + SystemUtils.LINE_SEPARATOR + " <type>String</type>" + SystemUtils.LINE_SEPARATOR + " <length>1</length>" + SystemUtils.LINE_SEPARATOR + " 
<precision>-1</precision>" + SystemUtils.LINE_SEPARATOR + " <trim_type>none</trim_type>" + SystemUtils.LINE_SEPARATOR + " <repeat>N</repeat>" + SystemUtils.LINE_SEPARATOR + " <format/>" + SystemUtils.LINE_SEPARATOR + " <currency/>" + SystemUtils.LINE_SEPARATOR + " <decimal/>" + SystemUtils.LINE_SEPARATOR + " <group/>" + SystemUtils.LINE_SEPARATOR + " </field>" + SystemUtils.LINE_SEPARATOR + " <field>" + SystemUtils.LINE_SEPARATOR + " <name>2</name>" + SystemUtils.LINE_SEPARATOR + " <type>String</type>" + SystemUtils.LINE_SEPARATOR + " <length>2</length>" + SystemUtils.LINE_SEPARATOR + " <precision>-1</precision>" + SystemUtils.LINE_SEPARATOR + " <trim_type>none</trim_type>" + SystemUtils.LINE_SEPARATOR + " <repeat>N</repeat>" + SystemUtils.LINE_SEPARATOR + " <format/>" + SystemUtils.LINE_SEPARATOR + " <currency/>" + SystemUtils.LINE_SEPARATOR + " <decimal/>" + SystemUtils.LINE_SEPARATOR + " <group/>" + SystemUtils.LINE_SEPARATOR + " </field>" + SystemUtils.LINE_SEPARATOR + " </fields>" + SystemUtils.LINE_SEPARATOR + " <sheets>" + SystemUtils.LINE_SEPARATOR + " <sheet>" + SystemUtils.LINE_SEPARATOR + " <name>1</name>" + SystemUtils.LINE_SEPARATOR + " <startrow>0</startrow>" + SystemUtils.LINE_SEPARATOR + " <startcol>0</startcol>" + SystemUtils.LINE_SEPARATOR + " </sheet>" + SystemUtils.LINE_SEPARATOR + " <sheet>" + SystemUtils.LINE_SEPARATOR + " <name>2</name>" + SystemUtils.LINE_SEPARATOR + " <startrow>0</startrow>" + SystemUtils.LINE_SEPARATOR + " <startcol>0</startcol>" + SystemUtils.LINE_SEPARATOR + " </sheet>" + SystemUtils.LINE_SEPARATOR + " <sheet>" + SystemUtils.LINE_SEPARATOR + " <name>3</name>" + SystemUtils.LINE_SEPARATOR + " <startrow>0</startrow>" + SystemUtils.LINE_SEPARATOR + " <startcol>0</startcol>" + SystemUtils.LINE_SEPARATOR + " </sheet>" + SystemUtils.LINE_SEPARATOR + " <sheet>" + SystemUtils.LINE_SEPARATOR + " <name>4</name>" + SystemUtils.LINE_SEPARATOR + " <startrow>0</startrow>" + SystemUtils.LINE_SEPARATOR + " <startcol>0</startcol>" + 
SystemUtils.LINE_SEPARATOR + " </sheet>" + SystemUtils.LINE_SEPARATOR + " </sheets>" + SystemUtils.LINE_SEPARATOR + " <strict_types>N</strict_types>" + SystemUtils.LINE_SEPARATOR + " <error_ignored>N</error_ignored>" + SystemUtils.LINE_SEPARATOR + " <error_line_skipped>N</error_line_skipped>" + SystemUtils.LINE_SEPARATOR + " <bad_line_files_destination_directory/>" + SystemUtils.LINE_SEPARATOR + " <bad_line_files_extension/>" + SystemUtils.LINE_SEPARATOR + " <error_line_files_destination_directory/>" + SystemUtils.LINE_SEPARATOR + " <error_line_files_extension/>" + SystemUtils.LINE_SEPARATOR + " <line_number_files_destination_directory/>" + SystemUtils.LINE_SEPARATOR + " <line_number_files_extension/>" + SystemUtils.LINE_SEPARATOR + " <shortFileFieldName/>" + SystemUtils.LINE_SEPARATOR + " <pathFieldName/>" + SystemUtils.LINE_SEPARATOR + " <hiddenFieldName/>" + SystemUtils.LINE_SEPARATOR + " <lastModificationTimeFieldName/>" + SystemUtils.LINE_SEPARATOR + " <uriNameFieldName/>" + SystemUtils.LINE_SEPARATOR + " <rootUriNameFieldName/>" + SystemUtils.LINE_SEPARATOR + " <extensionFieldName/>" + SystemUtils.LINE_SEPARATOR + " <sizeFieldName/>" + SystemUtils.LINE_SEPARATOR + " <spreadsheet_type/>" + SystemUtils.LINE_SEPARATOR, meta.getXML() ); }
@Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); normilizeAllocation(); retval.append( " " ).append( XMLHandler.addTagValue( "header", startsWithHeader ) ); retval.append( " " ).append( XMLHandler.addTagValue( "noempty", ignoreEmptyRows ) ); retval.append( " " ).append( XMLHandler.addTagValue( "stoponempty", stopOnEmpty ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetrownumfield", sheetRowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownumfield", rowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " + XMLHandler.addTagValue( "add_to_result_filenames", isaddresult ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_field", acceptingField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? 
acceptingStep.getName() : StringUtil.EMPTY_STRING ) ) ); retval.append( " <file>" ).append( Const.CR ); for ( int i = 0; i < fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[ i ] ) ); } retval.append( " </file>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < field.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field[ i ].getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field[ i ].getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field[ i ].getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field[ i ].getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", field[ i ].getTrimTypeCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "repeat", field[ i ].isRepeated() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field[ i ].getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field[ i ].getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field[ i ].getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field[ i ].getGroupSymbol() ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " <sheets>" ).append( Const.CR ); for ( int i = 0; i < sheetName.length; i++ ) { retval.append( " <sheet>" ).append( Const.CR ); 
retval.append( " " ).append( XMLHandler.addTagValue( "name", sheetName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startrow", startRow[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startcol", startColumn[ i ] ) ); retval.append( " </sheet>" ).append( Const.CR ); } retval.append( " </sheets>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "strict_types", strictTypes ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_ignored", errorIgnored ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_skipped", errorLineSkipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_destination_directory", warningFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_extension", warningFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_destination_directory", errorFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_extension", errorFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_destination_directory", lineNumberFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_extension", lineNumberFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "spreadsheet_type", ( spreadSheetType != null ? spreadSheetType.toString() : StringUtil.EMPTY_STRING ) ) ); return retval.toString(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); normilizeAllocation(); retval.append( " " ).append( XMLHandler.addTagValue( "header", startsWithHeader ) ); retval.append( " " ).append( XMLHandler.addTagValue( "noempty", ignoreEmptyRows ) ); retval.append( " " ).append( XMLHandler.addTagValue( "stoponempty", stopOnEmpty ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetrownumfield", sheetRowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownumfield", rowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " + XMLHandler.addTagValue( "add_to_result_filenames", isaddresult ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_field", acceptingField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? 
acceptingStep.getName() : StringUtil.EMPTY_STRING ) ) ); retval.append( " <file>" ).append( Const.CR ); for ( int i = 0; i < fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[ i ] ) ); } retval.append( " </file>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < field.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field[ i ].getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field[ i ].getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field[ i ].getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field[ i ].getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", field[ i ].getTrimTypeCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "repeat", field[ i ].isRepeated() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field[ i ].getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field[ i ].getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field[ i ].getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field[ i ].getGroupSymbol() ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " <sheets>" ).append( Const.CR ); for ( int i = 0; i < sheetName.length; i++ ) { retval.append( " <sheet>" ).append( Const.CR ); 
retval.append( " " ).append( XMLHandler.addTagValue( "name", sheetName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startrow", startRow[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startcol", startColumn[ i ] ) ); retval.append( " </sheet>" ).append( Const.CR ); } retval.append( " </sheets>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "strict_types", strictTypes ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_ignored", errorIgnored ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_skipped", errorLineSkipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_destination_directory", warningFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_extension", warningFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_destination_directory", errorFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_extension", errorFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_destination_directory", lineNumberFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_extension", lineNumberFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "spreadsheet_type", ( spreadSheetType != null ? spreadSheetType.toString() : StringUtil.EMPTY_STRING ) ) ); return retval.toString(); } }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); normilizeAllocation(); retval.append( " " ).append( XMLHandler.addTagValue( "header", startsWithHeader ) ); retval.append( " " ).append( XMLHandler.addTagValue( "noempty", ignoreEmptyRows ) ); retval.append( " " ).append( XMLHandler.addTagValue( "stoponempty", stopOnEmpty ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetrownumfield", sheetRowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownumfield", rowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " + XMLHandler.addTagValue( "add_to_result_filenames", isaddresult ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_field", acceptingField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? 
acceptingStep.getName() : StringUtil.EMPTY_STRING ) ) ); retval.append( " <file>" ).append( Const.CR ); for ( int i = 0; i < fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[ i ] ) ); } retval.append( " </file>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < field.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field[ i ].getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field[ i ].getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field[ i ].getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field[ i ].getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", field[ i ].getTrimTypeCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "repeat", field[ i ].isRepeated() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field[ i ].getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field[ i ].getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field[ i ].getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field[ i ].getGroupSymbol() ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " <sheets>" ).append( Const.CR ); for ( int i = 0; i < sheetName.length; i++ ) { retval.append( " <sheet>" ).append( Const.CR ); 
retval.append( " " ).append( XMLHandler.addTagValue( "name", sheetName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startrow", startRow[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startcol", startColumn[ i ] ) ); retval.append( " </sheet>" ).append( Const.CR ); } retval.append( " </sheets>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "strict_types", strictTypes ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_ignored", errorIgnored ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_skipped", errorLineSkipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_destination_directory", warningFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_extension", warningFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_destination_directory", errorFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_extension", errorFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_destination_directory", lineNumberFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_extension", lineNumberFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "spreadsheet_type", ( spreadSheetType != null ? spreadSheetType.toString() : StringUtil.EMPTY_STRING ) ) ); return retval.toString(); } ExcelInputMeta(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); normilizeAllocation(); retval.append( " " ).append( XMLHandler.addTagValue( "header", startsWithHeader ) ); retval.append( " " ).append( XMLHandler.addTagValue( "noempty", ignoreEmptyRows ) ); retval.append( " " ).append( XMLHandler.addTagValue( "stoponempty", stopOnEmpty ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetrownumfield", sheetRowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownumfield", rowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " + XMLHandler.addTagValue( "add_to_result_filenames", isaddresult ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_field", acceptingField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? 
acceptingStep.getName() : StringUtil.EMPTY_STRING ) ) ); retval.append( " <file>" ).append( Const.CR ); for ( int i = 0; i < fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[ i ] ) ); } retval.append( " </file>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < field.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field[ i ].getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field[ i ].getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field[ i ].getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field[ i ].getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", field[ i ].getTrimTypeCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "repeat", field[ i ].isRepeated() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field[ i ].getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field[ i ].getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field[ i ].getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field[ i ].getGroupSymbol() ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " <sheets>" ).append( Const.CR ); for ( int i = 0; i < sheetName.length; i++ ) { retval.append( " <sheet>" ).append( Const.CR ); 
retval.append( " " ).append( XMLHandler.addTagValue( "name", sheetName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startrow", startRow[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startcol", startColumn[ i ] ) ); retval.append( " </sheet>" ).append( Const.CR ); } retval.append( " </sheets>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "strict_types", strictTypes ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_ignored", errorIgnored ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_skipped", errorLineSkipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_destination_directory", warningFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_extension", warningFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_destination_directory", errorFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_extension", errorFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_destination_directory", lineNumberFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_extension", lineNumberFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "spreadsheet_type", ( spreadSheetType != null ? spreadSheetType.toString() : StringUtil.EMPTY_STRING ) ) ); return retval.toString(); } ExcelInputMeta(); String getShortFileNameField(); void setShortFileNameField( String field ); String getPathField(); void setPathField( String field ); String isHiddenField(); void setIsHiddenField( String field ); String getLastModificationDateField(); void setLastModificationDateField( String field ); String getUriField(); void setUriField( String field ); String getRootUriField(); void setRootUriField( String field ); String getExtensionField(); void setExtensionField( String field ); String getSizeField(); void setSizeField( String field ); ExcelInputField[] getField(); void setField( ExcelInputField[] fields ); String getFileField(); void setFileField( String fileField ); String[] getFileMask(); void setFileMask( String[] fileMask ); @Deprecated String[] getExludeFileMask(); String[] getExcludeFileMask(); void setExcludeFileMask( String[] excludeFileMask ); String[] getIncludeSubFolders(); void setIncludeSubFolders( String[] includeSubFoldersin ); String getRequiredFilesCode( String tt ); String getRequiredFilesDesc( String tt ); String[] getFileName(); void setFileName( String[] fileName ); boolean ignoreEmptyRows(); void setIgnoreEmptyRows( boolean ignoreEmptyRows ); long getRowLimit(); void setRowLimit( long rowLimit ); String getRowNumberField(); void setRowNumberField( String rowNumberField ); String getSheetRowNumberField(); void setSheetRowNumberField( String rowNumberField ); String getSheetField(); void setSheetField( String sheetField ); String[] getSheetName(); void setSheetName( String[] sheetName ); int[] getStartColumn(); void setStartColumn( int[] startColumn ); int[] getStartRow(); void setStartRow( 
int[] startRow ); boolean startsWithHeader(); void setStartsWithHeader( boolean startsWithHeader ); boolean stopOnEmpty(); void setStopOnEmpty( boolean stopOnEmpty ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); @Override Object clone(); String[] normilizeArray( String[] array, int length ); int[] normilizeArray( int[] array, int length ); void normilizeAllocation(); void allocate( int nrfiles, int nrsheets, int nrfields ); void allocateFiles( int nrfiles ); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); static final int getTrimTypeByCode( String tt ); static final int getTrimTypeByDesc( String tt ); static final String getTrimTypeCode( int i ); static final String getTrimTypeDesc( int i ); String[] getFilePaths( VariableSpace space ); FileInputList getFileList( VariableSpace space ); String getLookupStepname(); @Override void searchInfoAndTargetSteps( List<StepMeta> steps ); String[] getInfoSteps(); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getEmptyFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); String getWarningFilesDestinationDirectory(); void setWarningFilesDestinationDirectory( String badLineFilesDestinationDirectory ); String getBadLineFilesExtension(); void 
setBadLineFilesExtension( String badLineFilesExtension ); boolean isErrorIgnored(); void setErrorIgnored( boolean errorIgnored ); String getErrorFilesDestinationDirectory(); void setErrorFilesDestinationDirectory( String errorLineFilesDestinationDirectory ); String getErrorFilesExtension(); void setErrorFilesExtension( String errorLineFilesExtension ); String getLineNumberFilesDestinationDirectory(); void setLineNumberFilesDestinationDirectory( String lineNumberFilesDestinationDirectory ); String getLineNumberFilesExtension(); void setLineNumberFilesExtension( String lineNumberFilesExtension ); boolean isErrorLineSkipped(); void setErrorLineSkipped( boolean errorLineSkipped ); boolean isStrictTypes(); void setStrictTypes( boolean strictTypes ); String[] getFileRequired(); void setFileRequired( String[] fileRequiredin ); String getAcceptingField(); void setAcceptingField( String acceptingField ); boolean isAcceptingFilenames(); void setAcceptingFilenames( boolean acceptingFilenames ); StepMeta getAcceptingStep(); void setAcceptingStep( StepMeta acceptingStep ); String getAcceptingStepName(); void setAcceptingStepName( String acceptingStepName ); @Override String[] getUsedLibraries(); String getEncoding(); void setEncoding( String encoding ); void setAddResultFile( boolean isaddresult ); boolean isAddResultFile(); boolean readAllSheets(); @Override String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); SpreadSheetType getSpreadSheetType(); void setSpreadSheetType( SpreadSheetType spreadSheetType ); @AfterInjection void afterInjectionSynchronization(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public String getXML() { StringBuilder retval = new StringBuilder( 1024 ); normilizeAllocation(); retval.append( " " ).append( XMLHandler.addTagValue( "header", startsWithHeader ) ); retval.append( " " ).append( XMLHandler.addTagValue( "noempty", ignoreEmptyRows ) ); retval.append( " " ).append( XMLHandler.addTagValue( "stoponempty", stopOnEmpty ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetrownumfield", sheetRowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rownumfield", rowNumberField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sheetfield", sheetField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filefield", fileField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "limit", rowLimit ) ); retval.append( " " ).append( XMLHandler.addTagValue( "encoding", encoding ) ); retval.append( " " + XMLHandler.addTagValue( "add_to_result_filenames", isaddresult ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_filenames", acceptingFilenames ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_field", acceptingField ) ); retval.append( " " ).append( XMLHandler.addTagValue( "accept_stepname", ( acceptingStep != null ? 
acceptingStep.getName() : StringUtil.EMPTY_STRING ) ) ); retval.append( " <file>" ).append( Const.CR ); for ( int i = 0; i < fileName.length; i++ ) { retval.append( " " ).append( XMLHandler.addTagValue( "name", fileName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "filemask", fileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "exclude_filemask", excludeFileMask[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "file_required", fileRequired[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "include_subfolders", includeSubFolders[ i ] ) ); } retval.append( " </file>" ).append( Const.CR ); retval.append( " <fields>" ).append( Const.CR ); for ( int i = 0; i < field.length; i++ ) { retval.append( " <field>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "name", field[ i ].getName() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "type", field[ i ].getTypeDesc() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "length", field[ i ].getLength() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "precision", field[ i ].getPrecision() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "trim_type", field[ i ].getTrimTypeCode() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "repeat", field[ i ].isRepeated() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "format", field[ i ].getFormat() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "currency", field[ i ].getCurrencySymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "decimal", field[ i ].getDecimalSymbol() ) ); retval.append( " " ).append( XMLHandler.addTagValue( "group", field[ i ].getGroupSymbol() ) ); retval.append( " </field>" ).append( Const.CR ); } retval.append( " </fields>" ).append( Const.CR ); retval.append( " <sheets>" ).append( Const.CR ); for ( int i = 0; i < sheetName.length; i++ ) { retval.append( " <sheet>" ).append( Const.CR ); 
retval.append( " " ).append( XMLHandler.addTagValue( "name", sheetName[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startrow", startRow[ i ] ) ); retval.append( " " ).append( XMLHandler.addTagValue( "startcol", startColumn[ i ] ) ); retval.append( " </sheet>" ).append( Const.CR ); } retval.append( " </sheets>" ).append( Const.CR ); retval.append( " " ).append( XMLHandler.addTagValue( "strict_types", strictTypes ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_ignored", errorIgnored ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_skipped", errorLineSkipped ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_destination_directory", warningFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "bad_line_files_extension", warningFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_destination_directory", errorFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "error_line_files_extension", errorFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_destination_directory", lineNumberFilesDestinationDirectory ) ); retval.append( " " ).append( XMLHandler.addTagValue( "line_number_files_extension", lineNumberFilesExtension ) ); retval.append( " " ).append( XMLHandler.addTagValue( "shortFileFieldName", shortFileFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "pathFieldName", pathFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "hiddenFieldName", hiddenFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "lastModificationTimeFieldName", lastModificationTimeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "uriNameFieldName", uriNameFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "rootUriNameFieldName", rootUriNameFieldName ) ); retval.append( " " ).append( 
XMLHandler.addTagValue( "extensionFieldName", extensionFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "sizeFieldName", sizeFieldName ) ); retval.append( " " ).append( XMLHandler.addTagValue( "spreadsheet_type", ( spreadSheetType != null ? spreadSheetType.toString() : StringUtil.EMPTY_STRING ) ) ); return retval.toString(); } ExcelInputMeta(); String getShortFileNameField(); void setShortFileNameField( String field ); String getPathField(); void setPathField( String field ); String isHiddenField(); void setIsHiddenField( String field ); String getLastModificationDateField(); void setLastModificationDateField( String field ); String getUriField(); void setUriField( String field ); String getRootUriField(); void setRootUriField( String field ); String getExtensionField(); void setExtensionField( String field ); String getSizeField(); void setSizeField( String field ); ExcelInputField[] getField(); void setField( ExcelInputField[] fields ); String getFileField(); void setFileField( String fileField ); String[] getFileMask(); void setFileMask( String[] fileMask ); @Deprecated String[] getExludeFileMask(); String[] getExcludeFileMask(); void setExcludeFileMask( String[] excludeFileMask ); String[] getIncludeSubFolders(); void setIncludeSubFolders( String[] includeSubFoldersin ); String getRequiredFilesCode( String tt ); String getRequiredFilesDesc( String tt ); String[] getFileName(); void setFileName( String[] fileName ); boolean ignoreEmptyRows(); void setIgnoreEmptyRows( boolean ignoreEmptyRows ); long getRowLimit(); void setRowLimit( long rowLimit ); String getRowNumberField(); void setRowNumberField( String rowNumberField ); String getSheetRowNumberField(); void setSheetRowNumberField( String rowNumberField ); String getSheetField(); void setSheetField( String sheetField ); String[] getSheetName(); void setSheetName( String[] sheetName ); int[] getStartColumn(); void setStartColumn( int[] startColumn ); int[] getStartRow(); void setStartRow( 
int[] startRow ); boolean startsWithHeader(); void setStartsWithHeader( boolean startsWithHeader ); boolean stopOnEmpty(); void setStopOnEmpty( boolean stopOnEmpty ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); @Override Object clone(); String[] normilizeArray( String[] array, int length ); int[] normilizeArray( int[] array, int length ); void normilizeAllocation(); void allocate( int nrfiles, int nrsheets, int nrfields ); void allocateFiles( int nrfiles ); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); static final int getTrimTypeByCode( String tt ); static final int getTrimTypeByDesc( String tt ); static final String getTrimTypeCode( int i ); static final String getTrimTypeDesc( int i ); String[] getFilePaths( VariableSpace space ); FileInputList getFileList( VariableSpace space ); String getLookupStepname(); @Override void searchInfoAndTargetSteps( List<StepMeta> steps ); String[] getInfoSteps(); @Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getEmptyFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); String getWarningFilesDestinationDirectory(); void setWarningFilesDestinationDirectory( String badLineFilesDestinationDirectory ); String getBadLineFilesExtension(); void 
setBadLineFilesExtension( String badLineFilesExtension ); boolean isErrorIgnored(); void setErrorIgnored( boolean errorIgnored ); String getErrorFilesDestinationDirectory(); void setErrorFilesDestinationDirectory( String errorLineFilesDestinationDirectory ); String getErrorFilesExtension(); void setErrorFilesExtension( String errorLineFilesExtension ); String getLineNumberFilesDestinationDirectory(); void setLineNumberFilesDestinationDirectory( String lineNumberFilesDestinationDirectory ); String getLineNumberFilesExtension(); void setLineNumberFilesExtension( String lineNumberFilesExtension ); boolean isErrorLineSkipped(); void setErrorLineSkipped( boolean errorLineSkipped ); boolean isStrictTypes(); void setStrictTypes( boolean strictTypes ); String[] getFileRequired(); void setFileRequired( String[] fileRequiredin ); String getAcceptingField(); void setAcceptingField( String acceptingField ); boolean isAcceptingFilenames(); void setAcceptingFilenames( boolean acceptingFilenames ); StepMeta getAcceptingStep(); void setAcceptingStep( StepMeta acceptingStep ); String getAcceptingStepName(); void setAcceptingStepName( String acceptingStepName ); @Override String[] getUsedLibraries(); String getEncoding(); void setEncoding( String encoding ); void setAddResultFile( boolean isaddresult ); boolean isAddResultFile(); boolean readAllSheets(); @Override String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); SpreadSheetType getSpreadSheetType(); void setSpreadSheetType( SpreadSheetType spreadSheetType ); @AfterInjection void afterInjectionSynchronization(); static final String[] RequiredFilesDesc; static final String[] RequiredFilesCode; static final int TYPE_TRIM_NONE; static final int TYPE_TRIM_LEFT; static final int TYPE_TRIM_RIGHT; static final int TYPE_TRIM_BOTH; static final String[] type_trim_code; static final String[] type_trim_desc; 
static final String STRING_SEPARATOR; }
@Test
public void testClone() throws KettleException {
  // A deep copy must serialize to exactly the same XML as its source:
  // comparing the serialized form covers every cloned field at once.
  final ExcelInputMeta copy = (ExcelInputMeta) meta.clone();
  final String expectedXml = meta.getXML();
  final String actualXml = copy.getXML();
  Assert.assertEquals( expectedXml, actualXml );
}
@Override
public Object clone() {
  // Start from BaseStepMeta's shallow copy, then give the copy its own
  // arrays so that mutating the clone never touches this instance.
  ExcelInputMeta copy = (ExcelInputMeta) super.clone();
  // Bring all parallel arrays to consistent lengths before measuring them.
  normilizeAllocation();
  final int fileCount = fileName.length;
  final int sheetCount = sheetName.length;
  final int fieldCount = field.length;
  copy.allocate( fileCount, sheetCount, fieldCount );
  // Fields are mutable objects: each entry needs its own deep clone.
  for ( int idx = 0; idx < fieldCount; idx++ ) {
    copy.field[ idx ] = (ExcelInputField) field[ idx ].clone();
  }
  // The remaining arrays hold immutable elements (String/int), so a
  // straight element copy into the freshly allocated arrays suffices.
  System.arraycopy( fileName, 0, copy.fileName, 0, fileCount );
  System.arraycopy( fileMask, 0, copy.fileMask, 0, fileCount );
  System.arraycopy( excludeFileMask, 0, copy.excludeFileMask, 0, fileCount );
  System.arraycopy( fileRequired, 0, copy.fileRequired, 0, fileCount );
  System.arraycopy( includeSubFolders, 0, copy.includeSubFolders, 0, fileCount );
  System.arraycopy( sheetName, 0, copy.sheetName, 0, sheetCount );
  System.arraycopy( startColumn, 0, copy.startColumn, 0, sheetCount );
  System.arraycopy( startRow, 0, copy.startRow, 0, sheetCount );
  return copy;
}
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public Object clone() { ExcelInputMeta retval = (ExcelInputMeta) super.clone(); normilizeAllocation(); int nrfiles = fileName.length; int nrsheets = sheetName.length; int nrfields = field.length; retval.allocate( nrfiles, nrsheets, nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.field[ i ] = (ExcelInputField) field[ i ].clone(); } System.arraycopy( fileName, 0, retval.fileName, 0, nrfiles ); System.arraycopy( fileMask, 0, retval.fileMask, 0, nrfiles ); System.arraycopy( excludeFileMask, 0, retval.excludeFileMask, 0, nrfiles ); System.arraycopy( fileRequired, 0, retval.fileRequired, 0, nrfiles ); System.arraycopy( includeSubFolders, 0, retval.includeSubFolders, 0, nrfiles ); System.arraycopy( sheetName, 0, retval.sheetName, 0, nrsheets ); System.arraycopy( startColumn, 0, retval.startColumn, 0, nrsheets ); System.arraycopy( startRow, 0, retval.startRow, 0, nrsheets ); return retval; } }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public Object clone() { ExcelInputMeta retval = (ExcelInputMeta) super.clone(); normilizeAllocation(); int nrfiles = fileName.length; int nrsheets = sheetName.length; int nrfields = field.length; retval.allocate( nrfiles, nrsheets, nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.field[ i ] = (ExcelInputField) field[ i ].clone(); } System.arraycopy( fileName, 0, retval.fileName, 0, nrfiles ); System.arraycopy( fileMask, 0, retval.fileMask, 0, nrfiles ); System.arraycopy( excludeFileMask, 0, retval.excludeFileMask, 0, nrfiles ); System.arraycopy( fileRequired, 0, retval.fileRequired, 0, nrfiles ); System.arraycopy( includeSubFolders, 0, retval.includeSubFolders, 0, nrfiles ); System.arraycopy( sheetName, 0, retval.sheetName, 0, nrsheets ); System.arraycopy( startColumn, 0, retval.startColumn, 0, nrsheets ); System.arraycopy( startRow, 0, retval.startRow, 0, nrsheets ); return retval; } ExcelInputMeta(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public Object clone() { ExcelInputMeta retval = (ExcelInputMeta) super.clone(); normilizeAllocation(); int nrfiles = fileName.length; int nrsheets = sheetName.length; int nrfields = field.length; retval.allocate( nrfiles, nrsheets, nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.field[ i ] = (ExcelInputField) field[ i ].clone(); } System.arraycopy( fileName, 0, retval.fileName, 0, nrfiles ); System.arraycopy( fileMask, 0, retval.fileMask, 0, nrfiles ); System.arraycopy( excludeFileMask, 0, retval.excludeFileMask, 0, nrfiles ); System.arraycopy( fileRequired, 0, retval.fileRequired, 0, nrfiles ); System.arraycopy( includeSubFolders, 0, retval.includeSubFolders, 0, nrfiles ); System.arraycopy( sheetName, 0, retval.sheetName, 0, nrsheets ); System.arraycopy( startColumn, 0, retval.startColumn, 0, nrsheets ); System.arraycopy( startRow, 0, retval.startRow, 0, nrsheets ); return retval; } ExcelInputMeta(); String getShortFileNameField(); void setShortFileNameField( String field ); String getPathField(); void setPathField( String field ); String isHiddenField(); void setIsHiddenField( String field ); String getLastModificationDateField(); void setLastModificationDateField( String field ); String getUriField(); void setUriField( String field ); String getRootUriField(); void setRootUriField( String field ); String getExtensionField(); void setExtensionField( String field ); String getSizeField(); void setSizeField( String field ); ExcelInputField[] getField(); void setField( ExcelInputField[] fields ); String getFileField(); void setFileField( String fileField ); String[] getFileMask(); void setFileMask( String[] fileMask ); @Deprecated String[] getExludeFileMask(); String[] getExcludeFileMask(); void setExcludeFileMask( String[] excludeFileMask ); String[] getIncludeSubFolders(); void setIncludeSubFolders( String[] includeSubFoldersin ); String getRequiredFilesCode( String tt ); String 
getRequiredFilesDesc( String tt ); String[] getFileName(); void setFileName( String[] fileName ); boolean ignoreEmptyRows(); void setIgnoreEmptyRows( boolean ignoreEmptyRows ); long getRowLimit(); void setRowLimit( long rowLimit ); String getRowNumberField(); void setRowNumberField( String rowNumberField ); String getSheetRowNumberField(); void setSheetRowNumberField( String rowNumberField ); String getSheetField(); void setSheetField( String sheetField ); String[] getSheetName(); void setSheetName( String[] sheetName ); int[] getStartColumn(); void setStartColumn( int[] startColumn ); int[] getStartRow(); void setStartRow( int[] startRow ); boolean startsWithHeader(); void setStartsWithHeader( boolean startsWithHeader ); boolean stopOnEmpty(); void setStopOnEmpty( boolean stopOnEmpty ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); @Override Object clone(); String[] normilizeArray( String[] array, int length ); int[] normilizeArray( int[] array, int length ); void normilizeAllocation(); void allocate( int nrfiles, int nrsheets, int nrfields ); void allocateFiles( int nrfiles ); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); static final int getTrimTypeByCode( String tt ); static final int getTrimTypeByDesc( String tt ); static final String getTrimTypeCode( int i ); static final String getTrimTypeDesc( int i ); String[] getFilePaths( VariableSpace space ); FileInputList getFileList( VariableSpace space ); String getLookupStepname(); @Override void searchInfoAndTargetSteps( List<StepMeta> steps ); String[] getInfoSteps(); 
@Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getEmptyFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); String getWarningFilesDestinationDirectory(); void setWarningFilesDestinationDirectory( String badLineFilesDestinationDirectory ); String getBadLineFilesExtension(); void setBadLineFilesExtension( String badLineFilesExtension ); boolean isErrorIgnored(); void setErrorIgnored( boolean errorIgnored ); String getErrorFilesDestinationDirectory(); void setErrorFilesDestinationDirectory( String errorLineFilesDestinationDirectory ); String getErrorFilesExtension(); void setErrorFilesExtension( String errorLineFilesExtension ); String getLineNumberFilesDestinationDirectory(); void setLineNumberFilesDestinationDirectory( String lineNumberFilesDestinationDirectory ); String getLineNumberFilesExtension(); void setLineNumberFilesExtension( String lineNumberFilesExtension ); boolean isErrorLineSkipped(); void setErrorLineSkipped( boolean errorLineSkipped ); boolean isStrictTypes(); void setStrictTypes( boolean strictTypes ); String[] getFileRequired(); void setFileRequired( String[] fileRequiredin ); String getAcceptingField(); void setAcceptingField( String acceptingField ); boolean isAcceptingFilenames(); void setAcceptingFilenames( boolean acceptingFilenames ); StepMeta getAcceptingStep(); void setAcceptingStep( StepMeta acceptingStep ); String getAcceptingStepName(); void setAcceptingStepName( String acceptingStepName ); @Override String[] getUsedLibraries(); String getEncoding(); void setEncoding( String encoding ); void setAddResultFile( boolean isaddresult ); boolean isAddResultFile(); boolean readAllSheets(); @Override 
String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); SpreadSheetType getSpreadSheetType(); void setSpreadSheetType( SpreadSheetType spreadSheetType ); @AfterInjection void afterInjectionSynchronization(); }
ExcelInputMeta extends BaseStepMeta implements StepMetaInterface { @Override public Object clone() { ExcelInputMeta retval = (ExcelInputMeta) super.clone(); normilizeAllocation(); int nrfiles = fileName.length; int nrsheets = sheetName.length; int nrfields = field.length; retval.allocate( nrfiles, nrsheets, nrfields ); for ( int i = 0; i < nrfields; i++ ) { retval.field[ i ] = (ExcelInputField) field[ i ].clone(); } System.arraycopy( fileName, 0, retval.fileName, 0, nrfiles ); System.arraycopy( fileMask, 0, retval.fileMask, 0, nrfiles ); System.arraycopy( excludeFileMask, 0, retval.excludeFileMask, 0, nrfiles ); System.arraycopy( fileRequired, 0, retval.fileRequired, 0, nrfiles ); System.arraycopy( includeSubFolders, 0, retval.includeSubFolders, 0, nrfiles ); System.arraycopy( sheetName, 0, retval.sheetName, 0, nrsheets ); System.arraycopy( startColumn, 0, retval.startColumn, 0, nrsheets ); System.arraycopy( startRow, 0, retval.startRow, 0, nrsheets ); return retval; } ExcelInputMeta(); String getShortFileNameField(); void setShortFileNameField( String field ); String getPathField(); void setPathField( String field ); String isHiddenField(); void setIsHiddenField( String field ); String getLastModificationDateField(); void setLastModificationDateField( String field ); String getUriField(); void setUriField( String field ); String getRootUriField(); void setRootUriField( String field ); String getExtensionField(); void setExtensionField( String field ); String getSizeField(); void setSizeField( String field ); ExcelInputField[] getField(); void setField( ExcelInputField[] fields ); String getFileField(); void setFileField( String fileField ); String[] getFileMask(); void setFileMask( String[] fileMask ); @Deprecated String[] getExludeFileMask(); String[] getExcludeFileMask(); void setExcludeFileMask( String[] excludeFileMask ); String[] getIncludeSubFolders(); void setIncludeSubFolders( String[] includeSubFoldersin ); String getRequiredFilesCode( String tt ); String 
getRequiredFilesDesc( String tt ); String[] getFileName(); void setFileName( String[] fileName ); boolean ignoreEmptyRows(); void setIgnoreEmptyRows( boolean ignoreEmptyRows ); long getRowLimit(); void setRowLimit( long rowLimit ); String getRowNumberField(); void setRowNumberField( String rowNumberField ); String getSheetRowNumberField(); void setSheetRowNumberField( String rowNumberField ); String getSheetField(); void setSheetField( String sheetField ); String[] getSheetName(); void setSheetName( String[] sheetName ); int[] getStartColumn(); void setStartColumn( int[] startColumn ); int[] getStartRow(); void setStartRow( int[] startRow ); boolean startsWithHeader(); void setStartsWithHeader( boolean startsWithHeader ); boolean stopOnEmpty(); void setStopOnEmpty( boolean stopOnEmpty ); @Override void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); @Override Object clone(); String[] normilizeArray( String[] array, int length ); int[] normilizeArray( int[] array, int length ); void normilizeAllocation(); void allocate( int nrfiles, int nrsheets, int nrfields ); void allocateFiles( int nrfiles ); @Override void setDefault(); @Override void getFields( RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String getXML(); @Override void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases ); @Override void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step ); static final int getTrimTypeByCode( String tt ); static final int getTrimTypeByDesc( String tt ); static final String getTrimTypeCode( int i ); static final String getTrimTypeDesc( int i ); String[] getFilePaths( VariableSpace space ); FileInputList getFileList( VariableSpace space ); String getLookupStepname(); @Override void searchInfoAndTargetSteps( List<StepMeta> steps ); String[] getInfoSteps(); 
@Override void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); RowMetaInterface getEmptyFields(); @Override StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta, Trans trans ); @Override StepDataInterface getStepData(); String getWarningFilesDestinationDirectory(); void setWarningFilesDestinationDirectory( String badLineFilesDestinationDirectory ); String getBadLineFilesExtension(); void setBadLineFilesExtension( String badLineFilesExtension ); boolean isErrorIgnored(); void setErrorIgnored( boolean errorIgnored ); String getErrorFilesDestinationDirectory(); void setErrorFilesDestinationDirectory( String errorLineFilesDestinationDirectory ); String getErrorFilesExtension(); void setErrorFilesExtension( String errorLineFilesExtension ); String getLineNumberFilesDestinationDirectory(); void setLineNumberFilesDestinationDirectory( String lineNumberFilesDestinationDirectory ); String getLineNumberFilesExtension(); void setLineNumberFilesExtension( String lineNumberFilesExtension ); boolean isErrorLineSkipped(); void setErrorLineSkipped( boolean errorLineSkipped ); boolean isStrictTypes(); void setStrictTypes( boolean strictTypes ); String[] getFileRequired(); void setFileRequired( String[] fileRequiredin ); String getAcceptingField(); void setAcceptingField( String acceptingField ); boolean isAcceptingFilenames(); void setAcceptingFilenames( boolean acceptingFilenames ); StepMeta getAcceptingStep(); void setAcceptingStep( StepMeta acceptingStep ); String getAcceptingStepName(); void setAcceptingStepName( String acceptingStepName ); @Override String[] getUsedLibraries(); String getEncoding(); void setEncoding( String encoding ); void setAddResultFile( boolean isaddresult ); boolean isAddResultFile(); boolean readAllSheets(); @Override 
String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); SpreadSheetType getSpreadSheetType(); void setSpreadSheetType( SpreadSheetType spreadSheetType ); @AfterInjection void afterInjectionSynchronization(); static final String[] RequiredFilesDesc; static final String[] RequiredFilesCode; static final int TYPE_TRIM_NONE; static final int TYPE_TRIM_LEFT; static final int TYPE_TRIM_RIGHT; static final int TYPE_TRIM_BOTH; static final String[] type_trim_code; static final String[] type_trim_desc; static final String STRING_SEPARATOR; }
@Test
public void testNullDateCell() throws Exception {
  // Fixture: a sheet whose date-styled cell carries no <v> element.
  // The styled cell that does carry a value must be typed as DATE, and
  // the value-less one must come back as a null cell, not a blank one.
  final String id = "1";
  final String title = "Sheet 1";
  XSSFReader workbookReader = mockXSSFReader( id, SHEET_DATE_NO_V,
      mockSharedStringsTable( "Some Date" ),
      mockStylesTable(
          Collections.singletonMap( 2, 165 ),
          Collections.singletonMap( 165, "M/D/YYYY" ) ) );
  StaxPoiSheet sheet = new StaxPoiSheet( workbookReader, title, id );
  KCell cell = sheet.getRow( 1 )[ 0 ];
  assertNotNull( cell );
  assertEquals( KCellType.DATE, cell.getType() );
  cell = sheet.getRow( 2 )[ 0 ];
  assertNull( "cell must be null", cell );
}
@Override
public KCell[] getRow( int rownr ) {
  // Returns the cells of 0-based row 'rownr', streaming the sheet XML with a
  // forward-only StAX reader. The spreadsheet's 'r' attribute is 1-based,
  // hence the recurring 'rownr + 1' conversions below.
  if ( rownr < 0 || rownr >= numRows ) {
    throw new ArrayIndexOutOfBoundsException( rownr );
  }
  // Rows before the first physically present row are simply empty.
  if ( rownr + 1 < firstRow ) {
    return new KCell[0];
  }
  // Cache hit: the requested row is the one parsed most recently.
  // NOTE(review): the cache is deliberately bypassed for rownr == 0 —
  // confirm whether that is required or an oversight.
  if ( rownr > 0 && currentRow == rownr + 1 ) {
    return currentRowCells;
  }
  try {
    // Seeking backwards (or re-reading the current position) requires
    // restarting the forward-only stream from the top of the sheet.
    if ( currentRow >= rownr + 1 ) {
      resetSheetReader();
    }
    while ( sheetReader.hasNext() ) {
      int event = sheetReader.next();
      if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) {
        String rowIndicator = sheetReader.getAttributeValue( null, "r" );
        currentRow = Integer.parseInt( rowIndicator );
        // Skip <row> elements that precede the requested row.
        // NOTE(review): if the requested row is absent (a gap in the XML),
        // the next present row is parsed and returned in its place —
        // confirm callers expect this.
        if ( currentRow < rownr + 1 ) {
          continue;
        }
        currentRowCells = parseRow();
        return currentRowCells;
      }
    }
  } catch ( Exception e ) {
    // Any StAX/parse failure is surfaced as an unchecked exception.
    throw new RuntimeException( e );
  }
  // Ran off the end of the sheet: record the true row count and report
  // the requested row as empty.
  numRows = currentRow;
  return new KCell[] {};
}
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
@Test
public void testReadSameRow() throws Exception {
  // Requesting the same row twice must go through the row cache and
  // yield identical content on both calls.
  final KSheet sheet = getSampleSheet();
  KCell[] cells = sheet.getRow( 3 );
  assertEquals( "Two", cells[1].getValue() );
  cells = sheet.getRow( 3 );
  assertEquals( "Two", cells[1].getValue() );
}
@Override
public KCell[] getRow( int rownr ) {
  // Returns the cells of 0-based row 'rownr', streaming the sheet XML with a
  // forward-only StAX reader. The spreadsheet's 'r' attribute is 1-based,
  // hence the recurring 'rownr + 1' conversions below.
  if ( rownr < 0 || rownr >= numRows ) {
    throw new ArrayIndexOutOfBoundsException( rownr );
  }
  // Rows before the first physically present row are simply empty.
  if ( rownr + 1 < firstRow ) {
    return new KCell[0];
  }
  // Cache hit: the requested row is the one parsed most recently.
  // NOTE(review): the cache is deliberately bypassed for rownr == 0 —
  // confirm whether that is required or an oversight.
  if ( rownr > 0 && currentRow == rownr + 1 ) {
    return currentRowCells;
  }
  try {
    // Seeking backwards (or re-reading the current position) requires
    // restarting the forward-only stream from the top of the sheet.
    if ( currentRow >= rownr + 1 ) {
      resetSheetReader();
    }
    while ( sheetReader.hasNext() ) {
      int event = sheetReader.next();
      if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) {
        String rowIndicator = sheetReader.getAttributeValue( null, "r" );
        currentRow = Integer.parseInt( rowIndicator );
        // Skip <row> elements that precede the requested row.
        // NOTE(review): if the requested row is absent (a gap in the XML),
        // the next present row is parsed and returned in its place —
        // confirm callers expect this.
        if ( currentRow < rownr + 1 ) {
          continue;
        }
        currentRowCells = parseRow();
        return currentRowCells;
      }
    }
  } catch ( Exception e ) {
    // Any StAX/parse failure is surfaced as an unchecked exception.
    throw new RuntimeException( e );
  }
  // Ran off the end of the sheet: record the true row count and report
  // the requested row as empty.
  numRows = currentRow;
  return new KCell[] {};
}
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
// Random-access read: fetch a later row (4) first, then seek back to an
// earlier row (2). The sheet must transparently rewind its streaming reader
// and still return the correct cells for the earlier row.
@Test
public void testReadRowRA() throws Exception {
  KSheet sheet1 = getSampleSheet(); // fixture loader defined elsewhere in this test class
  KCell[] row = sheet1.getRow( 4 );
  assertEquals( "Three", row[1].getValue() );
  row = sheet1.getRow( 2 );
  assertEquals( "One", row[1].getValue() );
}
// Returns the cells of 0-based row 'rownr', streaming forward through the
// sheet XML and rewinding (via resetSheetReader) when a row at or before the
// current position is requested again.
@Override
public KCell[] getRow( int rownr ) {
  if ( rownr < 0 || rownr >= numRows ) {
    throw new ArrayIndexOutOfBoundsException( rownr );
  }
  // Rows before the sheet's first populated row are empty by definition.
  if ( rownr + 1 < firstRow ) {
    return new KCell[0];
  }
  // Cache hit: 'currentRow' is 1-based, so it equals rownr + 1 when the
  // requested row is the one parsed last.
  if ( rownr > 0 && currentRow == rownr + 1 ) {
    return currentRowCells;
  }
  try {
    // Already at or past the target: the pull parser cannot seek backwards,
    // so reopen the sheet stream from the top.
    if ( currentRow >= rownr + 1 ) {
      resetSheetReader();
    }
    while ( sheetReader.hasNext() ) {
      int event = sheetReader.next();
      if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) {
        // The "r" attribute carries the 1-based row number of this <row> element.
        String rowIndicator = sheetReader.getAttributeValue( null, "r" );
        currentRow = Integer.parseInt( rowIndicator );
        if ( currentRow < rownr + 1 ) {
          continue;
        }
        // NOTE(review): if the sheet is sparse and row rownr+1 is absent from
        // the XML, this parses and returns the next existing row instead --
        // confirm whether callers rely on that behavior.
        currentRowCells = parseRow();
        return currentRowCells;
      }
    }
  } catch ( Exception e ) {
    throw new RuntimeException( e );
  }
  // Ran off the end of the sheet: record the true row count and report empty.
  numRows = currentRow;
  return new KCell[] {};
}
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
// A row located before the sheet's first populated row must come back as a
// zero-length array rather than null or an exception.
@Test
public void testReadEmptyRow() throws Exception {
  KSheet sheet1 = getSampleSheet(); // fixture loader defined elsewhere in this test class
  KCell[] row = sheet1.getRow( 0 );
  assertEquals( "empty row expected", 0, row.length );
}
// Returns the cells of 0-based row 'rownr', streaming forward through the
// sheet XML and rewinding (via resetSheetReader) when a row at or before the
// current position is requested again.
@Override
public KCell[] getRow( int rownr ) {
  if ( rownr < 0 || rownr >= numRows ) {
    throw new ArrayIndexOutOfBoundsException( rownr );
  }
  // Rows before the sheet's first populated row are empty by definition.
  if ( rownr + 1 < firstRow ) {
    return new KCell[0];
  }
  // Cache hit: 'currentRow' is 1-based, so it equals rownr + 1 when the
  // requested row is the one parsed last.
  if ( rownr > 0 && currentRow == rownr + 1 ) {
    return currentRowCells;
  }
  try {
    // Already at or past the target: the pull parser cannot seek backwards,
    // so reopen the sheet stream from the top.
    if ( currentRow >= rownr + 1 ) {
      resetSheetReader();
    }
    while ( sheetReader.hasNext() ) {
      int event = sheetReader.next();
      if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) {
        // The "r" attribute carries the 1-based row number of this <row> element.
        String rowIndicator = sheetReader.getAttributeValue( null, "r" );
        currentRow = Integer.parseInt( rowIndicator );
        if ( currentRow < rownr + 1 ) {
          continue;
        }
        // NOTE(review): if the sheet is sparse and row rownr+1 is absent from
        // the XML, this parses and returns the next existing row instead --
        // confirm whether callers rely on that behavior.
        currentRowCells = parseRow();
        return currentRowCells;
      }
    }
  } catch ( Exception e ) {
    throw new RuntimeException( e );
  }
  // Ran off the end of the sheet: record the true row count and report empty.
  numRows = currentRow;
  return new KCell[] {};
}
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
StaxPoiSheet implements KSheet { @Override public KCell[] getRow( int rownr ) { if ( rownr < 0 || rownr >= numRows ) { throw new ArrayIndexOutOfBoundsException( rownr ); } if ( rownr + 1 < firstRow ) { return new KCell[0]; } if ( rownr > 0 && currentRow == rownr + 1 ) { return currentRowCells; } try { if ( currentRow >= rownr + 1 ) { resetSheetReader(); } while ( sheetReader.hasNext() ) { int event = sheetReader.next(); if ( event == XMLStreamConstants.START_ELEMENT && sheetReader.getLocalName().equals( "row" ) ) { String rowIndicator = sheetReader.getAttributeValue( null, "r" ); currentRow = Integer.parseInt( rowIndicator ); if ( currentRow < rownr + 1 ) { continue; } currentRowCells = parseRow(); return currentRowCells; } } } catch ( Exception e ) { throw new RuntimeException( e ); } numRows = currentRow; return new KCell[] {}; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
// Spot-checks individual cells for both value and KCellType (label vs. date).
@Test
public void testReadCells() throws Exception {
  KSheet sheet = getSampleSheet(); // fixture loader defined elsewhere in this test class
  KCell cell = sheet.getCell( 1, 2 );
  assertEquals( "One", cell.getValue() );
  assertEquals( KCellType.LABEL, cell.getType() );
  cell = sheet.getCell( 2, 2 );
  assertEquals( KCellType.DATE, cell.getType() );
  // 1283817600000L is an epoch-millis timestamp -- presumably the date stored
  // in the sample workbook; confirm against the fixture file.
  assertEquals( new Date( 1283817600000L ), cell.getValue() );
  cell = sheet.getCell( 1, 3 );
  assertEquals( "Two", cell.getValue() );
  assertEquals( KCellType.LABEL, cell.getType() );
}
/**
 * Returns the cell at the given 0-based column and row, or null when the
 * coordinates fall outside the populated area of the sheet.
 *
 * Row 0 is served from the pre-read header row; all other rows are fetched
 * through getRow().
 */
@Override
public KCell getCell( int colnr, int rownr ) {
  if ( rownr == 0 && colnr < headerRow.size() ) {
    return new StaxPoiCell( headerRow.get( colnr ), rownr );
  }
  KCell[] row = getRow( rownr );
  // BUG FIX: the original compared 'rownr' against the row's cell count
  // ("rownr < row.length"), which wrongly returned null for valid cells in
  // long rows and allowed an ArrayIndexOutOfBoundsException for columns past
  // the end of short rows. The COLUMN index is what must be bounds-checked
  // against the row length.
  if ( row != null && colnr >= 0 && colnr < row.length ) {
    return row[colnr];
  }
  return null;
}
StaxPoiSheet implements KSheet { @Override public KCell getCell( int colnr, int rownr ) { if ( rownr == 0 && colnr < headerRow.size() ) { return new StaxPoiCell( headerRow.get( colnr ), rownr ); } KCell[] row = getRow( rownr ); if ( row != null && rownr < row.length ) { return row[colnr]; } return null; } }
StaxPoiSheet implements KSheet { @Override public KCell getCell( int colnr, int rownr ) { if ( rownr == 0 && colnr < headerRow.size() ) { return new StaxPoiCell( headerRow.get( colnr ), rownr ); } KCell[] row = getRow( rownr ); if ( row != null && rownr < row.length ) { return row[colnr]; } return null; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); }
StaxPoiSheet implements KSheet { @Override public KCell getCell( int colnr, int rownr ) { if ( rownr == 0 && colnr < headerRow.size() ) { return new StaxPoiCell( headerRow.get( colnr ), rownr ); } KCell[] row = getRow( rownr ); if ( row != null && rownr < row.length ) { return row[colnr]; } return null; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
StaxPoiSheet implements KSheet { @Override public KCell getCell( int colnr, int rownr ) { if ( rownr == 0 && colnr < headerRow.size() ) { return new StaxPoiCell( headerRow.get( colnr ), rownr ); } KCell[] row = getRow( rownr ); if ( row != null && rownr < row.length ) { return row[colnr]; } return null; } StaxPoiSheet( XSSFReader reader, String sheetName, String sheetID ); @Override KCell[] getRow( int rownr ); @Override String getName(); @Override int getRows(); @Override KCell getCell( int colnr, int rownr ); void close(); }
// clone() must produce a copy that compares equal to the original.
// NOTE(review): assumes createTestMeta() is defined elsewhere in this test class.
@Test
public void cloning() throws Exception {
  StepMeta meta = createTestMeta();
  StepMeta clone = (StepMeta) meta.clone();
  assertEquals( meta, clone );
}
/**
 * Creates a copy of this step definition. All metadata is taken over via
 * replaceMeta(); the copy deliberately gets no repository object id, so it
 * is treated as a new, unsaved object.
 */
@Override
public Object clone() {
  StepMeta copy = new StepMeta();
  copy.replaceMeta( this );
  // The clone must not share the original's repository identity.
  copy.setObjectId( null );
  return copy;
}
StepMeta extends SharedObjectBase implements Cloneable, Comparable<StepMeta>, GUIPositionInterface, SharedObjectInterface, CheckResultSourceInterface, ResourceExportInterface, ResourceHolderInterface, AttributesInterface, BaseMeta { @Override public Object clone() { StepMeta stepMeta = new StepMeta(); stepMeta.replaceMeta( this ); stepMeta.setObjectId( null ); return stepMeta; } }
StepMeta extends SharedObjectBase implements Cloneable, Comparable<StepMeta>, GUIPositionInterface, SharedObjectInterface, CheckResultSourceInterface, ResourceExportInterface, ResourceHolderInterface, AttributesInterface, BaseMeta { @Override public Object clone() { StepMeta stepMeta = new StepMeta(); stepMeta.replaceMeta( this ); stepMeta.setObjectId( null ); return stepMeta; } StepMeta( String stepid, String stepname, StepMetaInterface stepMetaInterface ); StepMeta( String stepname, StepMetaInterface stepMetaInterface ); StepMeta(); @Deprecated StepMeta( Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters ); StepMeta( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); StepMeta( ObjectId id_step ); }
StepMeta extends SharedObjectBase implements Cloneable, Comparable<StepMeta>, GUIPositionInterface, SharedObjectInterface, CheckResultSourceInterface, ResourceExportInterface, ResourceHolderInterface, AttributesInterface, BaseMeta { @Override public Object clone() { StepMeta stepMeta = new StepMeta(); stepMeta.replaceMeta( this ); stepMeta.setObjectId( null ); return stepMeta; } StepMeta( String stepid, String stepname, StepMetaInterface stepMetaInterface ); StepMeta( String stepname, StepMetaInterface stepMetaInterface ); StepMeta(); @Deprecated StepMeta( Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters ); StepMeta( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); StepMeta( ObjectId id_step ); @Override String getXML(); String getXML( boolean includeInterface ); void setClusterSchemaAfterLoading( List<ClusterSchema> clusterSchemas ); static StepMeta fromXml( String metaXml ); @Override ObjectId getObjectId(); void setObjectId( ObjectId id ); boolean isDrawn(); boolean isDrawStep(); void setDraw( boolean draw ); void setCopies( int c ); int getCopies(); void drawStep(); void hideStep(); @Override boolean equals( Object obj ); @Override int hashCode(); @Override int compareTo( StepMeta o ); boolean hasChanged(); void setChanged( boolean ch ); void setChanged(); boolean chosesTargetSteps(); @Override Object clone(); void replaceMeta( StepMeta stepMeta ); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); String getStepID(); @Override String getName(); void setName( String sname ); @Override String getDescription(); void setDescription( String description ); @Override void setSelected( boolean sel ); void flipSelected(); @Override boolean isSelected(); void setTerminator(); void setTerminator( boolean t ); boolean hasTerminator(); @Override void setLocation( int x, int y ); @Override void setLocation( Point loc ); @Override Point getLocation(); @Deprecated void 
check( List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ); @SuppressWarnings( "deprecation" ) void check( List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String toString(); boolean isPartitioned(); boolean isTargetPartitioned(); StepPartitioningMeta getStepPartitioningMeta(); void setStepPartitioningMeta( StepPartitioningMeta stepPartitioningMeta ); ClusterSchema getClusterSchema(); void setClusterSchema( ClusterSchema clusterSchema ); boolean isDistributes(); void setDistributes( boolean distributes ); StepErrorMeta getStepErrorMeta(); void setStepErrorMeta( StepErrorMeta stepErrorMeta ); static final StepMeta findStep( List<StepMeta> steps, ObjectId id ); static final StepMeta findStep( List<StepMeta> steps, String stepname ); boolean supportsErrorHandling(); boolean isDoingErrorHandling(); boolean isSendingErrorRowsToStep( StepMeta targetStep ); @Override String getTypeId(); boolean isMapping(); boolean isSingleThreader(); boolean isEtlMetaInject(); boolean isJobExecutor(); boolean isMappingInput(); boolean isMappingOutput(); List<ResourceReference> getResourceDependencies( TransMeta transMeta ); @Deprecated String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository ); @Override @SuppressWarnings( "deprecation" ) String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); List<RemoteStep> getRemoteInputSteps(); void setRemoteInputSteps( List<RemoteStep> remoteInputSteps ); List<RemoteStep> getRemoteOutputSteps(); void setRemoteOutputSteps( List<RemoteStep> remoteOutputSteps ); 
StepPartitioningMeta getTargetStepPartitioningMeta(); void setTargetStepPartitioningMeta( StepPartitioningMeta targetStepPartitioningMeta ); boolean isRepartitioning(); @Override String getHolderType(); boolean isClustered(); void setStepID( String stepid ); void setClusterSchemaName( String clusterSchemaName ); void setParentTransMeta( TransMeta parentTransMeta ); TransMeta getParentTransMeta(); RowDistributionInterface getRowDistribution(); void setRowDistribution( RowDistributionInterface rowDistribution ); String getCopiesString(); void setCopiesString( String copiesString ); @Override void setAttributesMap( Map<String, Map<String, String>> attributesMap ); @Override Map<String, Map<String, String>> getAttributesMap(); @Override void setAttribute( String groupName, String key, String value ); @Override void setAttributes( String groupName, Map<String, String> attributes ); @Override Map<String, String> getAttributes( String groupName ); @Override String getAttribute( String groupName, String key ); boolean isMissing(); }
StepMeta extends SharedObjectBase implements Cloneable, Comparable<StepMeta>, GUIPositionInterface, SharedObjectInterface, CheckResultSourceInterface, ResourceExportInterface, ResourceHolderInterface, AttributesInterface, BaseMeta { @Override public Object clone() { StepMeta stepMeta = new StepMeta(); stepMeta.replaceMeta( this ); stepMeta.setObjectId( null ); return stepMeta; } StepMeta( String stepid, String stepname, StepMetaInterface stepMetaInterface ); StepMeta( String stepname, StepMetaInterface stepMetaInterface ); StepMeta(); @Deprecated StepMeta( Node stepnode, List<DatabaseMeta> databases, Map<String, Counter> counters ); StepMeta( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ); StepMeta( ObjectId id_step ); @Override String getXML(); String getXML( boolean includeInterface ); void setClusterSchemaAfterLoading( List<ClusterSchema> clusterSchemas ); static StepMeta fromXml( String metaXml ); @Override ObjectId getObjectId(); void setObjectId( ObjectId id ); boolean isDrawn(); boolean isDrawStep(); void setDraw( boolean draw ); void setCopies( int c ); int getCopies(); void drawStep(); void hideStep(); @Override boolean equals( Object obj ); @Override int hashCode(); @Override int compareTo( StepMeta o ); boolean hasChanged(); void setChanged( boolean ch ); void setChanged(); boolean chosesTargetSteps(); @Override Object clone(); void replaceMeta( StepMeta stepMeta ); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); String getStepID(); @Override String getName(); void setName( String sname ); @Override String getDescription(); void setDescription( String description ); @Override void setSelected( boolean sel ); void flipSelected(); @Override boolean isSelected(); void setTerminator(); void setTerminator( boolean t ); boolean hasTerminator(); @Override void setLocation( int x, int y ); @Override void setLocation( Point loc ); @Override Point getLocation(); @Deprecated void 
check( List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info ); @SuppressWarnings( "deprecation" ) void check( List<CheckResultInterface> remarks, TransMeta transMeta, RowMetaInterface prev, String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository, IMetaStore metaStore ); @Override String toString(); boolean isPartitioned(); boolean isTargetPartitioned(); StepPartitioningMeta getStepPartitioningMeta(); void setStepPartitioningMeta( StepPartitioningMeta stepPartitioningMeta ); ClusterSchema getClusterSchema(); void setClusterSchema( ClusterSchema clusterSchema ); boolean isDistributes(); void setDistributes( boolean distributes ); StepErrorMeta getStepErrorMeta(); void setStepErrorMeta( StepErrorMeta stepErrorMeta ); static final StepMeta findStep( List<StepMeta> steps, ObjectId id ); static final StepMeta findStep( List<StepMeta> steps, String stepname ); boolean supportsErrorHandling(); boolean isDoingErrorHandling(); boolean isSendingErrorRowsToStep( StepMeta targetStep ); @Override String getTypeId(); boolean isMapping(); boolean isSingleThreader(); boolean isEtlMetaInject(); boolean isJobExecutor(); boolean isMappingInput(); boolean isMappingOutput(); List<ResourceReference> getResourceDependencies( TransMeta transMeta ); @Deprecated String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository ); @Override @SuppressWarnings( "deprecation" ) String exportResources( VariableSpace space, Map<String, ResourceDefinition> definitions, ResourceNamingInterface resourceNamingInterface, Repository repository, IMetaStore metaStore ); List<RemoteStep> getRemoteInputSteps(); void setRemoteInputSteps( List<RemoteStep> remoteInputSteps ); List<RemoteStep> getRemoteOutputSteps(); void setRemoteOutputSteps( List<RemoteStep> remoteOutputSteps ); 
StepPartitioningMeta getTargetStepPartitioningMeta(); void setTargetStepPartitioningMeta( StepPartitioningMeta targetStepPartitioningMeta ); boolean isRepartitioning(); @Override String getHolderType(); boolean isClustered(); void setStepID( String stepid ); void setClusterSchemaName( String clusterSchemaName ); void setParentTransMeta( TransMeta parentTransMeta ); TransMeta getParentTransMeta(); RowDistributionInterface getRowDistribution(); void setRowDistribution( RowDistributionInterface rowDistribution ); String getCopiesString(); void setCopiesString( String copiesString ); @Override void setAttributesMap( Map<String, Map<String, String>> attributesMap ); @Override Map<String, Map<String, String>> getAttributesMap(); @Override void setAttribute( String groupName, String key, String value ); @Override void setAttributes( String groupName, Map<String, String> attributes ); @Override Map<String, String> getAttributes( String groupName ); @Override String getAttribute( String groupName, String key ); boolean isMissing(); static final String XML_TAG; static final String STRING_ID_MAPPING; static final String STRING_ID_SINGLE_THREADER; static final String STRING_ID_ETL_META_INJECT; static final String STRING_ID_JOB_EXECUTOR; static final String STRING_ID_MAPPING_INPUT; static final String STRING_ID_MAPPING_OUTPUT; }
// Exercises GetPropertiesServlet.doGet with xml=Y in jetty mode: the servlet
// is expected to write its payload through the response writer, so the raw
// output stream must NOT begin with an XML declaration.
// NOTE(review): the assertFalse looks counter-intuitive given xml=Y -- it
// pins that the XML goes to getWriter(), not getOutputStream(); confirm.
@Test
public void getContextPath() throws Exception {
  GetPropertiesServlet servlet = new GetPropertiesServlet();
  servlet.setJettyMode( true );
  HttpServletRequest mockHttpServletRequest = mock( HttpServletRequest.class );
  HttpServletResponse mockHttpServletResponse = mock( HttpServletResponse.class );
  StringWriter out = new StringWriter();
  PrintWriter printWriter = new PrintWriter( out );
  when( mockHttpServletRequest.getContextPath() ).thenReturn( GetPropertiesServlet.CONTEXT_PATH );
  when( mockHttpServletRequest.getParameter( "xml" ) ).thenReturn( "Y" );
  when( mockHttpServletResponse.getWriter() ).thenReturn( printWriter );
  // Stub an output stream that captures bytes so the final assertion can
  // inspect what (if anything) was written to it.
  when( mockHttpServletResponse.getOutputStream() ).thenReturn( new ServletOutputStream() {
    private ByteArrayOutputStream baos = new ByteArrayOutputStream();
    @Override
    public void write( int b ) throws IOException {
      baos.write( b );
    }
    public String toString() {
      return baos.toString();
    }
  } );
  servlet.doGet( mockHttpServletRequest, mockHttpServletResponse );
  Assert.assertFalse( mockHttpServletResponse.getOutputStream().toString()
    .startsWith( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" ) );
}
/**
 * @return the fixed URL context path this servlet is registered under.
 */
@Override
public String getContextPath() {
  return CONTEXT_PATH;
}
GetPropertiesServlet extends BodyHttpServlet { @Override public String getContextPath() { return CONTEXT_PATH; } }
GetPropertiesServlet extends BodyHttpServlet { @Override public String getContextPath() { return CONTEXT_PATH; } }
GetPropertiesServlet extends BodyHttpServlet { @Override public String getContextPath() { return CONTEXT_PATH; } @Override String getContextPath(); }
GetPropertiesServlet extends BodyHttpServlet { @Override public String getContextPath() { return CONTEXT_PATH; } @Override String getContextPath(); static final String CONTEXT_PATH; }
// getErrorRowMeta must resolve ${...} variables in the configured field names
// and emit, in order: an integer error-count field plus three string fields
// (description, fields, codes).
@Test
public void testGetErrorRowMeta() {
  VariableSpace vars = new Variables();
  vars.setVariable( "VarNumberErrors", "nbrErrors" );
  vars.setVariable( "VarErrorDescription", "errorDescription" );
  vars.setVariable( "VarErrorFields", "errorFields" );
  vars.setVariable( "VarErrorCodes", "errorCodes" );
  StepErrorMeta testObject =
    new StepErrorMeta( vars, new StepMeta(), new StepMeta(), "${VarNumberErrors}", "${VarErrorDescription}",
      "${VarErrorFields}", "${VarErrorCodes}" );
  RowMetaInterface result = testObject.getErrorRowMeta( 10, "some data was bad", "factId", "BAD131" );
  assertNotNull( result );
  assertEquals( 4, result.size() );
  assertEquals( ValueMetaInterface.TYPE_INTEGER, result.getValueMeta( 0 ).getType() );
  assertEquals( "nbrErrors", result.getValueMeta( 0 ).getName() );
  assertEquals( ValueMetaInterface.TYPE_STRING, result.getValueMeta( 1 ).getType() );
  assertEquals( "errorDescription", result.getValueMeta( 1 ).getName() );
  assertEquals( ValueMetaInterface.TYPE_STRING, result.getValueMeta( 2 ).getType() );
  assertEquals( "errorFields", result.getValueMeta( 2 ).getName() );
  assertEquals( ValueMetaInterface.TYPE_STRING, result.getValueMeta( 3 ).getType() );
  assertEquals( "errorCodes", result.getValueMeta( 3 ).getName() );
}
/**
 * Builds the row metadata describing the extra error-information fields
 * (error count, error descriptions, error field names, error codes).
 * Only fields whose variable-substituted name is non-empty are included,
 * always in that fixed order.
 */
public RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames,
  String errorCodes ) {
  RowMetaInterface errorRowMeta = new RowMeta();

  // The number-of-errors field is the only integer-typed one.
  String countFieldName = variables.environmentSubstitute( getNrErrorsValuename() );
  if ( !Utils.isEmpty( countFieldName ) ) {
    ValueMetaInterface countMeta = new ValueMetaInteger( countFieldName );
    countMeta.setLength( 3 );
    errorRowMeta.addValueMeta( countMeta );
  }

  // The remaining three fields are all plain strings; their order matters.
  String[] stringFieldNames = {
    getErrorDescriptionsValuename(), getErrorFieldsValuename(), getErrorCodesValuename() };
  for ( String rawName : stringFieldNames ) {
    String resolvedName = variables.environmentSubstitute( rawName );
    if ( !Utils.isEmpty( resolvedName ) ) {
      errorRowMeta.addValueMeta( new ValueMetaString( resolvedName ) );
    }
  }

  return errorRowMeta;
}
StepErrorMeta extends ChangedFlag implements XMLInterface, Cloneable { public RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ) { RowMetaInterface row = new RowMeta(); String nrErr = variables.environmentSubstitute( getNrErrorsValuename() ); if ( !Utils.isEmpty( nrErr ) ) { ValueMetaInterface v = new ValueMetaInteger( nrErr ); v.setLength( 3 ); row.addValueMeta( v ); } String errDesc = variables.environmentSubstitute( getErrorDescriptionsValuename() ); if ( !Utils.isEmpty( errDesc ) ) { ValueMetaInterface v = new ValueMetaString( errDesc ); row.addValueMeta( v ); } String errFields = variables.environmentSubstitute( getErrorFieldsValuename() ); if ( !Utils.isEmpty( errFields ) ) { ValueMetaInterface v = new ValueMetaString( errFields ); row.addValueMeta( v ); } String errCodes = variables.environmentSubstitute( getErrorCodesValuename() ); if ( !Utils.isEmpty( errCodes ) ) { ValueMetaInterface v = new ValueMetaString( errCodes ); row.addValueMeta( v ); } return row; } }
StepErrorMeta extends ChangedFlag implements XMLInterface, Cloneable { public RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ) { RowMetaInterface row = new RowMeta(); String nrErr = variables.environmentSubstitute( getNrErrorsValuename() ); if ( !Utils.isEmpty( nrErr ) ) { ValueMetaInterface v = new ValueMetaInteger( nrErr ); v.setLength( 3 ); row.addValueMeta( v ); } String errDesc = variables.environmentSubstitute( getErrorDescriptionsValuename() ); if ( !Utils.isEmpty( errDesc ) ) { ValueMetaInterface v = new ValueMetaString( errDesc ); row.addValueMeta( v ); } String errFields = variables.environmentSubstitute( getErrorFieldsValuename() ); if ( !Utils.isEmpty( errFields ) ) { ValueMetaInterface v = new ValueMetaString( errFields ); row.addValueMeta( v ); } String errCodes = variables.environmentSubstitute( getErrorCodesValuename() ); if ( !Utils.isEmpty( errCodes ) ) { ValueMetaInterface v = new ValueMetaString( errCodes ); row.addValueMeta( v ); } return row; } StepErrorMeta( VariableSpace space, StepMeta sourceStep ); StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep ); StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep, String nrErrorsValuename, String errorDescriptionsValuename, String errorFieldsValuename, String errorCodesValuename ); StepErrorMeta( VariableSpace variables, Node node, List<StepMeta> steps ); }
StepErrorMeta extends ChangedFlag implements XMLInterface, Cloneable { public RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ) { RowMetaInterface row = new RowMeta(); String nrErr = variables.environmentSubstitute( getNrErrorsValuename() ); if ( !Utils.isEmpty( nrErr ) ) { ValueMetaInterface v = new ValueMetaInteger( nrErr ); v.setLength( 3 ); row.addValueMeta( v ); } String errDesc = variables.environmentSubstitute( getErrorDescriptionsValuename() ); if ( !Utils.isEmpty( errDesc ) ) { ValueMetaInterface v = new ValueMetaString( errDesc ); row.addValueMeta( v ); } String errFields = variables.environmentSubstitute( getErrorFieldsValuename() ); if ( !Utils.isEmpty( errFields ) ) { ValueMetaInterface v = new ValueMetaString( errFields ); row.addValueMeta( v ); } String errCodes = variables.environmentSubstitute( getErrorCodesValuename() ); if ( !Utils.isEmpty( errCodes ) ) { ValueMetaInterface v = new ValueMetaString( errCodes ); row.addValueMeta( v ); } return row; } StepErrorMeta( VariableSpace space, StepMeta sourceStep ); StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep ); StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep, String nrErrorsValuename, String errorDescriptionsValuename, String errorFieldsValuename, String errorCodesValuename ); StepErrorMeta( VariableSpace variables, Node node, List<StepMeta> steps ); @Override StepErrorMeta clone(); @Override String getXML(); String getErrorCodesValuename(); void setErrorCodesValuename( String errorCodesValuename ); String getErrorDescriptionsValuename(); void setErrorDescriptionsValuename( String errorDescriptionsValuename ); String getErrorFieldsValuename(); void setErrorFieldsValuename( String errorFieldsValuename ); String getNrErrorsValuename(); void setNrErrorsValuename( String nrErrorsValuename ); StepMeta getTargetStep(); void setTargetStep( StepMeta targetStep ); StepMeta getSourceStep(); 
void setSourceStep( StepMeta sourceStep ); boolean isEnabled(); void setEnabled( boolean enabled ); RowMetaInterface getErrorFields(); RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); void addErrorRowData( Object[] row, int startIndex, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); String getMaxErrors(); void setMaxErrors( String maxErrors ); String getMaxPercentErrors(); void setMaxPercentErrors( String maxPercentErrors ); String getMinPercentRows(); void setMinPercentRows( String minRowsForPercent ); }
StepErrorMeta extends ChangedFlag implements XMLInterface, Cloneable { public RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ) { RowMetaInterface row = new RowMeta(); String nrErr = variables.environmentSubstitute( getNrErrorsValuename() ); if ( !Utils.isEmpty( nrErr ) ) { ValueMetaInterface v = new ValueMetaInteger( nrErr ); v.setLength( 3 ); row.addValueMeta( v ); } String errDesc = variables.environmentSubstitute( getErrorDescriptionsValuename() ); if ( !Utils.isEmpty( errDesc ) ) { ValueMetaInterface v = new ValueMetaString( errDesc ); row.addValueMeta( v ); } String errFields = variables.environmentSubstitute( getErrorFieldsValuename() ); if ( !Utils.isEmpty( errFields ) ) { ValueMetaInterface v = new ValueMetaString( errFields ); row.addValueMeta( v ); } String errCodes = variables.environmentSubstitute( getErrorCodesValuename() ); if ( !Utils.isEmpty( errCodes ) ) { ValueMetaInterface v = new ValueMetaString( errCodes ); row.addValueMeta( v ); } return row; } StepErrorMeta( VariableSpace space, StepMeta sourceStep ); StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep ); StepErrorMeta( VariableSpace space, StepMeta sourceStep, StepMeta targetStep, String nrErrorsValuename, String errorDescriptionsValuename, String errorFieldsValuename, String errorCodesValuename ); StepErrorMeta( VariableSpace variables, Node node, List<StepMeta> steps ); @Override StepErrorMeta clone(); @Override String getXML(); String getErrorCodesValuename(); void setErrorCodesValuename( String errorCodesValuename ); String getErrorDescriptionsValuename(); void setErrorDescriptionsValuename( String errorDescriptionsValuename ); String getErrorFieldsValuename(); void setErrorFieldsValuename( String errorFieldsValuename ); String getNrErrorsValuename(); void setNrErrorsValuename( String nrErrorsValuename ); StepMeta getTargetStep(); void setTargetStep( StepMeta targetStep ); StepMeta getSourceStep(); 
void setSourceStep( StepMeta sourceStep ); boolean isEnabled(); void setEnabled( boolean enabled ); RowMetaInterface getErrorFields(); RowMetaInterface getErrorRowMeta( long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); void addErrorRowData( Object[] row, int startIndex, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); String getMaxErrors(); void setMaxErrors( String maxErrors ); String getMaxPercentErrors(); void setMaxPercentErrors( String maxPercentErrors ); String getMinPercentRows(); void setMinPercentRows( String minRowsForPercent ); static final String XML_ERROR_TAG; static final String XML_SOURCE_STEP_TAG; static final String XML_TARGET_STEP_TAG; }
@Test public void testBaseStepGetLogLevelWontThrowNPEWithNullLog() { when( mockHelper.logChannelInterfaceFactory.create( any(), any( LoggingObjectInterface.class ) ) ).thenAnswer( new Answer<LogChannelInterface>() { @Override public LogChannelInterface answer( InvocationOnMock invocation ) throws Throwable { ( (BaseStep) invocation.getArguments()[ 0 ] ).getLogLevel(); return mockHelper.logChannelInterface; } } ); new BaseStep( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta, mockHelper.trans ) .getLogLevel(); }
@Override public LogLevel getLogLevel() { return log != null ? log.getLogLevel() : null; }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public LogLevel getLogLevel() { return log != null ? log.getLogLevel() : null; } }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public LogLevel getLogLevel() { return log != null ? log.getLogLevel() : null; } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public LogLevel getLogLevel() { return log != null ? log.getLogLevel() : null; } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void cleanup(); @Override long getProcessed(); void setCopy( int cop ); @Override int getCopy(); @Override long getErrors(); @Override void setErrors( long e ); @Override long getLinesRead(); long incrementLinesRead(); long decrementLinesRead(); void setLinesRead( long newLinesReadValue ); @Override long getLinesInput(); long incrementLinesInput(); void setLinesInput( long newLinesInputValue ); @Override long getLinesOutput(); long incrementLinesOutput(); void setLinesOutput( long newLinesOutputValue ); @Override long getLinesWritten(); long incrementLinesWritten(); long decrementLinesWritten(); void setLinesWritten( long newLinesWrittenValue ); @Override long getLinesUpdated(); long incrementLinesUpdated(); void setLinesUpdated( long newLinesUpdatedValue ); @Override long getLinesRejected(); long incrementLinesRejected(); @Override void setLinesRejected( long newLinesRejectedValue ); long getLinesSkipped(); long incrementLinesSkipped(); void setLinesSkipped( long newLinesSkippedValue ); @Override String getStepname(); void setStepname( String stepname ); Trans getDispatcher(); String getStatusDescription(); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); StepDataInterface getStepDataInterface(); void setStepDataInterface( StepDataInterface stepDataInterface ); @Override StepMeta getStepMeta(); void setStepMeta( StepMeta stepMeta ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); @Override Trans getTrans(); @Override void putRow( 
RowMetaInterface rowMeta, Object[] row ); void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void handlePutRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); @Override Object[] getRow(); void setRowHandler( RowHandler rowHandler ); RowHandler getRowHandler(); @Override void identifyErrorOutput(); static void safeModeChecking( RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta ); Object[] getRowFrom( RowSet rowSet ); Object[] handleGetRowFrom( RowSet rowSet ); RowSet findInputRowSet( String sourceStep ); RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ); RowSet findOutputRowSet( String targetStep ); RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ); @Override void setOutputDone(); void dispatch(); boolean isBasic(); boolean isDetailed(); boolean isDebug(); boolean isRowLevel(); void logMinimal( String message ); void logMinimal( String message, Object... arguments ); void logBasic( String message ); void logBasic( String message, Object... arguments ); void logDetailed( String message ); void logDetailed( String message, Object... arguments ); void logDebug( String message ); void logDebug( String message, Object... arguments ); void logRowlevel( String message ); void logRowlevel( String message, Object... arguments ); void logError( String message ); void logError( String message, Throwable e ); void logError( String message, Object... 
arguments ); int getNextClassNr(); boolean outputIsDone(); @Override void stopAll(); @Override boolean isStopped(); @Override boolean isRunning(); @Override boolean isPaused(); @Override void setStopped( boolean stopped ); @Override void setRunning( boolean running ); @Override void pauseRunning(); @Override void resumeRunning(); void setPaused( boolean paused ); void setPaused( AtomicBoolean paused ); boolean isInitialising(); @Override void markStart(); void setInternalVariables(); @Override void markStop(); @Override long getRuntime(); RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ); static final RowMetaInterface getLogFields( String comm ); @Override String toString(); @Override int rowsetOutputSize(); @Override int rowsetInputSize(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Deprecated void stopRunning(); void logSummary(); @Override String getStepID(); @Override List<RowSet> getInputRowSets(); void setInputRowSets( List<RowSet> inputRowSets ); @Override List<RowSet> getOutputRowSets(); void setOutputRowSets( List<RowSet> outputRowSets ); boolean isDistributed(); void setDistributed( boolean distributed ); @Override void addRowListener( RowListener rowListener ); @Override void removeRowListener( RowListener rowListener ); @Override List<RowListener> getRowListeners(); void addResultFile( ResultFile resultFile ); @Override Map<String, ResultFile> getResultFiles(); @Override StepExecutionStatus getStatus(); @Override String getPartitionID(); @Override void setPartitionID( String partitionID ); Map<String, BlockingRowSet> getPartitionTargets(); void setPartitionTargets( Map<String, BlockingRowSet> partitionTargets ); int getRepartitioning(); @Override void setRepartitioning( int repartitioning ); @Override boolean isPartitioned(); @Override void setPartitioned( boolean 
partitioned ); RowMetaInterface getInputRowMeta(); void setInputRowMeta( RowMetaInterface rowMeta ); RowMetaInterface getErrorRowMeta(); void setErrorRowMeta( RowMetaInterface errorRowMeta ); RowMetaInterface getPreviewRowMeta(); void setPreviewRowMeta( RowMetaInterface previewRowMeta ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); String getTypeId(); int getSlaveNr(); int getClusterSize(); int getUniqueStepNrAcrossSlaves(); int getUniqueStepCountAcrossSlaves(); List<ServerSocket> getServerSockets(); void setServerSockets( List<ServerSocket> serverSockets ); @Override void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment ); @Override boolean isUsingThreadPriorityManagment(); @Override void initBeforeStart(); List<StepListener> getStepListeners(); void setStepListeners( List<StepListener> stepListeners ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean canProcessOneRow(); @Override void addStepListener( StepListener stepListener ); @Override boolean isMapping(); SocketRepository getSocketRepository(); void setSocketRepository( SocketRepository socketRepository ); 
@Override String getObjectName(); @Override LogChannelInterface getLogChannel(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectory getRepositoryDirectory(); @Override String getObjectCopy(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); static void closeQuietly( Closeable cl ); @Override String getContainerObjectId(); void setCarteObjectId( String containerObjectId ); @Override void batchComplete(); List<RemoteStep> getRemoteInputSteps(); List<RemoteStep> getRemoteOutputSteps(); @Override Date getRegistrationDate(); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); @Override Repository getRepository(); @Override void setRepository( Repository repository ); @Override IMetaStore getMetaStore(); @Override void setMetaStore( IMetaStore metaStore ); @Override int getCurrentOutputRowSetNr(); @Override void setCurrentOutputRowSetNr( int index ); @Override int getCurrentInputRowSetNr(); @Override void setCurrentInputRowSetNr( int index ); @Override Map<String, Object> getExtensionDataMap(); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public LogLevel getLogLevel() { return log != null ? log.getLogLevel() : null; } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void cleanup(); @Override long getProcessed(); void setCopy( int cop ); @Override int getCopy(); @Override long getErrors(); @Override void setErrors( long e ); @Override long getLinesRead(); long incrementLinesRead(); long decrementLinesRead(); void setLinesRead( long newLinesReadValue ); @Override long getLinesInput(); long incrementLinesInput(); void setLinesInput( long newLinesInputValue ); @Override long getLinesOutput(); long incrementLinesOutput(); void setLinesOutput( long newLinesOutputValue ); @Override long getLinesWritten(); long incrementLinesWritten(); long decrementLinesWritten(); void setLinesWritten( long newLinesWrittenValue ); @Override long getLinesUpdated(); long incrementLinesUpdated(); void setLinesUpdated( long newLinesUpdatedValue ); @Override long getLinesRejected(); long incrementLinesRejected(); @Override void setLinesRejected( long newLinesRejectedValue ); long getLinesSkipped(); long incrementLinesSkipped(); void setLinesSkipped( long newLinesSkippedValue ); @Override String getStepname(); void setStepname( String stepname ); Trans getDispatcher(); String getStatusDescription(); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); StepDataInterface getStepDataInterface(); void setStepDataInterface( StepDataInterface stepDataInterface ); @Override StepMeta getStepMeta(); void setStepMeta( StepMeta stepMeta ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); @Override Trans getTrans(); @Override void putRow( 
RowMetaInterface rowMeta, Object[] row ); void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void handlePutRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); @Override Object[] getRow(); void setRowHandler( RowHandler rowHandler ); RowHandler getRowHandler(); @Override void identifyErrorOutput(); static void safeModeChecking( RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta ); Object[] getRowFrom( RowSet rowSet ); Object[] handleGetRowFrom( RowSet rowSet ); RowSet findInputRowSet( String sourceStep ); RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ); RowSet findOutputRowSet( String targetStep ); RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ); @Override void setOutputDone(); void dispatch(); boolean isBasic(); boolean isDetailed(); boolean isDebug(); boolean isRowLevel(); void logMinimal( String message ); void logMinimal( String message, Object... arguments ); void logBasic( String message ); void logBasic( String message, Object... arguments ); void logDetailed( String message ); void logDetailed( String message, Object... arguments ); void logDebug( String message ); void logDebug( String message, Object... arguments ); void logRowlevel( String message ); void logRowlevel( String message, Object... arguments ); void logError( String message ); void logError( String message, Throwable e ); void logError( String message, Object... 
arguments ); int getNextClassNr(); boolean outputIsDone(); @Override void stopAll(); @Override boolean isStopped(); @Override boolean isRunning(); @Override boolean isPaused(); @Override void setStopped( boolean stopped ); @Override void setRunning( boolean running ); @Override void pauseRunning(); @Override void resumeRunning(); void setPaused( boolean paused ); void setPaused( AtomicBoolean paused ); boolean isInitialising(); @Override void markStart(); void setInternalVariables(); @Override void markStop(); @Override long getRuntime(); RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ); static final RowMetaInterface getLogFields( String comm ); @Override String toString(); @Override int rowsetOutputSize(); @Override int rowsetInputSize(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Deprecated void stopRunning(); void logSummary(); @Override String getStepID(); @Override List<RowSet> getInputRowSets(); void setInputRowSets( List<RowSet> inputRowSets ); @Override List<RowSet> getOutputRowSets(); void setOutputRowSets( List<RowSet> outputRowSets ); boolean isDistributed(); void setDistributed( boolean distributed ); @Override void addRowListener( RowListener rowListener ); @Override void removeRowListener( RowListener rowListener ); @Override List<RowListener> getRowListeners(); void addResultFile( ResultFile resultFile ); @Override Map<String, ResultFile> getResultFiles(); @Override StepExecutionStatus getStatus(); @Override String getPartitionID(); @Override void setPartitionID( String partitionID ); Map<String, BlockingRowSet> getPartitionTargets(); void setPartitionTargets( Map<String, BlockingRowSet> partitionTargets ); int getRepartitioning(); @Override void setRepartitioning( int repartitioning ); @Override boolean isPartitioned(); @Override void setPartitioned( boolean 
partitioned ); RowMetaInterface getInputRowMeta(); void setInputRowMeta( RowMetaInterface rowMeta ); RowMetaInterface getErrorRowMeta(); void setErrorRowMeta( RowMetaInterface errorRowMeta ); RowMetaInterface getPreviewRowMeta(); void setPreviewRowMeta( RowMetaInterface previewRowMeta ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); String getTypeId(); int getSlaveNr(); int getClusterSize(); int getUniqueStepNrAcrossSlaves(); int getUniqueStepCountAcrossSlaves(); List<ServerSocket> getServerSockets(); void setServerSockets( List<ServerSocket> serverSockets ); @Override void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment ); @Override boolean isUsingThreadPriorityManagment(); @Override void initBeforeStart(); List<StepListener> getStepListeners(); void setStepListeners( List<StepListener> stepListeners ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean canProcessOneRow(); @Override void addStepListener( StepListener stepListener ); @Override boolean isMapping(); SocketRepository getSocketRepository(); void setSocketRepository( SocketRepository socketRepository ); 
@Override String getObjectName(); @Override LogChannelInterface getLogChannel(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectory getRepositoryDirectory(); @Override String getObjectCopy(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); static void closeQuietly( Closeable cl ); @Override String getContainerObjectId(); void setCarteObjectId( String containerObjectId ); @Override void batchComplete(); List<RemoteStep> getRemoteInputSteps(); List<RemoteStep> getRemoteOutputSteps(); @Override Date getRegistrationDate(); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); @Override Repository getRepository(); @Override void setRepository( Repository repository ); @Override IMetaStore getMetaStore(); @Override void setMetaStore( IMetaStore metaStore ); @Override int getCurrentOutputRowSetNr(); @Override void setCurrentOutputRowSetNr( int index ); @Override int getCurrentInputRowSetNr(); @Override void setCurrentInputRowSetNr( int index ); @Override Map<String, Object> getExtensionDataMap(); @Deprecated public long linesRead; @Deprecated public long linesWritten; @Deprecated public long linesInput; @Deprecated public long linesOutput; @Deprecated public long linesUpdated; @Deprecated public long linesSkipped; @Deprecated public long linesRejected; public boolean first; public boolean terminator; public List<Object[]> terminator_rows; }
@Test public void testBuildLog() throws KettleValueException { BaseStep testObject = new BaseStep( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta, mockHelper.trans ); Date startDate = new Date( (long) 123 ); Date endDate = new Date( (long) 125 ); RowMetaAndData result = testObject.buildLog( "myStepName", 13, 123, 234, 345, 456, 567, startDate, endDate ); assertNotNull( result ); assertEquals( 9, result.size() ); assertEquals( ValueMetaInterface.TYPE_STRING, result.getValueMeta( 0 ).getType() ); assertEquals( "myStepName", result.getString( 0, "default" ) ); assertEquals( ValueMetaInterface.TYPE_NUMBER, result.getValueMeta( 1 ).getType() ); assertEquals( new Double( 13.0 ), Double.valueOf( result.getNumber( 1, 0.1 ) ) ); assertEquals( ValueMetaInterface.TYPE_NUMBER, result.getValueMeta( 2 ).getType() ); assertEquals( new Double( 123 ), Double.valueOf( result.getNumber( 2, 0.1 ) ) ); assertEquals( ValueMetaInterface.TYPE_NUMBER, result.getValueMeta( 3 ).getType() ); assertEquals( new Double( 234 ), Double.valueOf( result.getNumber( 3, 0.1 ) ) ); assertEquals( ValueMetaInterface.TYPE_NUMBER, result.getValueMeta( 4 ).getType() ); assertEquals( new Double( 345 ), Double.valueOf( result.getNumber( 4, 0.1 ) ) ); assertEquals( ValueMetaInterface.TYPE_NUMBER, result.getValueMeta( 5 ).getType() ); assertEquals( new Double( 456 ), Double.valueOf( result.getNumber( 5, 0.1 ) ) ); assertEquals( ValueMetaInterface.TYPE_NUMBER, result.getValueMeta( 6 ).getType() ); assertEquals( new Double( 567 ), Double.valueOf( result.getNumber( 6, 0.1 ) ) ); assertEquals( ValueMetaInterface.TYPE_DATE, result.getValueMeta( 7 ).getType() ); assertEquals( startDate, result.getDate( 7, Calendar.getInstance().getTime() ) ); assertEquals( ValueMetaInterface.TYPE_DATE, result.getValueMeta( 8 ).getType() ); assertEquals( endDate, result.getDate( 8, Calendar.getInstance().getTime() ) ); }
public RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ) { RowMetaInterface r = new RowMeta(); Object[] data = new Object[ 9 ]; int nr = 0; r.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "BaseStep.ColumnName.Stepname" ) ) ); data[ nr ] = sname; nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Copy" ) ) ); data[ nr ] = new Double( copynr ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesReaded" ) ) ); data[ nr ] = new Double( lines_read ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesWritten" ) ) ); data[ nr ] = new Double( lines_written ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesUpdated" ) ) ); data[ nr ] = new Double( lines_updated ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesSkipped" ) ) ); data[ nr ] = new Double( lines_skipped ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Errors" ) ) ); data[ nr ] = new Double( errors ); nr++; r.addValueMeta( new ValueMetaDate( "start_date" ) ); data[ nr ] = start_date; nr++; r.addValueMeta( new ValueMetaDate( "end_date" ) ); data[ nr ] = end_date; nr++; return new RowMetaAndData( r, data ); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { public RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ) { RowMetaInterface r = new RowMeta(); Object[] data = new Object[ 9 ]; int nr = 0; r.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "BaseStep.ColumnName.Stepname" ) ) ); data[ nr ] = sname; nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Copy" ) ) ); data[ nr ] = new Double( copynr ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesReaded" ) ) ); data[ nr ] = new Double( lines_read ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesWritten" ) ) ); data[ nr ] = new Double( lines_written ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesUpdated" ) ) ); data[ nr ] = new Double( lines_updated ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesSkipped" ) ) ); data[ nr ] = new Double( lines_skipped ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Errors" ) ) ); data[ nr ] = new Double( errors ); nr++; r.addValueMeta( new ValueMetaDate( "start_date" ) ); data[ nr ] = start_date; nr++; r.addValueMeta( new ValueMetaDate( "end_date" ) ); data[ nr ] = end_date; nr++; return new RowMetaAndData( r, data ); } }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { public RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ) { RowMetaInterface r = new RowMeta(); Object[] data = new Object[ 9 ]; int nr = 0; r.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "BaseStep.ColumnName.Stepname" ) ) ); data[ nr ] = sname; nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Copy" ) ) ); data[ nr ] = new Double( copynr ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesReaded" ) ) ); data[ nr ] = new Double( lines_read ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesWritten" ) ) ); data[ nr ] = new Double( lines_written ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesUpdated" ) ) ); data[ nr ] = new Double( lines_updated ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesSkipped" ) ) ); data[ nr ] = new Double( lines_skipped ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Errors" ) ) ); data[ nr ] = new Double( errors ); nr++; r.addValueMeta( new ValueMetaDate( "start_date" ) ); data[ nr ] = start_date; nr++; r.addValueMeta( new ValueMetaDate( "end_date" ) ); data[ nr ] = end_date; nr++; return new RowMetaAndData( r, data ); } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { public RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ) { RowMetaInterface r = new RowMeta(); Object[] data = new Object[ 9 ]; int nr = 0; r.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "BaseStep.ColumnName.Stepname" ) ) ); data[ nr ] = sname; nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Copy" ) ) ); data[ nr ] = new Double( copynr ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesReaded" ) ) ); data[ nr ] = new Double( lines_read ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesWritten" ) ) ); data[ nr ] = new Double( lines_written ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesUpdated" ) ) ); data[ nr ] = new Double( lines_updated ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesSkipped" ) ) ); data[ nr ] = new Double( lines_skipped ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Errors" ) ) ); data[ nr ] = new Double( errors ); nr++; r.addValueMeta( new ValueMetaDate( "start_date" ) ); data[ nr ] = start_date; nr++; r.addValueMeta( new ValueMetaDate( "end_date" ) ); data[ nr ] = end_date; nr++; return new RowMetaAndData( r, data ); } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void cleanup(); @Override long getProcessed(); void setCopy( int cop ); @Override int getCopy(); @Override long getErrors(); 
@Override void setErrors( long e ); @Override long getLinesRead(); long incrementLinesRead(); long decrementLinesRead(); void setLinesRead( long newLinesReadValue ); @Override long getLinesInput(); long incrementLinesInput(); void setLinesInput( long newLinesInputValue ); @Override long getLinesOutput(); long incrementLinesOutput(); void setLinesOutput( long newLinesOutputValue ); @Override long getLinesWritten(); long incrementLinesWritten(); long decrementLinesWritten(); void setLinesWritten( long newLinesWrittenValue ); @Override long getLinesUpdated(); long incrementLinesUpdated(); void setLinesUpdated( long newLinesUpdatedValue ); @Override long getLinesRejected(); long incrementLinesRejected(); @Override void setLinesRejected( long newLinesRejectedValue ); long getLinesSkipped(); long incrementLinesSkipped(); void setLinesSkipped( long newLinesSkippedValue ); @Override String getStepname(); void setStepname( String stepname ); Trans getDispatcher(); String getStatusDescription(); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); StepDataInterface getStepDataInterface(); void setStepDataInterface( StepDataInterface stepDataInterface ); @Override StepMeta getStepMeta(); void setStepMeta( StepMeta stepMeta ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); @Override Trans getTrans(); @Override void putRow( RowMetaInterface rowMeta, Object[] row ); void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void handlePutRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); @Override Object[] getRow(); void setRowHandler( RowHandler rowHandler ); RowHandler getRowHandler(); @Override void identifyErrorOutput(); static void safeModeChecking( RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta ); Object[] getRowFrom( RowSet 
rowSet ); Object[] handleGetRowFrom( RowSet rowSet ); RowSet findInputRowSet( String sourceStep ); RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ); RowSet findOutputRowSet( String targetStep ); RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ); @Override void setOutputDone(); void dispatch(); boolean isBasic(); boolean isDetailed(); boolean isDebug(); boolean isRowLevel(); void logMinimal( String message ); void logMinimal( String message, Object... arguments ); void logBasic( String message ); void logBasic( String message, Object... arguments ); void logDetailed( String message ); void logDetailed( String message, Object... arguments ); void logDebug( String message ); void logDebug( String message, Object... arguments ); void logRowlevel( String message ); void logRowlevel( String message, Object... arguments ); void logError( String message ); void logError( String message, Throwable e ); void logError( String message, Object... arguments ); int getNextClassNr(); boolean outputIsDone(); @Override void stopAll(); @Override boolean isStopped(); @Override boolean isRunning(); @Override boolean isPaused(); @Override void setStopped( boolean stopped ); @Override void setRunning( boolean running ); @Override void pauseRunning(); @Override void resumeRunning(); void setPaused( boolean paused ); void setPaused( AtomicBoolean paused ); boolean isInitialising(); @Override void markStart(); void setInternalVariables(); @Override void markStop(); @Override long getRuntime(); RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ); static final RowMetaInterface getLogFields( String comm ); @Override String toString(); @Override int rowsetOutputSize(); @Override int rowsetInputSize(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Deprecated void 
stopRunning(); void logSummary(); @Override String getStepID(); @Override List<RowSet> getInputRowSets(); void setInputRowSets( List<RowSet> inputRowSets ); @Override List<RowSet> getOutputRowSets(); void setOutputRowSets( List<RowSet> outputRowSets ); boolean isDistributed(); void setDistributed( boolean distributed ); @Override void addRowListener( RowListener rowListener ); @Override void removeRowListener( RowListener rowListener ); @Override List<RowListener> getRowListeners(); void addResultFile( ResultFile resultFile ); @Override Map<String, ResultFile> getResultFiles(); @Override StepExecutionStatus getStatus(); @Override String getPartitionID(); @Override void setPartitionID( String partitionID ); Map<String, BlockingRowSet> getPartitionTargets(); void setPartitionTargets( Map<String, BlockingRowSet> partitionTargets ); int getRepartitioning(); @Override void setRepartitioning( int repartitioning ); @Override boolean isPartitioned(); @Override void setPartitioned( boolean partitioned ); RowMetaInterface getInputRowMeta(); void setInputRowMeta( RowMetaInterface rowMeta ); RowMetaInterface getErrorRowMeta(); void setErrorRowMeta( RowMetaInterface errorRowMeta ); RowMetaInterface getPreviewRowMeta(); void setPreviewRowMeta( RowMetaInterface previewRowMeta ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override 
void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); String getTypeId(); int getSlaveNr(); int getClusterSize(); int getUniqueStepNrAcrossSlaves(); int getUniqueStepCountAcrossSlaves(); List<ServerSocket> getServerSockets(); void setServerSockets( List<ServerSocket> serverSockets ); @Override void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment ); @Override boolean isUsingThreadPriorityManagment(); @Override void initBeforeStart(); List<StepListener> getStepListeners(); void setStepListeners( List<StepListener> stepListeners ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean canProcessOneRow(); @Override void addStepListener( StepListener stepListener ); @Override boolean isMapping(); SocketRepository getSocketRepository(); void setSocketRepository( SocketRepository socketRepository ); @Override String getObjectName(); @Override LogChannelInterface getLogChannel(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectory getRepositoryDirectory(); @Override String getObjectCopy(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); static void closeQuietly( Closeable cl ); @Override String getContainerObjectId(); void setCarteObjectId( String containerObjectId ); @Override void batchComplete(); List<RemoteStep> getRemoteInputSteps(); List<RemoteStep> getRemoteOutputSteps(); @Override Date getRegistrationDate(); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); @Override 
Repository getRepository(); @Override void setRepository( Repository repository ); @Override IMetaStore getMetaStore(); @Override void setMetaStore( IMetaStore metaStore ); @Override int getCurrentOutputRowSetNr(); @Override void setCurrentOutputRowSetNr( int index ); @Override int getCurrentInputRowSetNr(); @Override void setCurrentInputRowSetNr( int index ); @Override Map<String, Object> getExtensionDataMap(); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { public RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ) { RowMetaInterface r = new RowMeta(); Object[] data = new Object[ 9 ]; int nr = 0; r.addValueMeta( new ValueMetaString( BaseMessages.getString( PKG, "BaseStep.ColumnName.Stepname" ) ) ); data[ nr ] = sname; nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Copy" ) ) ); data[ nr ] = new Double( copynr ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesReaded" ) ) ); data[ nr ] = new Double( lines_read ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesWritten" ) ) ); data[ nr ] = new Double( lines_written ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesUpdated" ) ) ); data[ nr ] = new Double( lines_updated ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.LinesSkipped" ) ) ); data[ nr ] = new Double( lines_skipped ); nr++; r.addValueMeta( new ValueMetaNumber( BaseMessages.getString( PKG, "BaseStep.ColumnName.Errors" ) ) ); data[ nr ] = new Double( errors ); nr++; r.addValueMeta( new ValueMetaDate( "start_date" ) ); data[ nr ] = start_date; nr++; r.addValueMeta( new ValueMetaDate( "end_date" ) ); data[ nr ] = end_date; nr++; return new RowMetaAndData( r, data ); } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void cleanup(); @Override long getProcessed(); void setCopy( int cop ); @Override int getCopy(); @Override long getErrors(); 
@Override void setErrors( long e ); @Override long getLinesRead(); long incrementLinesRead(); long decrementLinesRead(); void setLinesRead( long newLinesReadValue ); @Override long getLinesInput(); long incrementLinesInput(); void setLinesInput( long newLinesInputValue ); @Override long getLinesOutput(); long incrementLinesOutput(); void setLinesOutput( long newLinesOutputValue ); @Override long getLinesWritten(); long incrementLinesWritten(); long decrementLinesWritten(); void setLinesWritten( long newLinesWrittenValue ); @Override long getLinesUpdated(); long incrementLinesUpdated(); void setLinesUpdated( long newLinesUpdatedValue ); @Override long getLinesRejected(); long incrementLinesRejected(); @Override void setLinesRejected( long newLinesRejectedValue ); long getLinesSkipped(); long incrementLinesSkipped(); void setLinesSkipped( long newLinesSkippedValue ); @Override String getStepname(); void setStepname( String stepname ); Trans getDispatcher(); String getStatusDescription(); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); StepDataInterface getStepDataInterface(); void setStepDataInterface( StepDataInterface stepDataInterface ); @Override StepMeta getStepMeta(); void setStepMeta( StepMeta stepMeta ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); @Override Trans getTrans(); @Override void putRow( RowMetaInterface rowMeta, Object[] row ); void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void handlePutRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); @Override Object[] getRow(); void setRowHandler( RowHandler rowHandler ); RowHandler getRowHandler(); @Override void identifyErrorOutput(); static void safeModeChecking( RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta ); Object[] getRowFrom( RowSet 
rowSet ); Object[] handleGetRowFrom( RowSet rowSet ); RowSet findInputRowSet( String sourceStep ); RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ); RowSet findOutputRowSet( String targetStep ); RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ); @Override void setOutputDone(); void dispatch(); boolean isBasic(); boolean isDetailed(); boolean isDebug(); boolean isRowLevel(); void logMinimal( String message ); void logMinimal( String message, Object... arguments ); void logBasic( String message ); void logBasic( String message, Object... arguments ); void logDetailed( String message ); void logDetailed( String message, Object... arguments ); void logDebug( String message ); void logDebug( String message, Object... arguments ); void logRowlevel( String message ); void logRowlevel( String message, Object... arguments ); void logError( String message ); void logError( String message, Throwable e ); void logError( String message, Object... arguments ); int getNextClassNr(); boolean outputIsDone(); @Override void stopAll(); @Override boolean isStopped(); @Override boolean isRunning(); @Override boolean isPaused(); @Override void setStopped( boolean stopped ); @Override void setRunning( boolean running ); @Override void pauseRunning(); @Override void resumeRunning(); void setPaused( boolean paused ); void setPaused( AtomicBoolean paused ); boolean isInitialising(); @Override void markStart(); void setInternalVariables(); @Override void markStop(); @Override long getRuntime(); RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ); static final RowMetaInterface getLogFields( String comm ); @Override String toString(); @Override int rowsetOutputSize(); @Override int rowsetInputSize(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Deprecated void 
stopRunning(); void logSummary(); @Override String getStepID(); @Override List<RowSet> getInputRowSets(); void setInputRowSets( List<RowSet> inputRowSets ); @Override List<RowSet> getOutputRowSets(); void setOutputRowSets( List<RowSet> outputRowSets ); boolean isDistributed(); void setDistributed( boolean distributed ); @Override void addRowListener( RowListener rowListener ); @Override void removeRowListener( RowListener rowListener ); @Override List<RowListener> getRowListeners(); void addResultFile( ResultFile resultFile ); @Override Map<String, ResultFile> getResultFiles(); @Override StepExecutionStatus getStatus(); @Override String getPartitionID(); @Override void setPartitionID( String partitionID ); Map<String, BlockingRowSet> getPartitionTargets(); void setPartitionTargets( Map<String, BlockingRowSet> partitionTargets ); int getRepartitioning(); @Override void setRepartitioning( int repartitioning ); @Override boolean isPartitioned(); @Override void setPartitioned( boolean partitioned ); RowMetaInterface getInputRowMeta(); void setInputRowMeta( RowMetaInterface rowMeta ); RowMetaInterface getErrorRowMeta(); void setErrorRowMeta( RowMetaInterface errorRowMeta ); RowMetaInterface getPreviewRowMeta(); void setPreviewRowMeta( RowMetaInterface previewRowMeta ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override 
void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); String getTypeId(); int getSlaveNr(); int getClusterSize(); int getUniqueStepNrAcrossSlaves(); int getUniqueStepCountAcrossSlaves(); List<ServerSocket> getServerSockets(); void setServerSockets( List<ServerSocket> serverSockets ); @Override void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment ); @Override boolean isUsingThreadPriorityManagment(); @Override void initBeforeStart(); List<StepListener> getStepListeners(); void setStepListeners( List<StepListener> stepListeners ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override boolean canProcessOneRow(); @Override void addStepListener( StepListener stepListener ); @Override boolean isMapping(); SocketRepository getSocketRepository(); void setSocketRepository( SocketRepository socketRepository ); @Override String getObjectName(); @Override LogChannelInterface getLogChannel(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectory getRepositoryDirectory(); @Override String getObjectCopy(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); static void closeQuietly( Closeable cl ); @Override String getContainerObjectId(); void setCarteObjectId( String containerObjectId ); @Override void batchComplete(); List<RemoteStep> getRemoteInputSteps(); List<RemoteStep> getRemoteOutputSteps(); @Override Date getRegistrationDate(); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); @Override 
Repository getRepository(); @Override void setRepository( Repository repository ); @Override IMetaStore getMetaStore(); @Override void setMetaStore( IMetaStore metaStore ); @Override int getCurrentOutputRowSetNr(); @Override void setCurrentOutputRowSetNr( int index ); @Override int getCurrentInputRowSetNr(); @Override void setCurrentInputRowSetNr( int index ); @Override Map<String, Object> getExtensionDataMap(); @Deprecated public long linesRead; @Deprecated public long linesWritten; @Deprecated public long linesInput; @Deprecated public long linesOutput; @Deprecated public long linesUpdated; @Deprecated public long linesSkipped; @Deprecated public long linesRejected; public boolean first; public boolean terminator; public List<Object[]> terminator_rows; }
@Test public void testCleanup() throws IOException { BaseStep baseStep = new BaseStep( mockHelper.stepMeta, mockHelper.stepDataInterface, 0, mockHelper.transMeta, mockHelper.trans ); ServerSocket serverSocketMock = mock( ServerSocket.class ); doReturn( 0 ).when( serverSocketMock ).getLocalPort(); baseStep.setServerSockets( Collections.singletonList( serverSocketMock ) ); SocketRepository socketRepositoryMock = mock( SocketRepository.class ); baseStep.setSocketRepository( socketRepositoryMock ); baseStep.cleanup(); verify( socketRepositoryMock ).releaseSocket( 0 ); }
@Override public void cleanup() { for ( ServerSocket serverSocket : serverSockets ) { try { socketRepository.releaseSocket( serverSocket.getLocalPort() ); logDetailed( BaseMessages.getString( PKG, "BaseStep.Log.ReleasedServerSocketOnPort", serverSocket.getLocalPort() ) ); } catch ( IOException e ) { logError( "Cleanup: Unable to release server socket (" + serverSocket.getLocalPort() + ")", e ); } } List<RemoteStep> remoteInputSteps = getRemoteInputSteps(); if ( remoteInputSteps != null ) { cleanupRemoteSteps( remoteInputSteps ); } List<RemoteStep> remoteOutputSteps = getRemoteOutputSteps(); if ( remoteOutputSteps != null ) { cleanupRemoteSteps( remoteOutputSteps ); } }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public void cleanup() { for ( ServerSocket serverSocket : serverSockets ) { try { socketRepository.releaseSocket( serverSocket.getLocalPort() ); logDetailed( BaseMessages.getString( PKG, "BaseStep.Log.ReleasedServerSocketOnPort", serverSocket.getLocalPort() ) ); } catch ( IOException e ) { logError( "Cleanup: Unable to release server socket (" + serverSocket.getLocalPort() + ")", e ); } } List<RemoteStep> remoteInputSteps = getRemoteInputSteps(); if ( remoteInputSteps != null ) { cleanupRemoteSteps( remoteInputSteps ); } List<RemoteStep> remoteOutputSteps = getRemoteOutputSteps(); if ( remoteOutputSteps != null ) { cleanupRemoteSteps( remoteOutputSteps ); } } }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public void cleanup() { for ( ServerSocket serverSocket : serverSockets ) { try { socketRepository.releaseSocket( serverSocket.getLocalPort() ); logDetailed( BaseMessages.getString( PKG, "BaseStep.Log.ReleasedServerSocketOnPort", serverSocket.getLocalPort() ) ); } catch ( IOException e ) { logError( "Cleanup: Unable to release server socket (" + serverSocket.getLocalPort() + ")", e ); } } List<RemoteStep> remoteInputSteps = getRemoteInputSteps(); if ( remoteInputSteps != null ) { cleanupRemoteSteps( remoteInputSteps ); } List<RemoteStep> remoteOutputSteps = getRemoteOutputSteps(); if ( remoteOutputSteps != null ) { cleanupRemoteSteps( remoteOutputSteps ); } } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public void cleanup() { for ( ServerSocket serverSocket : serverSockets ) { try { socketRepository.releaseSocket( serverSocket.getLocalPort() ); logDetailed( BaseMessages.getString( PKG, "BaseStep.Log.ReleasedServerSocketOnPort", serverSocket.getLocalPort() ) ); } catch ( IOException e ) { logError( "Cleanup: Unable to release server socket (" + serverSocket.getLocalPort() + ")", e ); } } List<RemoteStep> remoteInputSteps = getRemoteInputSteps(); if ( remoteInputSteps != null ) { cleanupRemoteSteps( remoteInputSteps ); } List<RemoteStep> remoteOutputSteps = getRemoteOutputSteps(); if ( remoteOutputSteps != null ) { cleanupRemoteSteps( remoteOutputSteps ); } } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void cleanup(); @Override long getProcessed(); void setCopy( int cop ); @Override int getCopy(); @Override long getErrors(); @Override void setErrors( long e ); @Override long getLinesRead(); long incrementLinesRead(); long decrementLinesRead(); void setLinesRead( long newLinesReadValue ); @Override long getLinesInput(); long incrementLinesInput(); void setLinesInput( long newLinesInputValue ); @Override long getLinesOutput(); long incrementLinesOutput(); void setLinesOutput( long newLinesOutputValue ); @Override long getLinesWritten(); long incrementLinesWritten(); long decrementLinesWritten(); void setLinesWritten( long newLinesWrittenValue ); @Override long getLinesUpdated(); long incrementLinesUpdated(); void setLinesUpdated( long newLinesUpdatedValue ); @Override long getLinesRejected(); long incrementLinesRejected(); @Override void setLinesRejected( long newLinesRejectedValue ); long getLinesSkipped(); long incrementLinesSkipped(); 
void setLinesSkipped( long newLinesSkippedValue ); @Override String getStepname(); void setStepname( String stepname ); Trans getDispatcher(); String getStatusDescription(); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); StepDataInterface getStepDataInterface(); void setStepDataInterface( StepDataInterface stepDataInterface ); @Override StepMeta getStepMeta(); void setStepMeta( StepMeta stepMeta ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); @Override Trans getTrans(); @Override void putRow( RowMetaInterface rowMeta, Object[] row ); void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void handlePutRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); @Override Object[] getRow(); void setRowHandler( RowHandler rowHandler ); RowHandler getRowHandler(); @Override void identifyErrorOutput(); static void safeModeChecking( RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta ); Object[] getRowFrom( RowSet rowSet ); Object[] handleGetRowFrom( RowSet rowSet ); RowSet findInputRowSet( String sourceStep ); RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ); RowSet findOutputRowSet( String targetStep ); RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ); @Override void setOutputDone(); void dispatch(); boolean isBasic(); boolean isDetailed(); boolean isDebug(); boolean isRowLevel(); void logMinimal( String message ); void logMinimal( String message, Object... arguments ); void logBasic( String message ); void logBasic( String message, Object... arguments ); void logDetailed( String message ); void logDetailed( String message, Object... arguments ); void logDebug( String message ); void logDebug( String message, Object... 
arguments ); void logRowlevel( String message ); void logRowlevel( String message, Object... arguments ); void logError( String message ); void logError( String message, Throwable e ); void logError( String message, Object... arguments ); int getNextClassNr(); boolean outputIsDone(); @Override void stopAll(); @Override boolean isStopped(); @Override boolean isRunning(); @Override boolean isPaused(); @Override void setStopped( boolean stopped ); @Override void setRunning( boolean running ); @Override void pauseRunning(); @Override void resumeRunning(); void setPaused( boolean paused ); void setPaused( AtomicBoolean paused ); boolean isInitialising(); @Override void markStart(); void setInternalVariables(); @Override void markStop(); @Override long getRuntime(); RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ); static final RowMetaInterface getLogFields( String comm ); @Override String toString(); @Override int rowsetOutputSize(); @Override int rowsetInputSize(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Deprecated void stopRunning(); void logSummary(); @Override String getStepID(); @Override List<RowSet> getInputRowSets(); void setInputRowSets( List<RowSet> inputRowSets ); @Override List<RowSet> getOutputRowSets(); void setOutputRowSets( List<RowSet> outputRowSets ); boolean isDistributed(); void setDistributed( boolean distributed ); @Override void addRowListener( RowListener rowListener ); @Override void removeRowListener( RowListener rowListener ); @Override List<RowListener> getRowListeners(); void addResultFile( ResultFile resultFile ); @Override Map<String, ResultFile> getResultFiles(); @Override StepExecutionStatus getStatus(); @Override String getPartitionID(); @Override void setPartitionID( String partitionID ); Map<String, BlockingRowSet> getPartitionTargets(); void 
setPartitionTargets( Map<String, BlockingRowSet> partitionTargets ); int getRepartitioning(); @Override void setRepartitioning( int repartitioning ); @Override boolean isPartitioned(); @Override void setPartitioned( boolean partitioned ); RowMetaInterface getInputRowMeta(); void setInputRowMeta( RowMetaInterface rowMeta ); RowMetaInterface getErrorRowMeta(); void setErrorRowMeta( RowMetaInterface errorRowMeta ); RowMetaInterface getPreviewRowMeta(); void setPreviewRowMeta( RowMetaInterface previewRowMeta ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); String getTypeId(); int getSlaveNr(); int getClusterSize(); int getUniqueStepNrAcrossSlaves(); int getUniqueStepCountAcrossSlaves(); List<ServerSocket> getServerSockets(); void setServerSockets( List<ServerSocket> serverSockets ); @Override void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment ); @Override boolean isUsingThreadPriorityManagment(); @Override void initBeforeStart(); List<StepListener> getStepListeners(); void setStepListeners( List<StepListener> stepListeners ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override 
boolean canProcessOneRow(); @Override void addStepListener( StepListener stepListener ); @Override boolean isMapping(); SocketRepository getSocketRepository(); void setSocketRepository( SocketRepository socketRepository ); @Override String getObjectName(); @Override LogChannelInterface getLogChannel(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectory getRepositoryDirectory(); @Override String getObjectCopy(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); static void closeQuietly( Closeable cl ); @Override String getContainerObjectId(); void setCarteObjectId( String containerObjectId ); @Override void batchComplete(); List<RemoteStep> getRemoteInputSteps(); List<RemoteStep> getRemoteOutputSteps(); @Override Date getRegistrationDate(); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); @Override Repository getRepository(); @Override void setRepository( Repository repository ); @Override IMetaStore getMetaStore(); @Override void setMetaStore( IMetaStore metaStore ); @Override int getCurrentOutputRowSetNr(); @Override void setCurrentOutputRowSetNr( int index ); @Override int getCurrentInputRowSetNr(); @Override void setCurrentInputRowSetNr( int index ); @Override Map<String, Object> getExtensionDataMap(); }
BaseStep implements VariableSpace, StepInterface, LoggingObjectInterface, ExtensionDataInterface { @Override public void cleanup() { for ( ServerSocket serverSocket : serverSockets ) { try { socketRepository.releaseSocket( serverSocket.getLocalPort() ); logDetailed( BaseMessages.getString( PKG, "BaseStep.Log.ReleasedServerSocketOnPort", serverSocket.getLocalPort() ) ); } catch ( IOException e ) { logError( "Cleanup: Unable to release server socket (" + serverSocket.getLocalPort() + ")", e ); } } List<RemoteStep> remoteInputSteps = getRemoteInputSteps(); if ( remoteInputSteps != null ) { cleanupRemoteSteps( remoteInputSteps ); } List<RemoteStep> remoteOutputSteps = getRemoteOutputSteps(); if ( remoteOutputSteps != null ) { cleanupRemoteSteps( remoteOutputSteps ); } } BaseStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta, Trans trans ); @Override boolean init( StepMetaInterface smi, StepDataInterface sdi ); @Override void dispose( StepMetaInterface smi, StepDataInterface sdi ); @Override void cleanup(); @Override long getProcessed(); void setCopy( int cop ); @Override int getCopy(); @Override long getErrors(); @Override void setErrors( long e ); @Override long getLinesRead(); long incrementLinesRead(); long decrementLinesRead(); void setLinesRead( long newLinesReadValue ); @Override long getLinesInput(); long incrementLinesInput(); void setLinesInput( long newLinesInputValue ); @Override long getLinesOutput(); long incrementLinesOutput(); void setLinesOutput( long newLinesOutputValue ); @Override long getLinesWritten(); long incrementLinesWritten(); long decrementLinesWritten(); void setLinesWritten( long newLinesWrittenValue ); @Override long getLinesUpdated(); long incrementLinesUpdated(); void setLinesUpdated( long newLinesUpdatedValue ); @Override long getLinesRejected(); long incrementLinesRejected(); @Override void setLinesRejected( long newLinesRejectedValue ); long getLinesSkipped(); long incrementLinesSkipped(); 
void setLinesSkipped( long newLinesSkippedValue ); @Override String getStepname(); void setStepname( String stepname ); Trans getDispatcher(); String getStatusDescription(); StepMetaInterface getStepMetaInterface(); void setStepMetaInterface( StepMetaInterface stepMetaInterface ); StepDataInterface getStepDataInterface(); void setStepDataInterface( StepDataInterface stepDataInterface ); @Override StepMeta getStepMeta(); void setStepMeta( StepMeta stepMeta ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); @Override Trans getTrans(); @Override void putRow( RowMetaInterface rowMeta, Object[] row ); void putRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void handlePutRowTo( RowMetaInterface rowMeta, Object[] row, RowSet rowSet ); void putError( RowMetaInterface rowMeta, Object[] row, long nrErrors, String errorDescriptions, String fieldNames, String errorCodes ); @Override Object[] getRow(); void setRowHandler( RowHandler rowHandler ); RowHandler getRowHandler(); @Override void identifyErrorOutput(); static void safeModeChecking( RowMetaInterface referenceRowMeta, RowMetaInterface rowMeta ); Object[] getRowFrom( RowSet rowSet ); Object[] handleGetRowFrom( RowSet rowSet ); RowSet findInputRowSet( String sourceStep ); RowSet findInputRowSet( String from, int fromcopy, String to, int tocopy ); RowSet findOutputRowSet( String targetStep ); RowSet findOutputRowSet( String from, int fromcopy, String to, int tocopy ); @Override void setOutputDone(); void dispatch(); boolean isBasic(); boolean isDetailed(); boolean isDebug(); boolean isRowLevel(); void logMinimal( String message ); void logMinimal( String message, Object... arguments ); void logBasic( String message ); void logBasic( String message, Object... arguments ); void logDetailed( String message ); void logDetailed( String message, Object... arguments ); void logDebug( String message ); void logDebug( String message, Object... 
arguments ); void logRowlevel( String message ); void logRowlevel( String message, Object... arguments ); void logError( String message ); void logError( String message, Throwable e ); void logError( String message, Object... arguments ); int getNextClassNr(); boolean outputIsDone(); @Override void stopAll(); @Override boolean isStopped(); @Override boolean isRunning(); @Override boolean isPaused(); @Override void setStopped( boolean stopped ); @Override void setRunning( boolean running ); @Override void pauseRunning(); @Override void resumeRunning(); void setPaused( boolean paused ); void setPaused( AtomicBoolean paused ); boolean isInitialising(); @Override void markStart(); void setInternalVariables(); @Override void markStop(); @Override long getRuntime(); RowMetaAndData buildLog( String sname, int copynr, long lines_read, long lines_written, long lines_updated, long lines_skipped, long errors, Date start_date, Date end_date ); static final RowMetaInterface getLogFields( String comm ); @Override String toString(); @Override int rowsetOutputSize(); @Override int rowsetInputSize(); @Override void stopRunning( StepMetaInterface stepMetaInterface, StepDataInterface stepDataInterface ); @Deprecated void stopRunning(); void logSummary(); @Override String getStepID(); @Override List<RowSet> getInputRowSets(); void setInputRowSets( List<RowSet> inputRowSets ); @Override List<RowSet> getOutputRowSets(); void setOutputRowSets( List<RowSet> outputRowSets ); boolean isDistributed(); void setDistributed( boolean distributed ); @Override void addRowListener( RowListener rowListener ); @Override void removeRowListener( RowListener rowListener ); @Override List<RowListener> getRowListeners(); void addResultFile( ResultFile resultFile ); @Override Map<String, ResultFile> getResultFiles(); @Override StepExecutionStatus getStatus(); @Override String getPartitionID(); @Override void setPartitionID( String partitionID ); Map<String, BlockingRowSet> getPartitionTargets(); void 
setPartitionTargets( Map<String, BlockingRowSet> partitionTargets ); int getRepartitioning(); @Override void setRepartitioning( int repartitioning ); @Override boolean isPartitioned(); @Override void setPartitioned( boolean partitioned ); RowMetaInterface getInputRowMeta(); void setInputRowMeta( RowMetaInterface rowMeta ); RowMetaInterface getErrorRowMeta(); void setErrorRowMeta( RowMetaInterface errorRowMeta ); RowMetaInterface getPreviewRowMeta(); void setPreviewRowMeta( RowMetaInterface previewRowMeta ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); String getTypeId(); int getSlaveNr(); int getClusterSize(); int getUniqueStepNrAcrossSlaves(); int getUniqueStepCountAcrossSlaves(); List<ServerSocket> getServerSockets(); void setServerSockets( List<ServerSocket> serverSockets ); @Override void setUsingThreadPriorityManagment( boolean usingThreadPriorityManagment ); @Override boolean isUsingThreadPriorityManagment(); @Override void initBeforeStart(); List<StepListener> getStepListeners(); void setStepListeners( List<StepListener> stepListeners ); @Override boolean processRow( StepMetaInterface smi, StepDataInterface sdi ); @Override 
boolean canProcessOneRow(); @Override void addStepListener( StepListener stepListener ); @Override boolean isMapping(); SocketRepository getSocketRepository(); void setSocketRepository( SocketRepository socketRepository ); @Override String getObjectName(); @Override LogChannelInterface getLogChannel(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectory getRepositoryDirectory(); @Override String getObjectCopy(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); static void closeQuietly( Closeable cl ); @Override String getContainerObjectId(); void setCarteObjectId( String containerObjectId ); @Override void batchComplete(); List<RemoteStep> getRemoteInputSteps(); List<RemoteStep> getRemoteOutputSteps(); @Override Date getRegistrationDate(); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); @Override Repository getRepository(); @Override void setRepository( Repository repository ); @Override IMetaStore getMetaStore(); @Override void setMetaStore( IMetaStore metaStore ); @Override int getCurrentOutputRowSetNr(); @Override void setCurrentOutputRowSetNr( int index ); @Override int getCurrentInputRowSetNr(); @Override void setCurrentInputRowSetNr( int index ); @Override Map<String, Object> getExtensionDataMap(); @Deprecated public long linesRead; @Deprecated public long linesWritten; @Deprecated public long linesInput; @Deprecated public long linesOutput; @Deprecated public long linesUpdated; @Deprecated public long linesSkipped; @Deprecated public long linesRejected; public boolean first; public boolean terminator; public List<Object[]> terminator_rows; }
@Test public void testPutRowWait() throws Exception { rowProducer.putRowWait( rowMeta, rowData, 1, TimeUnit.MILLISECONDS ); verify( rowSet, times( 1 ) ).putRowWait( rowMeta, rowData, 1, TimeUnit.MILLISECONDS ); }
public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) { return rowSet.putRowWait( rowMeta, rowData, time, tu ); }
RowProducer { public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) { return rowSet.putRowWait( rowMeta, rowData, time, tu ); } }
RowProducer { public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) { return rowSet.putRowWait( rowMeta, rowData, time, tu ); } RowProducer( StepInterface stepInterface, RowSet rowSet ); }
RowProducer { public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) { return rowSet.putRowWait( rowMeta, rowData, time, tu ); } RowProducer( StepInterface stepInterface, RowSet rowSet ); void putRow( RowMetaInterface rowMeta, Object[] row ); boolean putRow( RowMetaInterface rowMeta, Object[] row, boolean block ); boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ); void finished(); RowSet getRowSet(); void setRowSet( RowSet rowSet ); StepInterface getStepInterface(); void setStepInterface( StepInterface stepInterface ); }
RowProducer { public boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ) { return rowSet.putRowWait( rowMeta, rowData, time, tu ); } RowProducer( StepInterface stepInterface, RowSet rowSet ); void putRow( RowMetaInterface rowMeta, Object[] row ); boolean putRow( RowMetaInterface rowMeta, Object[] row, boolean block ); boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ); void finished(); RowSet getRowSet(); void setRowSet( RowSet rowSet ); StepInterface getStepInterface(); void setStepInterface( StepInterface stepInterface ); }
@Test public void testFinished() throws Exception { rowProducer.finished(); verify( rowSet, times( 1 ) ).setDone(); }
public void finished() { rowSet.setDone(); }
RowProducer { public void finished() { rowSet.setDone(); } }
RowProducer { public void finished() { rowSet.setDone(); } RowProducer( StepInterface stepInterface, RowSet rowSet ); }
RowProducer { public void finished() { rowSet.setDone(); } RowProducer( StepInterface stepInterface, RowSet rowSet ); void putRow( RowMetaInterface rowMeta, Object[] row ); boolean putRow( RowMetaInterface rowMeta, Object[] row, boolean block ); boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ); void finished(); RowSet getRowSet(); void setRowSet( RowSet rowSet ); StepInterface getStepInterface(); void setStepInterface( StepInterface stepInterface ); }
RowProducer { public void finished() { rowSet.setDone(); } RowProducer( StepInterface stepInterface, RowSet rowSet ); void putRow( RowMetaInterface rowMeta, Object[] row ); boolean putRow( RowMetaInterface rowMeta, Object[] row, boolean block ); boolean putRowWait( RowMetaInterface rowMeta, Object[] rowData, long time, TimeUnit tu ); void finished(); RowSet getRowSet(); void setRowSet( RowSet rowSet ); StepInterface getStepInterface(); void setStepInterface( StepInterface stepInterface ); }
@Test public void testFallback() throws KettleException { when( fallbackSupplier.get() ).thenReturn( trans ); transSupplier = new TransSupplier( meta, log, fallbackSupplier ); Trans transRet = transSupplier.get(); verify( fallbackSupplier ).get(); assertEquals( transRet, trans ); }
public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); Trans get(); }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); Trans get(); }
@Test public void testWebsocketVersion() throws KettleException { props.setProperty( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); when( meta.getVariable( "engine" ) ).thenReturn( "spark" ); when( meta.getVariable( "engine.host" ) ).thenReturn( "hostname" ); when( meta.getVariable( "engine.port" ) ).thenReturn( "8080" ); when( meta.nrTransHops() ).thenReturn( 0 ); when( meta.getTransHop( 0 ) ).thenReturn( transHopMeta ); when( meta.realClone( false ) ).thenReturn( meta ); when( transHopMeta.isEnabled() ).thenReturn( false ); transSupplier = new TransSupplier( meta, log, fallbackSupplier ); Trans transRet = transSupplier.get(); assertTrue( transRet instanceof TransWebSocketEngineAdapter ); }
public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); Trans get(); }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); Trans get(); }
@Test( expected = RuntimeException.class ) public void testInvalidEngine() throws KettleException { props.setProperty( "KETTLE_AEL_PDI_DAEMON_VERSION", "1.0" ); when( meta.getVariable( "engine" ) ).thenReturn( "invalidEngine" ); transSupplier = new TransSupplier( meta, log, fallbackSupplier ); transSupplier.get(); }
public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); Trans get(); }
TransSupplier implements Supplier<Trans> { public Trans get() { if ( Utils.isEmpty( transMeta.getVariable( "engine" ) ) ) { log.logBasic( "Using legacy execution engine" ); return fallbackSupplier.get(); } Variables variables = new Variables(); variables.initializeVariablesFrom( null ); String version = variables.getVariable( "KETTLE_AEL_PDI_DAEMON_VERSION", "2.0" ); if ( Const.toDouble( version, 1 ) >= 2 ) { String protocol = transMeta.getVariable( "engine.protocol" ); String host = transMeta.getVariable( "engine.host" ); String port = transMeta.getVariable( "engine.port" ); boolean ssl = "https".equalsIgnoreCase( protocol ) || "wss".equalsIgnoreCase( protocol ); return new TransWebSocketEngineAdapter( transMeta, host, port, ssl ); } else { try { return PluginRegistry.getInstance().getPlugins( EnginePluginType.class ).stream() .filter( useThisEngine() ) .findFirst() .map( plugin -> (Engine) loadPlugin( plugin ) ) .map( engine -> { log.logBasic( "Using execution engine " + engine.getClass().getCanonicalName() ); return (Trans) new TransEngineAdapter( engine, transMeta ); } ) .orElseThrow( () -> new KettleException( "Unable to find engine [" + transMeta.getVariable( "engine" ) + "]" ) ); } catch ( KettleException e ) { log.logError( "Failed to load engine", e ); throw new RuntimeException( e ); } } } TransSupplier( TransMeta transMeta, LogChannelInterface log, Supplier<Trans> fallbackSupplier ); Trans get(); }
@Test
public void testGetRow() throws KettleValueException {
  // One impact entry populated with a distinct value per field so each column
  // of the rendered row can be checked unambiguously.
  DatabaseImpact testObject =
    new DatabaseImpact( DatabaseImpact.TYPE_IMPACT_READ, "myTrans", "aStep", "ProdDB", "DimCustomer",
      "Customer_Key", "MyValue", "Calculator 2", "SELECT * FROM dimCustomer", "Some remarks" );

  RowMetaAndData rmd = testObject.getRow();
  assertNotNull( rmd );
  assertEquals( 10, rmd.size() );

  // Message-key suffix and expected string value for each column, in row order.
  String[] labelKeys = { "Type", "Transformation", "Step", "Database", "Table",
    "Field", "Value", "ValueOrigin", "SQL", "Remarks" };
  String[] expectedValues = { "Read", "myTrans", "aStep", "ProdDB", "DimCustomer",
    "Customer_Key", "MyValue", "Calculator 2", "SELECT * FROM dimCustomer", "Some remarks" };

  for ( int i = 0; i < labelKeys.length; i++ ) {
    // Every column is string-typed, carries the localized label, and holds the raw value.
    assertEquals( "type of column " + i, ValueMetaInterface.TYPE_STRING, rmd.getValueMeta( i ).getType() );
    assertEquals( "name of column " + i,
      BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label." + labelKeys[i] ),
      rmd.getValueMeta( i ).getName() );
    assertEquals( "value of column " + i, expectedValues[i], rmd.getString( i, "default" ) );
  }
}
// Renders this impact entry as a row of localized, string-typed columns for
// the impact-analysis grid.
public RowMetaAndData getRow() {
  RowMetaAndData row = new RowMetaAndData();
  appendString( row, "DatabaseImpact.RowDesc.Label.Type", getTypeDesc() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Transformation", getTransformationName() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Step", getStepName() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Database", getDatabaseName() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Table", getTable() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Field", getField() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Value", getValue() );
  appendString( row, "DatabaseImpact.RowDesc.Label.ValueOrigin", getValueOrigin() );
  appendString( row, "DatabaseImpact.RowDesc.Label.SQL", getSQL() );
  appendString( row, "DatabaseImpact.RowDesc.Label.Remarks", getRemark() );
  return row;
}

// Adds one string column whose header is the localized message for labelKey.
private void appendString( RowMetaAndData row, String labelKey, String value ) {
  row.addValue( new ValueMetaString( BaseMessages.getString( PKG, labelKey ) ), value );
}
// Describes how one transformation step touches a database object; getRow()
// renders the entry as a localized, all-string row for the impact-analysis grid.
DatabaseImpact { public RowMetaAndData getRow() { RowMetaAndData r = new RowMetaAndData(); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Type" ) ), getTypeDesc() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Transformation" ) ), getTransformationName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Step" ) ), getStepName() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Database" ) ), getDatabaseName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Table" ) ), getTable() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Field" ) ), getField() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Value" ) ), getValue() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.ValueOrigin" ) ), getValueOrigin() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.SQL" ) ), getSQL() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Remarks" ) ), getRemark() ); return r; } }
// Same impact-entry type; this variant also lists the 10-argument constructor
// that captures type, trans/step names, DB object coordinates, SQL and remarks.
DatabaseImpact { public RowMetaAndData getRow() { RowMetaAndData r = new RowMetaAndData(); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Type" ) ), getTypeDesc() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Transformation" ) ), getTransformationName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Step" ) ), getStepName() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Database" ) ), getDatabaseName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Table" ) ), getTable() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Field" ) ), getField() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Value" ) ), getValue() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.ValueOrigin" ) ), getValueOrigin() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.SQL" ) ), getSQL() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Remarks" ) ), getRemark() ); return r; } DatabaseImpact( int type, String transname, String stepname, String dbname, String table, String field, String valuename, String valueorigin, String sql, String remark ); }
// Impact-entry type with its accessor surface: simple getters for every field,
// the type-description lookup in both directions, and getRow() for grid display.
DatabaseImpact { public RowMetaAndData getRow() { RowMetaAndData r = new RowMetaAndData(); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Type" ) ), getTypeDesc() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Transformation" ) ), getTransformationName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Step" ) ), getStepName() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Database" ) ), getDatabaseName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Table" ) ), getTable() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Field" ) ), getField() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Value" ) ), getValue() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.ValueOrigin" ) ), getValueOrigin() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.SQL" ) ), getSQL() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Remarks" ) ), getRemark() ); return r; } DatabaseImpact( int type, String transname, String stepname, String dbname, String table, String field, String valuename, String valueorigin, String sql, String remark ); String getTransformationName(); String getStepName(); String getValueOrigin(); String getDatabaseName(); String getTable(); String getField(); String getValue(); String getSQL(); String getRemark(); String getTypeDesc(); static final int getTypeDesc( String typedesc ); int getType(); RowMetaAndData getRow(); }
// Full impact-entry surface, including the TYPE_IMPACT_* constant family
// (NONE/READ/WRITE/READ_WRITE/TRUNCATE/DELETE/UPDATE) and their descriptions.
DatabaseImpact { public RowMetaAndData getRow() { RowMetaAndData r = new RowMetaAndData(); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Type" ) ), getTypeDesc() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Transformation" ) ), getTransformationName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Step" ) ), getStepName() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Database" ) ), getDatabaseName() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Table" ) ), getTable() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Field" ) ), getField() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Value" ) ), getValue() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.ValueOrigin" ) ), getValueOrigin() ); r.addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.SQL" ) ), getSQL() ); r .addValue( new ValueMetaString( BaseMessages.getString( PKG, "DatabaseImpact.RowDesc.Label.Remarks" ) ), getRemark() ); return r; } DatabaseImpact( int type, String transname, String stepname, String dbname, String table, String field, String valuename, String valueorigin, String sql, String remark ); String getTransformationName(); String getStepName(); String getValueOrigin(); String getDatabaseName(); String getTable(); String getField(); String getValue(); String getSQL(); String getRemark(); String getTypeDesc(); static final int getTypeDesc( String typedesc ); int getType(); RowMetaAndData getRow(); static final int TYPE_IMPACT_NONE; static final int TYPE_IMPACT_READ; static final int TYPE_IMPACT_WRITE; static final int TYPE_IMPACT_READ_WRITE; static final int TYPE_IMPACT_TRUNCATE;
static final int TYPE_IMPACT_DELETE; static final int TYPE_IMPACT_UPDATE; static final String[] typeDesc; }
// In Jetty mode a request whose URI is not the servlet's context path must be
// answered with HTTP 404 and nothing else.
@Test
public void testDoGetReturn404StatusCode() throws ServletException, IOException {
  GetRootServlet servlet = new GetRootServlet();
  servlet.setJettyMode( true );

  HttpServletRequest request = mock( HttpServletRequest.class );
  when( request.getRequestURI() ).thenReturn( "/wrong_path" );
  HttpServletResponse response = mock( HttpServletResponse.class );

  servlet.doGet( request, response );

  verify( response ).setStatus( HttpServletResponse.SC_NOT_FOUND );
}
// Serves the slave-server root page: rejects non-root URIs with 404 in Jetty
// mode, otherwise writes a small HTML menu linking to the status servlet.
public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException {
  // Jetty mode: this servlet only answers on its own context path.
  if ( isJettyMode() && !request.getRequestURI().equals( CONTEXT_PATH ) ) {
    response.setStatus( HttpServletResponse.SC_NOT_FOUND );
    return;
  }
  if ( log.isDebug() ) {
    logDebug( BaseMessages.getString( PKG, "GetRootServlet.RootRequested" ) );
  }
  response.setContentType( "text/html;charset=UTF-8" );
  response.setStatus( HttpServletResponse.SC_OK );
  PrintWriter writer = response.getWriter();
  // Page content, one println per entry; strings are identical to the legacy output.
  String[] page = {
    "<HTML>",
    "<HEAD><TITLE>" + BaseMessages.getString( PKG, "GetRootServlet.KettleSlaveServer.Title" ) + "</TITLE>",
    "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">",
    "</HEAD>",
    "<BODY>",
    "<H2>" + BaseMessages.getString( PKG, "GetRootServlet.SlaveServerMenu" ) + "</H2>",
    "<p>",
    "<a href=\"" + convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">"
      + BaseMessages.getString( PKG, "GetRootServlet.ShowStatus" ) + "</a><br>",
    "<p>",
    "</BODY>",
    "</HTML>" };
  for ( String line : page ) {
    writer.println( line );
  }
}
// Carte servlet for the slave-server root page: 404s foreign URIs in Jetty
// mode, otherwise emits an HTML menu linking to the status servlet.
GetRootServlet extends BaseHttpServlet implements CartePluginInterface { public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { if ( isJettyMode() && !request.getRequestURI().equals( CONTEXT_PATH ) ) { response.setStatus( HttpServletResponse.SC_NOT_FOUND ); return; } if ( log.isDebug() ) { logDebug( BaseMessages.getString( PKG, "GetRootServlet.RootRequested" ) ); } response.setContentType( "text/html;charset=UTF-8" ); response.setStatus( HttpServletResponse.SC_OK ); PrintWriter out = response.getWriter(); out.println( "<HTML>" ); out.println( "<HEAD><TITLE>" + BaseMessages.getString( PKG, "GetRootServlet.KettleSlaveServer.Title" ) + "</TITLE>" ); out.println( "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" ); out.println( "</HEAD>" ); out.println( "<BODY>" ); out.println( "<H2>" + BaseMessages.getString( PKG, "GetRootServlet.SlaveServerMenu" ) + "</H2>" ); out.println( "<p>" ); out.println( "<a href=\"" + convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">" + BaseMessages.getString( PKG, "GetRootServlet.ShowStatus" ) + "</a><br>" ); out.println( "<p>" ); out.println( "</BODY>" ); out.println( "</HTML>" ); } }
// Same root-page servlet; this variant additionally lists the no-arg constructor.
GetRootServlet extends BaseHttpServlet implements CartePluginInterface { public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { if ( isJettyMode() && !request.getRequestURI().equals( CONTEXT_PATH ) ) { response.setStatus( HttpServletResponse.SC_NOT_FOUND ); return; } if ( log.isDebug() ) { logDebug( BaseMessages.getString( PKG, "GetRootServlet.RootRequested" ) ); } response.setContentType( "text/html;charset=UTF-8" ); response.setStatus( HttpServletResponse.SC_OK ); PrintWriter out = response.getWriter(); out.println( "<HTML>" ); out.println( "<HEAD><TITLE>" + BaseMessages.getString( PKG, "GetRootServlet.KettleSlaveServer.Title" ) + "</TITLE>" ); out.println( "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" ); out.println( "</HEAD>" ); out.println( "<BODY>" ); out.println( "<H2>" + BaseMessages.getString( PKG, "GetRootServlet.SlaveServerMenu" ) + "</H2>" ); out.println( "<p>" ); out.println( "<a href=\"" + convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">" + BaseMessages.getString( PKG, "GetRootServlet.ShowStatus" ) + "</a><br>" ); out.println( "<p>" ); out.println( "</BODY>" ); out.println( "</HTML>" ); } GetRootServlet(); }
// Root-page servlet with its public surface: doGet, toString, and the
// Carte-plugin accessors getService()/getContextPath().
GetRootServlet extends BaseHttpServlet implements CartePluginInterface { public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { if ( isJettyMode() && !request.getRequestURI().equals( CONTEXT_PATH ) ) { response.setStatus( HttpServletResponse.SC_NOT_FOUND ); return; } if ( log.isDebug() ) { logDebug( BaseMessages.getString( PKG, "GetRootServlet.RootRequested" ) ); } response.setContentType( "text/html;charset=UTF-8" ); response.setStatus( HttpServletResponse.SC_OK ); PrintWriter out = response.getWriter(); out.println( "<HTML>" ); out.println( "<HEAD><TITLE>" + BaseMessages.getString( PKG, "GetRootServlet.KettleSlaveServer.Title" ) + "</TITLE>" ); out.println( "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" ); out.println( "</HEAD>" ); out.println( "<BODY>" ); out.println( "<H2>" + BaseMessages.getString( PKG, "GetRootServlet.SlaveServerMenu" ) + "</H2>" ); out.println( "<p>" ); out.println( "<a href=\"" + convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">" + BaseMessages.getString( PKG, "GetRootServlet.ShowStatus" ) + "</a><br>" ); out.println( "<p>" ); out.println( "</BODY>" ); out.println( "</HTML>" ); } GetRootServlet(); void doGet( HttpServletRequest request, HttpServletResponse response ); String toString(); String getService(); String getContextPath(); }
// Root-page servlet surface including the CONTEXT_PATH constant that doGet
// matches the request URI against in Jetty mode.
GetRootServlet extends BaseHttpServlet implements CartePluginInterface { public void doGet( HttpServletRequest request, HttpServletResponse response ) throws ServletException, IOException { if ( isJettyMode() && !request.getRequestURI().equals( CONTEXT_PATH ) ) { response.setStatus( HttpServletResponse.SC_NOT_FOUND ); return; } if ( log.isDebug() ) { logDebug( BaseMessages.getString( PKG, "GetRootServlet.RootRequested" ) ); } response.setContentType( "text/html;charset=UTF-8" ); response.setStatus( HttpServletResponse.SC_OK ); PrintWriter out = response.getWriter(); out.println( "<HTML>" ); out.println( "<HEAD><TITLE>" + BaseMessages.getString( PKG, "GetRootServlet.KettleSlaveServer.Title" ) + "</TITLE>" ); out.println( "<META http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\">" ); out.println( "</HEAD>" ); out.println( "<BODY>" ); out.println( "<H2>" + BaseMessages.getString( PKG, "GetRootServlet.SlaveServerMenu" ) + "</H2>" ); out.println( "<p>" ); out.println( "<a href=\"" + convertContextPath( GetStatusServlet.CONTEXT_PATH ) + "\">" + BaseMessages.getString( PKG, "GetRootServlet.ShowStatus" ) + "</a><br>" ); out.println( "<p>" ); out.println( "</BODY>" ); out.println( "</HTML>" ); } GetRootServlet(); void doGet( HttpServletRequest request, HttpServletResponse response ); String toString(); String getService(); String getContextPath(); static final String CONTEXT_PATH; }
// findDatabase() must resolve a connection by its display name even when the
// stored name differs (a '.' in the display name is encoded as '_').
@Test
public void testFindDatabaseWithEncodedConnectionName() {
  DatabaseMeta dbMeta1 =
    new DatabaseMeta( "encoded_DBConnection", "Oracle", "localhost", "access", "test", "111", "test", "test" );
  dbMeta1.setDisplayName( "encoded.DBConnection" );
  meta.addDatabase( dbMeta1 );

  // A second, unrelated connection ensures the lookup is actually selective.
  DatabaseMeta dbMeta2 =
    new DatabaseMeta( "normalDBConnection", "Oracle", "localhost", "access", "test", "111", "test", "test" );
  dbMeta2.setDisplayName( "normalDBConnection" );
  meta.addDatabase( dbMeta2 );

  DatabaseMeta databaseMeta = meta.findDatabase( dbMeta1.getDisplayName() );
  assertNotNull( databaseMeta );
  // JUnit contract: expected value first, actual second (the original had them
  // reversed, which produces misleading failure messages).
  assertEquals( "encoded_DBConnection", databaseMeta.getName() );
  assertEquals( "encoded.DBConnection", databaseMeta.getDisplayName() );
}
// Returns the transformation's name, or null when no metadata is attached yet.
public String getName() {
  return transMeta == null ? null : transMeta.getName();
}
// Executable transformation; getName() delegates to the attached TransMeta
// and yields null before any metadata has been set.
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public String getName() { if ( transMeta == null ) { return null; } return transMeta.getName(); } }
// Trans with its constructor overloads: empty, from TransMeta, from TransMeta
// plus logging parent, and loaded by name/dir/filename from a repository.
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public String getName() { if ( transMeta == null ) { return null; } return transMeta.getName(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
// Full public surface of Trans: lifecycle (prepareExecution/startThreads/
// waitUntilFinished/stopAll), step lookup, clustered and remote execution
// helpers, variable/parameter handling, and logging/metadata accessors.
// Only getName() carries a body here; the remainder is the declaration list.
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public String getName() { if ( transMeta == null ) { return null; } return transMeta.getName(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets();
List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult(
LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused
); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String
containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public String getName() { if ( transMeta == null ) { return null; } return transMeta.getName(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); 
List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( 
LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused 
); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String 
containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String 
STRING_FINISHED_WITH_ERRORS; static final String STRING_RUNNING; static final String STRING_PAUSED; static final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
@Test public void testLoggingObjectIsNotLeakInMeta() { String expected = meta.log.getLogChannelId(); meta.clear(); String actual = meta.log.getLogChannelId(); assertEquals( "Use same logChannel for empty constructors, or assign General level for clear() calls", expected, actual ); }
@Override public String getLogChannelId() { return log.getLogChannelId(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> 
getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, 
TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); 
void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override 
Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> 
getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, 
TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); 
void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override 
Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String STRING_FINISHED_WITH_ERRORS; static final 
String STRING_RUNNING; static final String STRING_PAUSED; static final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
@Test public void testLoggingObjectIsNotLeakInTrans() throws KettleException { Repository rep = Mockito.mock( Repository.class ); RepositoryDirectoryInterface repInt = Mockito.mock( RepositoryDirectoryInterface.class ); Mockito.when( rep.loadTransformation( Mockito.anyString(), Mockito.any( RepositoryDirectoryInterface.class ), Mockito .any( ProgressMonitorListener.class ), Mockito.anyBoolean(), Mockito.anyString() ) ).thenReturn( meta ); Mockito.when( rep.findDirectory( Mockito.anyString() ) ).thenReturn( repInt ); Trans trans = new Trans( meta, rep, "junit", "junitDir", "fileName" ); assertEquals( "Log channel General assigned", LogChannel.GENERAL.getLogChannelId(), trans.log .getLogChannelId() ); }
@Override public String getLogChannelId() { return log.getLogChannelId(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> 
getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, 
TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); 
void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override 
Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { @Override public String getLogChannelId() { return log.getLogChannelId(); } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> 
getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, 
TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); 
void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override 
Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String STRING_FINISHED_WITH_ERRORS; static final 
String STRING_RUNNING; static final String STRING_PAUSED; static final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
@Test public void testTransFinishListenersConcurrentModification() throws KettleException, InterruptedException { CountDownLatch start = new CountDownLatch( 1 ); TransFinishListenerAdder add = new TransFinishListenerAdder( trans, start ); TransFinishListenerFirer firer = new TransFinishListenerFirer( trans, start ); startThreads( add, firer, start ); assertEquals( "All listeners are added: no ConcurrentModificationException", count, add.c ); assertEquals( "All Finish listeners are iterated over: no ConcurrentModificationException", count, firer.c ); }
public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( 
limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new 
StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] 
findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String 
sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( 
LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter 
servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] 
findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String 
sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( 
LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter 
servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String STRING_FINISHED_WITH_ERRORS; static final String STRING_RUNNING; static final String STRING_PAUSED; static 
final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
@Test public void testTransStartListenersConcurrentModification() throws InterruptedException { CountDownLatch start = new CountDownLatch( 1 ); TransFinishListenerAdder add = new TransFinishListenerAdder( trans, start ); TransStartListenerFirer starter = new TransStartListenerFirer( trans, start ); startThreads( add, starter, start ); assertEquals( "All listeners are added: no ConcurrentModificationException", count, add.c ); assertEquals( "All Start listeners are iterated over: no ConcurrentModificationException", count, starter.c ); }
public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( 
limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new 
StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] 
findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String 
sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( 
LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter 
servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
// NOTE(review): Machine-collapsed skeleton of org.pentaho.di.trans.Trans (Pentaho Kettle).
// Only startThreads() carries a real body here; every other member is a signature stub with
// no body, so this fragment is not compilable Java as written. Code kept byte-identical.
//
// startThreads(): final phase of transformation startup. Per visible code it
//  - fires the TransformationStartThreads extension point and the trans-started listeners;
//  - registers a StepListener on every step that (a) fires transActive() once, on the first
//    step to become active, and (b) under synchronized(Trans.this) counts finished steps,
//    firing fireTransFinishedListeners() when all are done and killing the other steps
//    via killAllNoWait() when a step reports errors;
//  - when step-performance capture is enabled, schedules a java.util.Timer snapshot task;
//  - installs a transFinished listener at index 0 that shuts down the heartbeat, fires the
//    TransformationFinish extension point, cancels the snapshot timer, writes metrics, and
//    closes unique DB connections when configured;
//  - launches execution per TransformationType: Normal = one RunThread per step;
//    SerialSingleThreaded = a single thread running steps sorted by findPrevious() order;
//    SingleThreaded = no threads started here (driven externally — presumably by an executor,
//    TODO confirm against callers);
//  - fires trans-finished listeners immediately if there are no steps at all.
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
// Fall back to the system-wide snapshot size limit when the transformation defines none.
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] 
findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String 
sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( 
LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter 
servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String STRING_FINISHED_WITH_ERRORS; static final String STRING_RUNNING; static final String STRING_PAUSED; static 
final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
// Concurrency regression test: stops the transformation from one thread (TransStoppedCaller)
// while another thread (TransStopListenerAdder) keeps adding stop-listeners, both released
// together by a shared CountDownLatch. If Trans guarded its stop-listener list correctly,
// neither thread loses an operation, so both per-thread counters ('c') must reach 'count'.
// NOTE(review): 'trans', 'count', startThreads(...) and the two helper Runnables are fixture
// members defined elsewhere in this test class — not visible in this fragment.
@Test public void testTransStoppedListenersConcurrentModification() throws InterruptedException { CountDownLatch start = new CountDownLatch( 1 ); TransStoppedCaller stopper = new TransStoppedCaller( trans, start ); TransStopListenerAdder adder = new TransStopListenerAdder( trans, start ); startThreads( stopper, adder, start ); assertEquals( "All transformation stop listeners is added", count, adder.c ); assertEquals( "All stop call success", count, stopper.c ); }
// Starts execution of an already-prepared transformation (standalone copy of
// Trans.startThreads(); this fragment repeats the same method several times).
// Visible behavior, in order:
//  1. Reset active/finished counters, fire the TransformationStartThreads extension point
//     and the trans-started listeners.
//  2. Attach a StepListener to every step: first activation fires transActive() on all
//     trans listeners (under synchronized(transListeners)); each finish increments
//     nrOfFinishedSteps under synchronized(Trans.this), firing fireTransFinishedListeners()
//     when all steps are done and calling killAllNoWait() when a step finished with errors.
//     For BaseStep instances the listener is inserted at position 0 so it runs first.
//  3. Optionally schedule a java.util.Timer that records step-performance snapshots every
//     transMeta.getStepPerformanceCapturingDelay() ms after an initial 100 ms delay.
//  4. Register a transFinished listener (at index 0) that stops the heartbeat, fires the
//     TransformationFinish extension point, cancels the snapshot timer, writes metrics if a
//     metrics log table is defined, and closes unique DB connections when configured.
//  5. Launch per transformation type: Normal = one RunThread per step (StepBeforeStart
//     extension point fired before each start); SerialSingleThreaded = one thread calling
//     processRow() round-robin over steps sorted by findPrevious() order, disposing all
//     steps in a finally block; SingleThreaded = nothing started here.
//  6. Fire TransformationStart, start the heartbeat, and — if there are no steps — fire the
//     trans-finished listeners immediately.
public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( 
limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new 
StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, "Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } }
// NOTE(review): Duplicate machine-collapsed fragment of org.pentaho.di.trans.Trans wrapping
// the same startThreads() body that appears earlier in this file; no other members carry
// bodies here. Missing the 'class' keyword, so not compilable as-is. Code kept byte-identical.
// See the startThreads() flow: extension points + listeners, per-step StepListener wiring,
// optional performance-snapshot Timer, transFinished cleanup listener, then thread launch
// per transformation type (Normal / SerialSingleThreaded / SingleThreaded).
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
// Fall back to the system-wide snapshot size limit when the transformation defines none.
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] 
findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String 
sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( 
LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter 
servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { public void startThreads() throws KettleException { nrOfFinishedSteps = 0; nrOfActiveSteps = 0; ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStartThreads.id, this ); fireTransStartedListeners(); for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi sid = steps.get( i ); sid.step.markStart(); sid.step.initBeforeStart(); StepListener stepListener = new StepListener() { @Override public void stepActive( Trans trans, StepMeta stepMeta, StepInterface step ) { nrOfActiveSteps++; if ( nrOfActiveSteps == 1 ) { synchronized ( transListeners ) { for ( TransListener listener : transListeners ) { listener.transActive( Trans.this ); } } } } @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { synchronized ( Trans.this ) { nrOfFinishedSteps++; if ( nrOfFinishedSteps >= steps.size() ) { setFinished( true ); addStepPerformanceSnapShot(); try { fireTransFinishedListeners(); } catch ( Exception e ) { step.setErrors( step.getErrors() + 1L ); log.logError( getName() + " : " + BaseMessages.getString( PKG, "Trans.Log.UnexpectedErrorAtTransformationEnd" ), e ); } } if ( step.getErrors() > 0 ) { log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationDetectedErrors" ) ); log.logMinimal( BaseMessages.getString( PKG, "Trans.Log.TransformationIsKillingTheOtherSteps" ) ); killAllNoWait(); } } } }; if ( sid.step instanceof BaseStep ) { ( (BaseStep) sid.step ).getStepListeners().add( 0, stepListener ); } else { sid.step.addStepListener( stepListener ); } } if ( transMeta.isCapturingStepPerformanceSnapShots() ) { stepPerformanceSnapshotSeqNr = new AtomicInteger( 0 ); stepPerformanceSnapShots = new ConcurrentHashMap<>(); String limitString = environmentSubstitute( transMeta.getStepPerformanceCapturingSizeLimit() ); if ( Utils.isEmpty( limitString ) ) { 
limitString = EnvUtil.getSystemProperty( Const.KETTLE_STEP_PERFORMANCE_SNAPSHOT_LIMIT ); } stepPerformanceSnapshotSizeLimit = Const.toInt( limitString, 0 ); stepPerformanceSnapShotTimer = new Timer( "stepPerformanceSnapShot Timer: " + transMeta.getName() ); TimerTask timerTask = new TimerTask() { @Override public void run() { if ( !isFinished() ) { addStepPerformanceSnapShot(); } } }; stepPerformanceSnapShotTimer.schedule( timerTask, 100, transMeta.getStepPerformanceCapturingDelay() ); } setFinished( false ); setPaused( false ); setStopped( false ); transFinishedBlockingQueue = new ArrayBlockingQueue<>( 10 ); TransListener transListener = new TransAdapter() { @Override public void transFinished( Trans trans ) { try { shutdownHeartbeat( trans != null ? trans.heartbeat : null ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationFinish.id, trans ); } catch ( KettleException e ) { throw new RuntimeException( "Error calling extension point at end of transformation", e ); } if ( transMeta.isCapturingStepPerformanceSnapShots() && stepPerformanceSnapShotTimer != null ) { stepPerformanceSnapShotTimer.cancel(); } transMeta.disposeEmbeddedMetastoreProvider(); setFinished( true ); setRunning( false ); log.snap( Metrics.METRIC_TRANSFORMATION_EXECUTION_STOP ); MetricsLogTable metricsLogTable = transMeta.getMetricsLogTable(); if ( metricsLogTable.isDefined() ) { try { writeMetricsInformation(); } catch ( Exception e ) { log.logError( "Error writing metrics information", e ); errors.incrementAndGet(); } } if ( transMeta.isUsingUniqueConnections() ) { trans.closeUniqueDatabaseConnections( getResult() ); } } }; transListeners.add( 0, transListener ); setRunning( true ); switch ( transMeta.getTransformationType() ) { case Normal: for ( int i = 0; i < steps.size(); i++ ) { final StepMetaDataCombi combi = steps.get( i ); RunThread runThread = new RunThread( combi ); Thread thread = new Thread( runThread ); thread.setName( getName() + " - " + 
combi.stepname ); ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepBeforeStart.id, combi ); combi.step.addStepListener( new StepAdapter() { @Override public void stepFinished( Trans trans, StepMeta stepMeta, StepInterface step ) { try { ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.StepFinished.id, combi ); } catch ( KettleException e ) { throw new RuntimeException( "Unexpected error in calling extension point upon step finish", e ); } } } ); thread.start(); } break; case SerialSingleThreaded: new Thread( new Runnable() { @Override public void run() { try { for ( StepMetaDataCombi combi : steps ) { combi.step.setUsingThreadPriorityManagment( false ); } Collections.sort( steps, new Comparator<StepMetaDataCombi>() { @Override public int compare( StepMetaDataCombi c1, StepMetaDataCombi c2 ) { boolean c1BeforeC2 = transMeta.findPrevious( c2.stepMeta, c1.stepMeta ); if ( c1BeforeC2 ) { return -1; } else { return 1; } } } ); boolean[] stepDone = new boolean[steps.size()]; int nrDone = 0; while ( nrDone < steps.size() && !isStopped() ) { for ( int i = 0; i < steps.size() && !isStopped(); i++ ) { StepMetaDataCombi combi = steps.get( i ); if ( !stepDone[i] ) { boolean cont = combi.step.processRow( combi.meta, combi.data ); if ( !cont ) { stepDone[i] = true; nrDone++; } } } } } catch ( Exception e ) { errors.addAndGet( 1 ); log.logError( "Error executing single threaded", e ); } finally { for ( int i = 0; i < steps.size(); i++ ) { StepMetaDataCombi combi = steps.get( i ); combi.step.dispose( combi.meta, combi.data ); combi.step.markStop(); } } } } ).start(); break; case SingleThreaded: break; default: break; } ExtensionPointHandler.callExtensionPoint( log, KettleExtensionPoint.TransformationStart.id, this ); heartbeat = startHeartbeat( getHeartbeatIntervalInSeconds() ); if ( steps.isEmpty() ) { fireTransFinishedListeners(); } if ( log.isDetailed() ) { log.logDetailed( BaseMessages.getString( PKG, 
"Trans.Log.TransformationHasAllocated", String.valueOf( steps .size() ), String.valueOf( rowsets.size() ) ) ); } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] 
findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String 
sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( 
LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter 
servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String STRING_FINISHED_WITH_ERRORS; static final String STRING_RUNNING; static final String STRING_PAUSED; static 
final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
@Test public void testFireTransFinishedListeners() throws Exception { Trans trans = new Trans(); TransListener mockListener = mock( TransListener.class ); trans.setTransListeners( Collections.singletonList( mockListener ) ); trans.fireTransFinishedListeners(); verify( mockListener ).transFinished( trans ); }
protected void fireTransFinishedListeners() throws KettleException { synchronized ( transListeners ) { if ( transListeners.size() == 0 ) { return; } List<KettleException> badGuys = new ArrayList<>( transListeners.size() ); for ( TransListener transListener : transListeners ) { try { transListener.transFinished( this ); } catch ( KettleException e ) { badGuys.add( e ); } } if ( transFinishedBlockingQueue != null ) { transFinishedBlockingQueue.add( new Object() ); } if ( !badGuys.isEmpty() ) { throw new KettleException( badGuys.get( 0 ) ); } } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { protected void fireTransFinishedListeners() throws KettleException { synchronized ( transListeners ) { if ( transListeners.size() == 0 ) { return; } List<KettleException> badGuys = new ArrayList<>( transListeners.size() ); for ( TransListener transListener : transListeners ) { try { transListener.transFinished( this ); } catch ( KettleException e ) { badGuys.add( e ); } } if ( transFinishedBlockingQueue != null ) { transFinishedBlockingQueue.add( new Object() ); } if ( !badGuys.isEmpty() ) { throw new KettleException( badGuys.get( 0 ) ); } } } }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { protected void fireTransFinishedListeners() throws KettleException { synchronized ( transListeners ) { if ( transListeners.size() == 0 ) { return; } List<KettleException> badGuys = new ArrayList<>( transListeners.size() ); for ( TransListener transListener : transListeners ) { try { transListener.transFinished( this ); } catch ( KettleException e ) { badGuys.add( e ); } } if ( transFinishedBlockingQueue != null ) { transFinishedBlockingQueue.add( new Object() ); } if ( !badGuys.isEmpty() ) { throw new KettleException( badGuys.get( 0 ) ); } } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { protected void fireTransFinishedListeners() throws KettleException { synchronized ( transListeners ) { if ( transListeners.size() == 0 ) { return; } List<KettleException> badGuys = new ArrayList<>( transListeners.size() ); for ( TransListener transListener : transListeners ) { try { transListener.transFinished( this ); } catch ( KettleException e ) { badGuys.add( e ); } } if ( transFinishedBlockingQueue != null ) { transFinishedBlockingQueue.add( new Object() ); } if ( !badGuys.isEmpty() ) { throw new KettleException( badGuys.get( 0 ) ); } } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String 
stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, 
TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> 
stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel 
logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( 
DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); }
Trans implements VariableSpace, NamedParams, HasLogChannelInterface, LoggingObjectInterface, ExecutorInterface, ExtensionDataInterface { protected void fireTransFinishedListeners() throws KettleException { synchronized ( transListeners ) { if ( transListeners.size() == 0 ) { return; } List<KettleException> badGuys = new ArrayList<>( transListeners.size() ); for ( TransListener transListener : transListeners ) { try { transListener.transFinished( this ); } catch ( KettleException e ) { badGuys.add( e ); } } if ( transFinishedBlockingQueue != null ) { transFinishedBlockingQueue.add( new Object() ); } if ( !badGuys.isEmpty() ) { throw new KettleException( badGuys.get( 0 ) ); } } } Trans(); Trans( TransMeta transMeta ); Trans( TransMeta transMeta, LoggingObjectInterface parent ); <Parent extends VariableSpace & NamedParams> Trans( Parent parent, Repository rep, String name, String dirname, String filename ); void setParent( LoggingObjectInterface parent ); @Override LogChannelInterface getLogChannel(); void setLog( LogChannelInterface log ); String getName(); void execute( String[] arguments ); void prepareExecution( String[] arguments ); void startThreads(); void cleanup(); void logSummary( StepInterface si ); void waitUntilFinished(); int getErrors(); int getEnded(); boolean isFinished(); boolean isFinishedOrStopped(); @Deprecated void killAll(); void printStats( int seconds ); long getLastProcessed(); RowSet findRowSet( String rowsetname ); RowSet findRowSet( String from, int fromcopy, String to, int tocopy ); boolean hasStepStarted( String sname, int copy ); void stopAll(); int nrSteps(); int nrActiveSteps(); boolean[] getTransStepIsRunningLookup(); StepExecutionStatus[] getTransStepExecutionStatusLookup(); StepInterface getRunThread( int i ); StepInterface getRunThread( String name, int copy ); void calculateBatchIdAndDateRange(); void beginProcessing(); Result getResult(); StepInterface findRunThread( String stepname ); List<StepInterface> findBaseSteps( String 
stepname ); StepInterface findStepInterface( String stepname, int copyNr ); List<StepInterface> findStepInterfaces( String stepname ); StepDataInterface findDataInterface( String name ); Date getStartDate(); Date getEndDate(); boolean isMonitored(); void setMonitored( boolean monitored ); TransMeta getTransMeta(); void setTransMeta( TransMeta transMeta ); Date getCurrentDate(); Date getDepDate(); Date getLogDate(); List<RowSet> getRowsets(); List<StepMetaDataCombi> getSteps(); @Override String toString(); MappingInput[] findMappingInput(); MappingOutput[] findMappingOutput(); StepInterface getStepInterface( String stepname, int copy ); Date getReplayDate(); void setReplayDate( Date replayDate ); void setSafeModeEnabled( boolean safeModeEnabled ); boolean isSafeModeEnabled(); RowProducer addRowProducer( String stepname, int copynr ); Job getParentJob(); void setParentJob( Job parentJob ); StepDataInterface getStepDataInterface( String stepname, int stepcopy ); boolean hasHaltedSteps(); Date getJobStartDate(); Date getJobEndDate(); void setJobEndDate( Date jobEndDate ); void setJobStartDate( Date jobStartDate ); long getPassedBatchId(); void setPassedBatchId( long jobBatchId ); long getBatchId(); void setBatchId( long batchId ); @Deprecated String getThreadName(); @Deprecated void setThreadName( String threadName ); String getStatus(); boolean isInitializing(); void setInitializing( boolean initializing ); boolean isPreparing(); void setPreparing( boolean preparing ); boolean isRunning(); void setRunning( boolean running ); static TransSplitter executeClustered( final TransMeta transMeta, final TransExecutionConfiguration executionConfiguration ); static void executeClustered( final TransSplitter transSplitter, final TransExecutionConfiguration executionConfiguration ); static final long monitorClusteredTransformation( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final long monitorClusteredTransformation( LogChannelInterface log, 
TransSplitter transSplitter, Job parentJob, int sleepTimeSeconds ); static int cleanupCluster( LogChannelInterface log, TransSplitter transSplitter ); static void cleanupSlaveServer( TransSplitter transSplitter, SlaveServer slaveServer, TransMeta slaveTransMeta ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob ); static final Result getClusteredTransformationResult( LogChannelInterface log, TransSplitter transSplitter, Job parentJob, boolean loggingRemoteWork ); static String sendToSlaveServer( TransMeta transMeta, TransExecutionConfiguration executionConfiguration, Repository repository, IMetaStore metaStore ); boolean isReadyToStart(); void setInternalKettleVariables( VariableSpace var ); @Override void copyVariablesFrom( VariableSpace space ); @Override String environmentSubstitute( String aString ); @Override String[] environmentSubstitute( String[] aString ); @Override String fieldSubstitute( String aString, RowMetaInterface rowMeta, Object[] rowData ); @Override VariableSpace getParentVariableSpace(); @Override void setParentVariableSpace( VariableSpace parent ); @Override String getVariable( String variableName, String defaultValue ); @Override String getVariable( String variableName ); @Override boolean getBooleanValueOfVariable( String variableName, boolean defaultValue ); @Override void initializeVariablesFrom( VariableSpace parent ); @Override String[] listVariables(); @Override void setVariable( String variableName, String variableValue ); @Override void shareVariablesWith( VariableSpace space ); @Override void injectVariables( Map<String, String> prop ); void pauseRunning(); void resumeRunning(); boolean isPreview(); void setPreview( boolean preview ); Repository getRepository(); void setRepository( Repository repository ); Map<String, List<StepPerformanceSnapShot>> getStepPerformanceSnapShots(); void setStepPerformanceSnapShots( Map<String, List<StepPerformanceSnapShot>> 
stepPerformanceSnapShots ); List<TransListener> getTransListeners(); void setTransListeners( List<TransListener> transListeners ); void addTransListener( TransListener transListener ); void setTransStoppedListeners( List<TransStoppedListener> transStoppedListeners ); List<TransStoppedListener> getTransStoppedListeners(); void addTransStoppedListener( TransStoppedListener transStoppedListener ); boolean isPaused(); void setPaused( boolean paused ); boolean isStopped(); void setStopped( boolean stopped ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer ); static void monitorRemoteTransformation( LogChannelInterface log, String carteObjectId, String transName, SlaveServer remoteSlaveServer, int sleepTimeSeconds ); @Override void addParameterDefinition( String key, String defValue, String description ); @Override String getParameterDefault( String key ); @Override String getParameterDescription( String key ); @Override String getParameterValue( String key ); @Override String[] listParameters(); @Override void setParameterValue( String key, String value ); @Override void eraseParameters(); @Override void clearParameters(); @Override void activateParameters(); @Override void copyParametersFrom( NamedParams params ); Trans getParentTrans(); void setParentTrans( Trans parentTrans ); String getMappingStepName(); void setMappingStepName( String mappingStepName ); void setSocketRepository( SocketRepository socketRepository ); SocketRepository getSocketRepository(); @Override String getObjectName(); @Override String getObjectCopy(); @Override String getFilename(); @Override String getLogChannelId(); @Override ObjectId getObjectId(); @Override ObjectRevision getObjectRevision(); @Override LoggingObjectType getObjectType(); @Override LoggingObjectInterface getParent(); @Override RepositoryDirectoryInterface getRepositoryDirectory(); @Override LogLevel getLogLevel(); void setLogLevel( LogLevel 
logLevel ); List<LoggingHierarchy> getLoggingHierarchy(); @Deprecated Map<String, Trans> getActiveSubtransformations(); void addActiveSubTransformation( final String subTransName, Trans subTrans ); Trans removeActiveSubTransformation( final String subTransName ); Trans getActiveSubTransformation( final String subTransName ); Map<String, Job> getActiveSubjobs(); @Override String getContainerObjectId(); void setContainerObjectId( String containerObjectId ); @Override Date getRegistrationDate(); void setServletPrintWriter( PrintWriter servletPrintWriter ); PrintWriter getServletPrintWriter(); @Override String getExecutingServer(); @Override void setExecutingServer( String executingServer ); @Override String getExecutingUser(); @Override void setExecutingUser( String executingUser ); @Override boolean isGatheringMetrics(); @Override void setGatheringMetrics( boolean gatheringMetrics ); @Override boolean isForcingSeparateLogging(); @Override void setForcingSeparateLogging( boolean forcingSeparateLogging ); List<ResultFile> getResultFiles(); void setResultFiles( List<ResultFile> resultFiles ); List<RowMetaAndData> getResultRows(); void setResultRows( List<RowMetaAndData> resultRows ); Result getPreviousResult(); void setPreviousResult( Result previousResult ); Hashtable<String, Counter> getCounters(); void setCounters( Hashtable<String, Counter> counters ); String[] getArguments(); void setArguments( String[] arguments ); void clearError(); String getTransactionId(); void setTransactionId( String transactionId ); String calculateTransactionId(); IMetaStore getMetaStore(); void setMetaStore( IMetaStore metaStore ); void setServletReponse( HttpServletResponse response ); HttpServletResponse getServletResponse(); void setServletRequest( HttpServletRequest request ); HttpServletRequest getServletRequest(); List<DelegationListener> getDelegationListeners(); void setDelegationListeners( List<DelegationListener> delegationListeners ); void addDelegationListener( 
DelegationListener delegationListener ); synchronized void doTopologySortOfSteps(); @Override Map<String, Object> getExtensionDataMap(); static final String REPLAY_DATE_FORMAT; public List<RowSet> rowsets; public int class_nr; static final int TYPE_DISP_1_1; static final int TYPE_DISP_1_N; static final int TYPE_DISP_N_1; static final int TYPE_DISP_N_N; static final int TYPE_DISP_N_M; static final String STRING_FINISHED; static final String STRING_FINISHED_WITH_ERRORS; static final String STRING_RUNNING; static final String STRING_PAUSED; static final String STRING_PREPARING; static final String STRING_INITIALIZING; static final String STRING_WAITING; static final String STRING_STOPPED; static final String STRING_HALTING; static final String CONFIGURATION_IN_EXPORT_FILENAME; }
@Test public void testConfigureParameters() throws Exception { TransMeta transMeta = new TransMeta(); transMeta.addParameterDefinition( TEST_PARAM_NAME, DEFAULT_PARAM_VALUE, "This tests a default parameter" ); assertEquals( "Default parameter was not set correctly on TransMeta", DEFAULT_PARAM_VALUE, transMeta.getParameterDefault( TEST_PARAM_NAME ) ); assertEquals( "Parameter value should be blank in TransMeta", "", transMeta.getParameterValue( TEST_PARAM_NAME ) ); Trans trans = new Trans( transMeta ); assertEquals( "Default parameter was not set correctly on Trans", DEFAULT_PARAM_VALUE, trans.getParameterDefault( TEST_PARAM_NAME ) ); assertEquals( "Parameter value should be blank in Trans", "", trans.getParameterValue( TEST_PARAM_NAME ) ); NamedParams params = new NamedParamsDefault(); params.addParameterDefinition( TEST_PARAM_NAME, NOT_DEFAULT_PARAM_VALUE, "This tests a non-default parameter" ); params.setParameterValue( TEST_PARAM_NAME, NOT_DEFAULT_PARAM_VALUE ); Pan.configureParameters( trans, params, transMeta ); assertEquals( "Parameter was not set correctly in Trans", NOT_DEFAULT_PARAM_VALUE, trans.getParameterValue( TEST_PARAM_NAME ) ); assertEquals( "Parameter was not set correctly in TransMeta", NOT_DEFAULT_PARAM_VALUE, transMeta.getParameterValue( TEST_PARAM_NAME ) ); }
protected static void configureParameters( Trans trans, NamedParams optionParams, TransMeta transMeta ) throws UnknownParamException { trans.initializeVariablesFrom( null ); trans.getTransMeta().setInternalKettleVariables( trans ); String[] transParams = trans.listParameters(); for ( String param : transParams ) { String value = optionParams.getParameterValue( param ); if ( value != null ) { trans.setParameterValue( param, value ); transMeta.setParameterValue( param, value ); } } trans.activateParameters(); }
Pan { protected static void configureParameters( Trans trans, NamedParams optionParams, TransMeta transMeta ) throws UnknownParamException { trans.initializeVariablesFrom( null ); trans.getTransMeta().setInternalKettleVariables( trans ); String[] transParams = trans.listParameters(); for ( String param : transParams ) { String value = optionParams.getParameterValue( param ); if ( value != null ) { trans.setParameterValue( param, value ); transMeta.setParameterValue( param, value ); } } trans.activateParameters(); } }
Pan { protected static void configureParameters( Trans trans, NamedParams optionParams, TransMeta transMeta ) throws UnknownParamException { trans.initializeVariablesFrom( null ); trans.getTransMeta().setInternalKettleVariables( trans ); String[] transParams = trans.listParameters(); for ( String param : transParams ) { String value = optionParams.getParameterValue( param ); if ( value != null ) { trans.setParameterValue( param, value ); transMeta.setParameterValue( param, value ); } } trans.activateParameters(); } }
Pan { protected static void configureParameters( Trans trans, NamedParams optionParams, TransMeta transMeta ) throws UnknownParamException { trans.initializeVariablesFrom( null ); trans.getTransMeta().setInternalKettleVariables( trans ); String[] transParams = trans.listParameters(); for ( String param : transParams ) { String value = optionParams.getParameterValue( param ); if ( value != null ) { trans.setParameterValue( param, value ); transMeta.setParameterValue( param, value ); } } trans.activateParameters(); } static void main( String[] a ); }
Pan { protected static void configureParameters( Trans trans, NamedParams optionParams, TransMeta transMeta ) throws UnknownParamException { trans.initializeVariablesFrom( null ); trans.getTransMeta().setInternalKettleVariables( trans ); String[] transParams = trans.listParameters(); for ( String param : transParams ) { String value = optionParams.getParameterValue( param ); if ( value != null ) { trans.setParameterValue( param, value ); transMeta.setParameterValue( param, value ); } } trans.activateParameters(); } static void main( String[] a ); }
@Test public void testGetInstance() { CompressionPluginType instance = CompressionPluginType.getInstance(); CompressionPluginType instance2 = CompressionPluginType.getInstance(); assertTrue( instance == instance2 ); assertNotNull( instance ); CompressionPluginType.pluginType = null; CompressionPluginType instance3 = CompressionPluginType.getInstance(); assertFalse( instance == instance3 ); }
public static CompressionPluginType getInstance() { if ( pluginType == null ) { pluginType = new CompressionPluginType(); } return pluginType; }
CompressionPluginType extends BasePluginType implements PluginTypeInterface { public static CompressionPluginType getInstance() { if ( pluginType == null ) { pluginType = new CompressionPluginType(); } return pluginType; } }
CompressionPluginType extends BasePluginType implements PluginTypeInterface { public static CompressionPluginType getInstance() { if ( pluginType == null ) { pluginType = new CompressionPluginType(); } return pluginType; } private CompressionPluginType(); }
CompressionPluginType extends BasePluginType implements PluginTypeInterface { public static CompressionPluginType getInstance() { if ( pluginType == null ) { pluginType = new CompressionPluginType(); } return pluginType; } private CompressionPluginType(); static CompressionPluginType getInstance(); @Override void handlePluginAnnotation( Class<?> clazz, Annotation annotation, List<String> libraries, boolean nativePluginType, URL pluginFolder ); String[] getNaturalCategoriesOrder(); }
CompressionPluginType extends BasePluginType implements PluginTypeInterface { public static CompressionPluginType getInstance() { if ( pluginType == null ) { pluginType = new CompressionPluginType(); } return pluginType; } private CompressionPluginType(); static CompressionPluginType getInstance(); @Override void handlePluginAnnotation( Class<?> clazz, Annotation annotation, List<String> libraries, boolean nativePluginType, URL pluginFolder ); String[] getNaturalCategoriesOrder(); }
@Test public void testGetName() { NoneCompressionProvider provider = (NoneCompressionProvider) factory.getCompressionProviderByName( PROVIDER_NAME ); assertNotNull( provider ); assertEquals( PROVIDER_NAME, provider.getName() ); }
@Override public String getName() { return "None"; }
NoneCompressionProvider implements CompressionProvider { @Override public String getName() { return "None"; } }
NoneCompressionProvider implements CompressionProvider { @Override public String getName() { return "None"; } }
NoneCompressionProvider implements CompressionProvider { @Override public String getName() { return "None"; } @Override CompressionInputStream createInputStream( InputStream in ); @Override boolean supportsInput(); @Override CompressionOutputStream createOutputStream( OutputStream out ); @Override boolean supportsOutput(); @Override String getDescription(); @Override String getName(); @Override String getDefaultExtension(); }
NoneCompressionProvider implements CompressionProvider { @Override public String getName() { return "None"; } @Override CompressionInputStream createInputStream( InputStream in ); @Override boolean supportsInput(); @Override CompressionOutputStream createOutputStream( OutputStream out ); @Override boolean supportsOutput(); @Override String getDescription(); @Override String getName(); @Override String getDefaultExtension(); }
@Test @SuppressWarnings( "deprecation" ) public void testDoGet() throws Exception { baseCartePlugin.doGet( req, resp ); verify( baseCartePlugin ).service( req, resp ); }
@Deprecated @Override public void doGet( HttpServletRequest req, final HttpServletResponse resp ) throws IOException { service( req, resp ); }
BaseCartePlugin extends BaseHttpServlet implements CartePluginInterface, CarteRequestHandler { @Deprecated @Override public void doGet( HttpServletRequest req, final HttpServletResponse resp ) throws IOException { service( req, resp ); } }
BaseCartePlugin extends BaseHttpServlet implements CartePluginInterface, CarteRequestHandler { @Deprecated @Override public void doGet( HttpServletRequest req, final HttpServletResponse resp ) throws IOException { service( req, resp ); } }
BaseCartePlugin extends BaseHttpServlet implements CartePluginInterface, CarteRequestHandler { @Deprecated @Override public void doGet( HttpServletRequest req, final HttpServletResponse resp ) throws IOException { service( req, resp ); } @Deprecated @Override void doGet( HttpServletRequest req, final HttpServletResponse resp ); @Override abstract void handleRequest( CarteRequest request ); @Override abstract String getContextPath(); String getService(); String toString(); }
BaseCartePlugin extends BaseHttpServlet implements CartePluginInterface, CarteRequestHandler { @Deprecated @Override public void doGet( HttpServletRequest req, final HttpServletResponse resp ) throws IOException { service( req, resp ); } @Deprecated @Override void doGet( HttpServletRequest req, final HttpServletResponse resp ); @Override abstract void handleRequest( CarteRequest request ); @Override abstract String getContextPath(); String getService(); String toString(); }
/**
 * Verifies that the "None" provider produces usable input streams, both via direct
 * construction and via the {@code createInputStream} factory method.
 */
@Test
public void testCreateInputStream() throws IOException {
  NoneCompressionProvider provider =
    (NoneCompressionProvider) factory.getCompressionProviderByName( PROVIDER_NAME );
  ByteArrayInputStream in = new ByteArrayInputStream( "Test".getBytes() );

  // Direct construction must succeed. (assertNotNull on a fresh "new" is vacuous on its
  // own — the constructor either throws or returns non-null — so the real check is below.)
  NoneCompressionInputStream inStream = new NoneCompressionInputStream( in, provider );
  assertNotNull( inStream );

  NoneCompressionInputStream ncis = (NoneCompressionInputStream) provider.createInputStream( in );
  assertNotNull( ncis );
  // "None" compression is a pass-through wrapper (createInputStream just wraps the
  // delegate), so the first byte read must be the first byte of the source data.
  assertEquals( 'T', ncis.read() );
}
/**
 * Wraps the given stream in a pass-through ("None" compression) input stream.
 *
 * @param in the raw stream to wrap
 * @return a {@code NoneCompressionInputStream} backed by {@code in}
 * @throws IOException declared by the CompressionProvider contract; not thrown here
 */
@Override
public CompressionInputStream createInputStream( InputStream in ) throws IOException {
  CompressionInputStream passThrough = new NoneCompressionInputStream( in, this );
  return passThrough;
}
// NOTE(review): auto-extracted skeletons of NoneCompressionProvider around its
// createInputStream method — bodies elided, "class" keyword dropped by the extractor.
// The first pair and second pair are duplicates differing only in the abbreviated
// member list. Not compilable Java; kept byte-identical as dataset residue.
NoneCompressionProvider implements CompressionProvider { @Override public CompressionInputStream createInputStream( InputStream in ) throws IOException { return new NoneCompressionInputStream( in, this ); } }
NoneCompressionProvider implements CompressionProvider { @Override public CompressionInputStream createInputStream( InputStream in ) throws IOException { return new NoneCompressionInputStream( in, this ); } }
NoneCompressionProvider implements CompressionProvider { @Override public CompressionInputStream createInputStream( InputStream in ) throws IOException { return new NoneCompressionInputStream( in, this ); } @Override CompressionInputStream createInputStream( InputStream in ); @Override boolean supportsInput(); @Override CompressionOutputStream createOutputStream( OutputStream out ); @Override boolean supportsOutput(); @Override String getDescription(); @Override String getName(); @Override String getDefaultExtension(); }
NoneCompressionProvider implements CompressionProvider { @Override public CompressionInputStream createInputStream( InputStream in ) throws IOException { return new NoneCompressionInputStream( in, this ); } @Override CompressionInputStream createInputStream( InputStream in ); @Override boolean supportsInput(); @Override CompressionOutputStream createOutputStream( OutputStream out ); @Override boolean supportsOutput(); @Override String getDescription(); @Override String getName(); @Override String getDefaultExtension(); }
/**
 * Verifies that the "None" provider produces usable output streams, both via direct
 * construction and via the {@code createOutputStream} factory method.
 */
@Test
public void testCreateOutputStream() throws IOException {
  NoneCompressionProvider provider =
    (NoneCompressionProvider) factory.getCompressionProviderByName( PROVIDER_NAME );
  ByteArrayOutputStream out = new ByteArrayOutputStream();

  // Direct construction must succeed. (assertNotNull on a fresh "new" is vacuous on its
  // own — the constructor either throws or returns non-null — so the real check is below.)
  NoneCompressionOutputStream outStream = new NoneCompressionOutputStream( out, provider );
  assertNotNull( outStream );

  NoneCompressionOutputStream ncis = (NoneCompressionOutputStream) provider.createOutputStream( out );
  assertNotNull( ncis );
  // "None" compression is a pass-through wrapper (createOutputStream just wraps the
  // delegate), so bytes written must land unchanged in the underlying sink.
  ncis.write( "Test".getBytes() );
  ncis.close();
  assertEquals( "Test", out.toString() );
}
/**
 * Wraps the given stream in a pass-through ("None" compression) output stream.
 *
 * @param out the raw stream to wrap
 * @return a {@code NoneCompressionOutputStream} backed by {@code out}
 * @throws IOException declared by the CompressionProvider contract; not thrown here
 */
@Override
public CompressionOutputStream createOutputStream( OutputStream out ) throws IOException {
  CompressionOutputStream passThrough = new NoneCompressionOutputStream( out, this );
  return passThrough;
}
// NOTE(review): auto-extracted skeletons of NoneCompressionProvider around its
// createOutputStream method — bodies elided, "class" keyword dropped by the extractor.
// The first pair and second pair are duplicates differing only in the abbreviated
// member list. Not compilable Java; kept byte-identical as dataset residue.
NoneCompressionProvider implements CompressionProvider { @Override public CompressionOutputStream createOutputStream( OutputStream out ) throws IOException { return new NoneCompressionOutputStream( out, this ); } }
NoneCompressionProvider implements CompressionProvider { @Override public CompressionOutputStream createOutputStream( OutputStream out ) throws IOException { return new NoneCompressionOutputStream( out, this ); } }
NoneCompressionProvider implements CompressionProvider { @Override public CompressionOutputStream createOutputStream( OutputStream out ) throws IOException { return new NoneCompressionOutputStream( out, this ); } @Override CompressionInputStream createInputStream( InputStream in ); @Override boolean supportsInput(); @Override CompressionOutputStream createOutputStream( OutputStream out ); @Override boolean supportsOutput(); @Override String getDescription(); @Override String getName(); @Override String getDefaultExtension(); }
NoneCompressionProvider implements CompressionProvider { @Override public CompressionOutputStream createOutputStream( OutputStream out ) throws IOException { return new NoneCompressionOutputStream( out, this ); } @Override CompressionInputStream createInputStream( InputStream in ); @Override boolean supportsInput(); @Override CompressionOutputStream createOutputStream( OutputStream out ); @Override boolean supportsOutput(); @Override String getDescription(); @Override String getName(); @Override String getDefaultExtension(); }
/**
 * Verifies that the stream reports the provider it was created with.
 */
@Test
public void getCompressionProvider() {
  CompressionProvider provider = outStream.getCompressionProvider();
  // JUnit convention: expected value first, actual second — the original had them
  // swapped, which produces a misleading failure message ("expected:<actual>...").
  assertEquals( PROVIDER_NAME, provider.getName() );
}
/**
 * Exposes the provider that created this stream.
 *
 * @return the {@code CompressionProvider} associated with this stream
 */
public CompressionProvider getCompressionProvider() {
  return compressionProvider;
}
// NOTE(review): auto-extracted skeletons of CompressionOutputStream around its
// getCompressionProvider accessor — bodies elided, "class" keyword dropped by the
// extractor; the last two lines are duplicates listing the full member set. Not
// compilable Java; kept byte-identical as dataset residue.
CompressionOutputStream extends OutputStream { public CompressionProvider getCompressionProvider() { return compressionProvider; } }
CompressionOutputStream extends OutputStream { public CompressionProvider getCompressionProvider() { return compressionProvider; } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); }
CompressionOutputStream extends OutputStream { public CompressionProvider getCompressionProvider() { return compressionProvider; } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); CompressionProvider getCompressionProvider(); void addEntry( String filename, String extension ); @Override void close(); @Override void write( int b ); @Override void write( byte[] b ); @Override void write( byte[] b, int off, int len ); }
CompressionOutputStream extends OutputStream { public CompressionProvider getCompressionProvider() { return compressionProvider; } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); CompressionProvider getCompressionProvider(); void addEntry( String filename, String extension ); @Override void close(); @Override void write( int b ); @Override void write( byte[] b ); @Override void write( byte[] b, int off, int len ); }
/**
 * Smoke test: closing a freshly created stream must not throw.
 * (No assertion possible here — DummyCompressionOS exposes no observable close state.)
 */
@Test
public void testClose() throws IOException {
  CompressionProvider provider = outStream.getCompressionProvider();
  outStream = new DummyCompressionOS( new ByteArrayOutputStream(), provider );
  outStream.close();
}
/**
 * Closes this stream by closing the wrapped delegate stream.
 *
 * @throws IOException if closing the delegate fails
 */
@Override
public void close() throws IOException {
  delegate.close();
}
// NOTE(review): auto-extracted skeletons of CompressionOutputStream around its close()
// method — bodies elided, "class" keyword dropped by the extractor; the last two lines
// are duplicates listing the full member set. Not compilable Java; kept byte-identical
// as dataset residue.
CompressionOutputStream extends OutputStream { @Override public void close() throws IOException { delegate.close(); } }
CompressionOutputStream extends OutputStream { @Override public void close() throws IOException { delegate.close(); } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); }
CompressionOutputStream extends OutputStream { @Override public void close() throws IOException { delegate.close(); } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); CompressionProvider getCompressionProvider(); void addEntry( String filename, String extension ); @Override void close(); @Override void write( int b ); @Override void write( byte[] b ); @Override void write( byte[] b, int off, int len ); }
CompressionOutputStream extends OutputStream { @Override public void close() throws IOException { delegate.close(); } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); CompressionProvider getCompressionProvider(); void addEntry( String filename, String extension ); @Override void close(); @Override void write( int b ); @Override void write( byte[] b ); @Override void write( byte[] b, int off, int len ); }
/**
 * Smoke test: writing through a freshly created stream must not throw.
 * (No assertion made — DummyCompressionOS is project-defined and may buffer or
 * transform the bytes, so the sink contents cannot be safely asserted here.)
 */
@Test
public void testWrite() throws IOException {
  CompressionProvider provider = outStream.getCompressionProvider();
  outStream = new DummyCompressionOS( new ByteArrayOutputStream(), provider );
  outStream.write( "Test".getBytes() );
}
/**
 * Writes a single byte to the wrapped delegate stream.
 *
 * @param b the byte to write (low-order 8 bits of the int)
 * @throws IOException if the delegate write fails
 */
@Override
public void write( int b ) throws IOException {
  delegate.write( b );
}
// NOTE(review): auto-extracted skeletons of CompressionOutputStream around its
// write(int) method — bodies elided, "class" keyword dropped by the extractor; the last
// two lines are duplicates listing the full member set. Not compilable Java; kept
// byte-identical as dataset residue.
CompressionOutputStream extends OutputStream { @Override public void write( int b ) throws IOException { delegate.write( b ); } }
CompressionOutputStream extends OutputStream { @Override public void write( int b ) throws IOException { delegate.write( b ); } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); }
CompressionOutputStream extends OutputStream { @Override public void write( int b ) throws IOException { delegate.write( b ); } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); CompressionProvider getCompressionProvider(); void addEntry( String filename, String extension ); @Override void close(); @Override void write( int b ); @Override void write( byte[] b ); @Override void write( byte[] b, int off, int len ); }
CompressionOutputStream extends OutputStream { @Override public void write( int b ) throws IOException { delegate.write( b ); } CompressionOutputStream( OutputStream out, CompressionProvider provider ); private CompressionOutputStream(); CompressionProvider getCompressionProvider(); void addEntry( String filename, String extension ); @Override void close(); @Override void write( int b ); @Override void write( byte[] b ); @Override void write( byte[] b, int off, int len ); }