text (stringlengths 63 to 450k)
|
---|
private void METHOD_1 ( TYPE_1 VAR_1 ) { if ( VAR_1 != null ) { try { VAR_1 . METHOD_2 ( ) ; } catch ( TYPE_2 VAR_2 ) { VAR_3 . error ( STRING_1 , VAR_2 ) ; throw new TYPE_3 ( ) ; } } }
|
private long getTime ( Record record ) throws OnRecordErrorException { Field timeField = record . get ( conf . timeField ) ; if ( timeField . getType ( ) == Field . Type . DATE || timeField . getType ( ) == Field . Type . DATETIME || timeField . getType ( ) == Field . Type . TIME ) { return timeField . getValueAsDatetime ( ) . getTime ( ) ; } else if ( timeField . getType ( ) == Field . Type . LONG ) { return timeField . getValueAsLong ( ) ; } throw new OnRecordErrorException ( Errors . INFLUX_09 , timeField . getType ( ) ) ; }
|
@ Override public void update ( double extrp ) { movement . setDirection ( 3 , 0 ) ; movement . update ( extrp ) ; jump . update ( extrp ) ; body . update ( extrp ) ; transformable . moveLocation ( extrp , body , movement , jump ) ; tileCollidable . update ( extrp ) ; }
|
@ PostConstruct public void start ( ) { if ( started . compareAndSet ( false , true ) ) { refreshExecutor . scheduleWithFixedDelay ( this :: refreshAndStartQueries , 1 , 1 , TimeUnit . MILLISECONDS ) ; } }
|
private void closeOnSuccess ( ) { Timer closeTimer = new Timer ( ) { /** * @see com.google.gwt.user.client.Timer#run() */ @ Override public void run ( ) { CmsReplaceDialog . this . hide ( ) ; } } ; closeTimer . schedule ( 1000 ) ; }
|
@ Override public int e ( Throwable throwable ) { throwable . printStackTrace ( ) ; System . err . println ( throwable . toString ( ) ) ; return 0 ; }
|
ANNOTATION_1 public TYPE_1 METHOD_1 ( ANNOTATION_2 final TYPE_2 VAR_1 ) { final TYPE_3 VAR_2 = VAR_1 . METHOD_2 ( ) ; final String VAR_3 = VAR_2 . METHOD_3 ( ) ; final String VAR_4 = VAR_2 . METHOD_4 ( ) ; final String VAR_5 = VAR_2 . METHOD_5 ( ) ; final String VAR_6 = VAR_2 . METHOD_6 ( ) ; final TYPE_4 VAR_7 = METHOD_7 ( VAR_3 , VAR_4 , VAR_5 , VAR_6 ) ; if ( VAR_7 == null ) { VAR_1 . METHOD_8 ( ) ; return TYPE_1 . NULL ; } final TYPE_5 VAR_8 = VAR_7 . METHOD_9 ( ) ; final TYPE_6 VAR_9 ; try { VAR_9 = VAR_1 . METHOD_10 ( VAR_8 ) ; } catch ( TYPE_7 | TYPE_8 VAR_10 ) { VAR_1 . METHOD_11 ( new TYPE_9 ( VAR_10 ) ) ; return TYPE_1 . NULL ; } final TYPE_10 < String , TYPE_11 > VAR_11 = VAR_9 . METHOD_12 ( ) ; final TYPE_12 < ? > VAR_12 = VAR_9 . METHOD_13 ( ) ; final String VAR_13 = VAR_12 . METHOD_14 ( ) . getName ( ) ; if ( ! VAR_7 . METHOD_15 ( VAR_13 ) ) { VAR_1 . METHOD_16 ( ) ; return TYPE_1 . NULL ; } final TYPE_13 VAR_14 = VAR_7 . getView ( VAR_13 ) ; final TYPE_14 VAR_15 = METHOD_17 ( VAR_14 , VAR_1 . METHOD_18 ( ) ) ; if ( VAR_15 == null ) { VAR_1 . METHOD_19 ( ) ; return TYPE_1 . NULL ; } final boolean VAR_16 = VAR_14 . METHOD_20 ( VAR_15 ) ; final boolean VAR_17 = VAR_16 && VAR_15 . METHOD_21 ( ) == void . class ; if ( VAR_17 ) { METHOD_22 ( VAR_1 , VAR_11 , VAR_12 , VAR_14 ) ; VAR_9 . METHOD_23 ( null ) ; } else if ( VAR_16 ) { VAR_1 . METHOD_24 ( ) ; } final TYPE_15 TYPE_16 = new TYPE_15 ( ) ; TYPE_17 VAR_18 = ( ) - > { if ( ! TYPE_16 . METHOD_25 ( ) ) { if ( ! VAR_17 ) VAR_1 . METHOD_26 ( ) ; return ; } final TYPE_11 result ; TYPE_18 VAR_19 = VAR_1 . METHOD_27 ( TYPE_18 . class ) ; if ( VAR_19 != null ) { TYPE_19 . METHOD_28 ( VAR_19 ) ; } else { TYPE_19 . METHOD_28 ( new TYPE_20 ( ) { ANNOTATION_1 public TYPE_21 METHOD_29 ( ) { return null ; } ANNOTATION_1 public TYPE_22 METHOD_30 ( ) { return VAR_1 . METHOD_30 ( ) ; } } ) ; } try { final TYPE_10 < String , TYPE_11 > VAR_20 = new TYPE_23 < > ( ) ; result = METHOD_31 ( VAR_14 , VAR_15 , VAR_1 , VAR_9 , TYPE_16 , VAR_20 ) ; VAR_11 . METHOD_32 ( VAR_20 ) ; } catch ( TYPE_24 VAR_21 ) { TYPE_25 . METHOD_33 ( STRING_1 , VAR_15 , VAR_6 ) ; if ( ! VAR_17 ) VAR_1 . METHOD_8 ( ) ; return ; } catch ( TYPE_26 VAR_21 ) { TYPE_25 . METHOD_33 ( STRING_2 , VAR_15 , VAR_6 ) ; if ( ! VAR_17 ) VAR_1 . METHOD_8 ( ) ; return ; } catch ( TYPE_27 VAR_21 ) { if ( ! VAR_17 ) VAR_1 . METHOD_26 ( ) ; return ; } catch ( TYPE_28 VAR_22 ) { if ( VAR_17 ) return ; final TYPE_28 VAR_23 ; final TYPE_29 VAR_24 = VAR_22 . METHOD_34 ( ) ; if ( VAR_14 . METHOD_35 ( ) instanceof TYPE_30 && VAR_22 instanceof TYPE_9 && VAR_24 != null ) { if ( ! ( VAR_14 . METHOD_35 ( ) . METHOD_36 ( VAR_24 ) ) ) { VAR_23 = new TYPE_9 ( VAR_22 . METHOD_37 ( ) ) ; } else { VAR_23 = VAR_22 ; } } else { VAR_23 = VAR_22 ; } VAR_1 . METHOD_11 ( VAR_23 ) ; return ; } finally { TYPE_19 . METHOD_38 ( ) ; } if ( ! VAR_17 ) try { METHOD_22 ( VAR_1 , VAR_11 , VAR_12 , VAR_14 ) ; VAR_9 . METHOD_23 ( result ) ; } catch ( TYPE_29 VAR_25 ) { TYPE_31 . METHOD_39 ( VAR_25 , VAR_15 , VAR_6 , VAR_3 , VAR_4 , VAR_5 ) ; } } ; execute ( VAR_1 , VAR_18 , VAR_16 ) ; return TYPE_16 : : METHOD_40 ; }
|
private Map < String , String > getMBeanValues ( MBeanServerConnection cnx , ObjectName on , String ... attributeNames ) throws InstanceNotFoundException , IOException , ReflectionException , IntrospectionException { if ( attributeNames == null ) { MBeanInfo info = cnx . getMBeanInfo ( on ) ; MBeanAttributeInfo [ ] attributeArray = info . getAttributes ( ) ; int i = 0 ; attributeNames = new String [ attributeArray . length ] ; for ( MBeanAttributeInfo ai : attributeArray ) attributeNames [ i ++ ] = ai . getName ( ) ; } AttributeList attributes = cnx . getAttributes ( on , attributeNames ) ; Map < String , String > values = new HashMap < String , String > ( ) ; for ( javax . management . Attribute attribute : attributes . asList ( ) ) { Object value = attribute . getValue ( ) ; values . put ( attribute . getName ( ) , value == null ? "" : value . toString ( ) ) ; } return values ; }
|
@ Override protected void subscribeActual ( SingleObserver < ? super FirebaseUser > observer ) { Listener listener = new Listener ( observer ) ; observer . onSubscribe ( listener ) ; user . unlink ( provider ) . addOnCompleteListener ( listener ) ; }
|
public void METHOD_1 ( final TYPE_1 VAR_1 ) { if ( VAR_2 ) { VAR_3 . METHOD_2 ( STRING_1 + VAR_1 . METHOD_3 ( ) ) ; } final TYPE_2 VAR_4 = VAR_5 . remove ( VAR_1 . METHOD_3 ( ) ) ; if ( VAR_4 != null ) { VAR_4 . METHOD_4 ( ) . METHOD_5 ( VAR_1 . METHOD_3 ( ) ) ; } if ( VAR_3 . METHOD_6 ( ) ) { VAR_3 . METHOD_7 ( STRING_2 + VAR_1 . METHOD_3 ( ) ) ; } }
|
public static void snapPoints ( List < ? extends Line2D . Float > rulings , float xThreshold , float yThreshold ) { // collect points and keep a Line -> p1,p2 map Map < Line2D . Float , Point2D [ ] > linesToPoints = new HashMap <> ( ) ; List < Point2D > points = new ArrayList <> ( ) ; for ( Line2D . Float r : rulings ) { Point2D p1 = r . getP1 ( ) ; Point2D p2 = r . getP2 ( ) ; linesToPoints . put ( r , new Point2D [ ] { p1 , p2 } ) ; points . add ( p1 ) ; points . add ( p2 ) ; } // snap by X Collections . sort ( points , new Comparator < Point2D > ( ) { @ Override public int compare ( Point2D arg0 , Point2D arg1 ) { return java . lang . Double . compare ( arg0 . getX ( ) , arg1 . getX ( ) ) ; } } ) ; List < List < Point2D > > groupedPoints = new ArrayList <> ( ) ; groupedPoints . add ( new ArrayList <> ( Arrays . asList ( new Point2D [ ] { points . get ( 0 ) } ) ) ) ; for ( Point2D p : points . subList ( 1 , points . size ( ) - 1 ) ) { List < Point2D > last = groupedPoints . get ( groupedPoints . size ( ) - 1 ) ; if ( Math . abs ( p . getX ( ) - last . get ( 0 ) . getX ( ) ) < xThreshold ) { groupedPoints . get ( groupedPoints . size ( ) - 1 ) . add ( p ) ; } else { groupedPoints . add ( new ArrayList <> ( Arrays . asList ( new Point2D [ ] { p } ) ) ) ; } } for ( List < Point2D > group : groupedPoints ) { float avgLoc = 0 ; for ( Point2D p : group ) { avgLoc += p . getX ( ) ; } avgLoc /= group . size ( ) ; for ( Point2D p : group ) { p . setLocation ( avgLoc , p . getY ( ) ) ; } } // --- // snap by Y Collections . sort ( points , new Comparator < Point2D > ( ) { @ Override public int compare ( Point2D arg0 , Point2D arg1 ) { return java . lang . Double . compare ( arg0 . getY ( ) , arg1 . getY ( ) ) ; } } ) ; groupedPoints = new ArrayList <> ( ) ; groupedPoints . add ( new ArrayList <> ( Arrays . asList ( new Point2D [ ] { points . get ( 0 ) } ) ) ) ; for ( Point2D p : points . subList ( 1 , points . size ( ) - 1 ) ) { List < Point2D > last = groupedPoints . get ( groupedPoints . size ( ) - 1 ) ; if ( Math . abs ( p . getY ( ) - last . get ( 0 ) . getY ( ) ) < yThreshold ) { groupedPoints . get ( groupedPoints . size ( ) - 1 ) . add ( p ) ; } else { groupedPoints . add ( new ArrayList <> ( Arrays . asList ( new Point2D [ ] { p } ) ) ) ; } } for ( List < Point2D > group : groupedPoints ) { float avgLoc = 0 ; for ( Point2D p : group ) { avgLoc += p . getY ( ) ; } avgLoc /= group . size ( ) ; for ( Point2D p : group ) { p . setLocation ( p . getX ( ) , avgLoc ) ; } } // --- // finally, modify lines for ( Map . Entry < Line2D . Float , Point2D [ ] > ltp : linesToPoints . entrySet ( ) ) { Point2D [ ] p = ltp . getValue ( ) ; ltp . getKey ( ) . setLine ( p [ 0 ] , p [ 1 ] ) ; } }
|
@ Override public DescribeJobsResult describeJobs ( DescribeJobsRequest request ) { request = beforeClientExecution ( request ) ; return executeDescribeJobs ( request ) ; }
|
public void setValue ( TYPE_1 value , int context ) throws TYPE_2 { if ( METHOD_1 ( STRING_1 ) ) { if ( this . value != null ) throw new TYPE_2 ( STRING_2 + name + STRING_3 ) ; if ( value == null ) return ; } if ( type != null && type != TYPE_1 . class && value != null ) { this . value = TYPE_3 . METHOD_2 ( value , type , context == VAR_1 ? TYPE_3 . VAR_2 : TYPE_3 . VAR_3 ) ; value = this . value ; } this . value = value ; if ( this . value == null && context != VAR_1 ) this . value = TYPE_4 . METHOD_3 ( type ) ; if ( VAR_4 != null ) this . value = VAR_4 . METHOD_4 ( this . value , false ) ; }
|
protected void remove ( ) { // remove from scheduler super . removeFromScheduler ( ) ; // detach this timer from the ac final ActivityContext ac = sleeContainer . getActivityContextFactory ( ) . getActivityContext ( data . getActivityContextHandle ( ) ) ; if ( ac != null ) { ac . detachTimer ( data . getTimerID ( ) ) ; } }
|
protected BindingImpl < T > annotatedWithInternal ( Annotation annotation ) { checkNotNull ( annotation , "annotation" ) ; checkNotAnnotated ( ) ; return setBinding ( binding . withKey ( Key . get ( this . binding . getKey ( ) . getTypeLiteral ( ) , annotation ) ) ) ; }
|
private void METHOD_1 ( TYPE_1 < TYPE_2 > VAR_1 , TYPE_3 VAR_2 ) { VAR_1 . remove ( ) ; TYPE_4 type = TYPE_4 . METHOD_2 ( VAR_2 . VAR_3 ) ; String VAR_4 = VAR_2 . VAR_5 ; String VAR_3 = type . VAR_6 ( ) ; if ( VAR_2 . METHOD_3 ( ) != VAR_7 . VAR_8 ) { String VAR_9 = type . VAR_10 ( ) ; VAR_4 = STRING_1 + VAR_9 + VAR_4 . substring ( 1 ) ; } VAR_1 . add ( new TYPE_5 ( VAR_2 . name , VAR_4 , VAR_11 , VAR_3 ) ) ; }
|
public void METHOD_1 ( TYPE_1 VAR_1 , TYPE_2 VAR_2 ) { if ( VAR_1 == null ) { throw new TYPE_3 ( STRING_1 ) ; } try { VAR_2 . METHOD_1 ( VAR_1 . getId ( ) , VAR_3 ) ; VAR_2 . METHOD_1 ( VAR_1 . getName ( ) , VAR_4 ) ; VAR_2 . METHOD_1 ( VAR_1 . getConfig ( ) , VAR_5 ) ; VAR_2 . METHOD_1 ( VAR_1 . METHOD_2 ( ) , VAR_6 ) ; VAR_2 . METHOD_1 ( VAR_1 . METHOD_3 ( ) , VAR_7 ) ; } catch ( TYPE_4 VAR_8 ) { throw new TYPE_3 ( STRING_2 + VAR_8 . METHOD_4 ( ) , VAR_8 ) ; } }
|
private void METHOD_1 ( String VAR_1 , TYPE_1 type ) { TYPE_2 < TYPE_1 > VAR_2 = VAR_3 . get ( VAR_1 ) ; if ( VAR_2 == null ) { VAR_2 = new TYPE_3 < TYPE_1 > ( ) ; VAR_3 . put ( VAR_1 , VAR_2 ) ; } synchronized ( VAR_2 ) { if ( ! VAR_2 . contains ( type ) ) { VAR_2 . add ( type ) ; } } }
|
public static void METHOD_1 ( final TYPE_1 VAR_1 ) { if ( VAR_1 . getName ( ) == null || VAR_1 . getName ( ) . isEmpty ( ) ) { throw new TYPE_2 ( TYPE_3 . status ( TYPE_3 . VAR_2 . VAR_3 ) . METHOD_2 ( STRING_1 ) . build ( ) ) ; } if ( VAR_1 . METHOD_3 ( ) == null || VAR_1 . METHOD_3 ( ) . isEmpty ( ) ) { throw new TYPE_2 ( TYPE_3 . status ( TYPE_3 . VAR_2 . VAR_3 ) . METHOD_2 ( STRING_2 ) . build ( ) ) ; } if ( VAR_1 . METHOD_4 ( ) != null && ! VAR_1 . METHOD_4 ( ) . isEmpty ( ) ) { try { TYPE_4 . METHOD_5 ( VAR_1 . METHOD_4 ( ) ) ; } catch ( TYPE_5 VAR_4 ) { throw new TYPE_2 ( TYPE_3 . status ( TYPE_3 . VAR_2 . VAR_3 ) . METHOD_2 ( STRING_3 ) . build ( ) ) ; } TYPE_4 VAR_5 = TYPE_4 . METHOD_5 ( STRING_4 ) ; if ( VAR_5 . METHOD_6 ( VAR_1 . METHOD_4 ( ) ) . METHOD_7 ( ) ) { throw new TYPE_2 ( TYPE_3 . status ( TYPE_3 . VAR_2 . VAR_3 ) . METHOD_2 ( STRING_3 ) . build ( ) ) ; } } }
|
public void setTimestampIsSet ( boolean value ) { __isset_bitfield = org . apache . thrift . EncodingUtils . setBit ( __isset_bitfield , __TIMESTAMP_ISSET_ID , value ) ; }
|
protected void handleServerErro ( HttpStatus statusCode , ClientHttpResponse response ) throws IOException { throw new HttpServerErrorException ( statusCode , response . getStatusText ( ) , response . getHeaders ( ) , getResponseBody ( response ) , getCharset ( response ) ) ; }
|
public < R1 > Kleisli < W , T , R1 > map ( Function < ? super R , ? extends R1 > mapper ) { return kleisliK ( monad , andThen ( am -> monad . map ( mapper , am ) ) ) ; }
|
public synchronized void commit_one_phase ( ) throws RollbackException , HeuristicHazardException , SystemException { if ( tc . isEntryEnabled ( ) ) Tr . entry ( tc , "commit_one_phase" , this ) ; // Ensure timeout cannot rollback the underlying transaction ( ( DistributableTransaction ) _transaction ) . addAssociation ( ) ; // If the transaction that this object represents has already been completed, // raise an exception if necessary. boolean rolledBack = false ; // Use locals in case of async completion boolean sysException = false ; final int state = _transaction . getTransactionState ( ) . getState ( ) ; switch ( state ) { case TransactionState . STATE_ACTIVE : // We have received a commit one phase request // from our superior. This transaction should // no longer act as a subordinate and should // assume superior status prior to beginning // commit processing - this is enabled by calling // the top-level commit operation. // The commit operation may throw the HeuristicHazard exception. In this case // allow it to go back to the caller. The commit call performs all state checks. try { // Resume the transaction created from the incoming // request so that it is installed on the thread. ( ( EmbeddableTranManagerSet ) TransactionManagerFactory . getTransactionManager ( ) ) . resume ( _transaction ) ; // need to prolongFinish to deal with heuristics // as commit_one_phase always calls notifyCompletion _transaction . prolongFinish ( ) ; _transaction . commit_one_phase ( ) ; _transaction . notifyCompletion ( ) ; } catch ( HeuristicMixedException exc ) { // No FFDC code needed. _heuristic = StatefulResource . HEURISTIC_MIXED ; } catch ( HeuristicHazardException exc ) { // No FFDC code needed. _heuristic = StatefulResource . HEURISTIC_HAZARD ; } catch ( HeuristicRollbackException exc ) { // No FFDC code needed. rolledBack = true ; _transaction . notifyCompletion ( ) ; } catch ( RollbackException exc ) { // No FFDC as rollback is a valid response rolledBack = true ; _transaction . notifyCompletion ( ) ; } catch ( Throwable exc ) { // SecurityException/IllegalStateException/SystemException FFDCFilter . processException ( exc , "com.ibm.tx.remote.TransactionWrapper.commit_one_phase" , "456" , this ) ; Tr . error ( tc , "WTRN0070_ONE_PHASE_COMMIT_FAILED" , exc ) ; sysException = true ; _transaction . notifyCompletion ( ) ; // destroy(); } break ; case TransactionState . STATE_COMMITTING : // We should only get in this state if the superior failed to get a // response on the original commit calls. We can be in committing // state either if we are retrying local resources or in recovery // Check the heuristic state and return that. _heuristic = _transaction . getResources ( ) . getHeuristicOutcome ( ) ; if ( _heuristic != StatefulResource . NONE ) break ; // If we are not in any heuristic state then we are not retrying and // must be in recovery about to perform the commit. // Continue to return transient until we have a real outcome to return. // The superior will consider this as heuristic hazard. Also the // same for LPS state. case TransactionState . STATE_LAST_PARTICIPANT : ( ( DistributableTransaction ) _transaction ) . removeAssociation ( ) ; final TRANSIENT tre = new TRANSIENT ( ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit_one_phase" , tre ) ; throw tre ; case TransactionState . STATE_COMMITTED : // this is probably a retry ... check heuristic state _heuristic = _transaction . getResources ( ) . getHeuristicOutcome ( ) ; break ; case TransactionState . 
STATE_HEURISTIC_ON_COMMIT : case TransactionState . STATE_HEURISTIC_ON_ROLLBACK : // Should never get in this state as C1P never gets // in this state as it presumes superior status even // for admin heuristic commit break ; case TransactionState . STATE_ROLLING_BACK : case TransactionState . STATE_ROLLED_BACK : // Transaction timed out or admin heuristic rollback or LPS rollback // again, probably a retry ... throw exception rolledBack = true ; break ; case TransactionState . STATE_NONE : // Transaction has completed and is now finished // Normally the remoteable object would be disconnected from the orb, // but ... timing may mean get got here while it was happenning ( ( DistributableTransaction ) _transaction ) . removeAssociation ( ) ; final OBJECT_NOT_EXIST one = new OBJECT_NOT_EXIST ( ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit_one_phase" , one ) ; throw one ; default : Tr . error ( tc , "WTRN0069_COMMIT_BAD_STATE" , TransactionState . stateToString ( state ) ) ; sysException = true ; break ; } // end switch ( ( DistributableTransaction ) _transaction ) . removeAssociation ( ) ; switch ( _heuristic ) { case StatefulResource . NONE : break ; default : // _transaction.addHeuristic(); final HeuristicHazardException hh = new HeuristicHazardException ( ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit_one_phase" , hh ) ; throw hh ; } if ( rolledBack ) { final TRANSACTION_ROLLEDBACK tre = new TRANSACTION_ROLLEDBACK ( 0 , Boolean . TRUE ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit_one_phase" , tre ) ; throw tre ; } else if ( sysException ) { final INTERNAL ie = new INTERNAL ( MinorCode . LOGIC_ERROR , null ) ; if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit_one_phase" , ie ) ; throw ie ; } if ( tc . isEntryEnabled ( ) ) Tr . exit ( tc , "commit_one_phase" ) ; }
|
public OJSONReader readNext ( final char [ ] iUntil , final boolean iInclude ) throws IOException , ParseException { readNext ( iUntil , iInclude , DEFAULT_JUMP ) ; return this ; }
|
int METHOD_1 ( ) { switch ( VAR_1 ) { case VAR_2 : return VAR_3 . VAR_4 ; case VAR_5 : return VAR_3 . VAR_6 ; case VAR_7 : return VAR_3 . VAR_8 ; case VAR_9 : return VAR_3 . VAR_10 ; case VAR_11 : return VAR_3 . VAR_12 ; } throw new TYPE_1 ( STRING_1 ) ; }
|
private TYPE_1 METHOD_1 ( ) throws TYPE_2 { try { return new TYPE_3 ( TYPE_4 . class . METHOD_2 ( STRING_1 ) ) ; } catch ( TYPE_5 VAR_1 ) { throw new TYPE_6 ( VAR_1 . METHOD_3 ( ) ) ; } }
|
private static Set < String > getColumnNames ( List < ? extends ColumnHandle > columnHandles ) { return columnHandles . stream ( ) . map ( column -> ( JmxColumnHandle ) column ) . map ( JmxColumnHandle :: getColumnName ) . collect ( Collectors . toSet ( ) ) ; }
|
private TYPE_1 < TYPE_2 > METHOD_1 ( String str ) { if ( str == null ) { return null ; } String [ ] VAR_1 = TYPE_3 . split ( str ) ; TYPE_1 < TYPE_2 > results = new TYPE_4 < > ( ) ; for ( String VAR_2 : VAR_1 ) { results . add ( new TYPE_5 ( VAR_2 ) ) ; } return results ; }
|
ANNOTATION_1 public void METHOD_1 ( TYPE_1 VAR_1 ) throws TYPE_2 { assert VAR_2 != null ; if ( ! VAR_2 . METHOD_2 ( ) ) { TYPE_3 VAR_3 = new TYPE_3 ( METHOD_3 ( ) , STRING_1 ) ; VAR_2 . METHOD_4 ( VAR_3 ) ; } VAR_2 = null ; }
|
@ Override public SearchResult getUsers ( String pattern , int limit ) throws RegistryException { try { Result result = customUserRegistry . getUsers ( pattern , limit ) ; return new SearchResult ( result . getList ( ) , result . hasMore ( ) ) ; } catch ( Exception e ) { throw new RegistryException ( e . getMessage ( ) , e ) ; } }
|
public static int [ ] removeFromIntArray ( int [ ] a , int value ) { if ( a == null ) { throw new NullPointerException ( "Array was null" ) ; } int index = - 1 ; for ( int i = 0 ; i < a . length ; i ++ ) { if ( a [ i ] == value ) { index = i ; break ; } } if ( index < 0 ) { throw new IllegalArgumentException ( String . format ( "Element %d not found in array" , value ) ) ; } int [ ] array = new int [ a . length - 1 ] ; if ( index > 0 ) { System . arraycopy ( a , 0 , array , 0 , index ) ; } if ( index < a . length ) { System . arraycopy ( a , index + 1 , array , index , array . length - index ) ; } return array ; }
|
public void checkForWellFormedMessageHeader ( final Node node ) { final boolean shouldGenerateInterfaces = Boolean . getBoolean ( JAVA_GENERATE_INTERFACES ) ; final EncodedDataType blockLengthType = ( EncodedDataType ) containedTypeByNameMap . get ( "blockLength" ) ; final EncodedDataType templateIdType = ( EncodedDataType ) containedTypeByNameMap . get ( "templateId" ) ; final EncodedDataType schemaIdType = ( EncodedDataType ) containedTypeByNameMap . get ( "schemaId" ) ; final EncodedDataType versionType = ( EncodedDataType ) containedTypeByNameMap . get ( "version" ) ; if ( blockLengthType == null ) { XmlSchemaParser . handleError ( node , "composite for message header must have \"blockLength\"" ) ; } else if ( ! isUnsigned ( blockLengthType . primitiveType ( ) ) ) { XmlSchemaParser . handleError ( node , "\"blockLength\" must be unsigned" ) ; } validateHeaderField ( node , "blockLength" , blockLengthType , UINT16 , shouldGenerateInterfaces ) ; validateHeaderField ( node , "templateId" , templateIdType , UINT16 , shouldGenerateInterfaces ) ; validateHeaderField ( node , "schemaId" , schemaIdType , UINT16 , shouldGenerateInterfaces ) ; validateHeaderField ( node , "version" , versionType , UINT16 , shouldGenerateInterfaces ) ; }
|
private boolean hasIndexMaps ( List < Value > indexes ) { return ! indexes . isEmpty ( ) && indexes . stream ( ) . map ( Value :: toMap ) . filter ( map -> map . get ( TYPE ) . toString ( ) . endsWith ( ".index" ) ) . count ( ) == indexes . size ( ) ; }
|
public static Exception getLowestException ( final Exception exception , final String exceptionPrefix ) { if ( exception == null ) { return null ; } Throwable nestedException = ( Throwable ) exception ; Exception lastLowException = null ; while ( nestedException != null ) { if ( nestedException . getClass ( ) . toString ( ) . startsWith ( "class " + exceptionPrefix ) ) { lastLowException = ( Exception ) nestedException ; } nestedException = getInnerException ( nestedException ) ; } return lastLowException ; }
|
public final void increment ( GradientEvaluation other ) { gradient . increment ( other . gradient , 1.0 ) ; objectiveValue += other . objectiveValue ; searchErrors += other . searchErrors ; }
|
@ Override public void encodeEnd ( FacesContext context , UIComponent component ) throws IOException { ColumnToggler columnToggler = ( ColumnToggler ) component ; encodeMarkup ( context , columnToggler ) ; encodeScript ( context , columnToggler ) ; }
|
private String extractServletPath ( HttpServletRequest pReq ) { return pReq . getRequestURI ( ) . substring ( 0 , pReq . getContextPath ( ) . length ( ) ) ; }
|
@ Override protected void deserializedNonNullValue ( T o , ObjectInput in ) throws IOException { this . setCharValue ( o , in . readChar ( ) ) ; }
|
private void METHOD_1 ( TYPE_1 VAR_1 ) { TYPE_2 < ? > VAR_2 = VAR_3 . METHOD_2 ( VAR_1 . getId ( ) ) . get ( ) ; TYPE_3 . METHOD_3 ( VAR_2 . METHOD_4 ( ) , STRING_1 , VAR_1 ) ; if ( VAR_1 . METHOD_5 ( ) ) { if ( VAR_1 . METHOD_6 ( ) == VAR_4 . VAR_5 . VAR_6 ) { VAR_7 . remove ( VAR_2 . getId ( ) ) ; } else { VAR_7 . add ( VAR_2 . getId ( ) ) ; } } VAR_2 . METHOD_7 ( ) . METHOD_8 ( VAR_1 ) ; VAR_3 . METHOD_9 ( VAR_2 ) ; }
|
@ Override public void addCookie ( final RequestContext requestContext , final String cookieValue ) { val request = WebUtils . getHttpServletRequestFromExternalWebflowContext ( requestContext ) ; val response = WebUtils . getHttpServletResponseFromExternalWebflowContext ( requestContext ) ; val theCookieValue = this . casCookieValueManager . buildCookieValue ( cookieValue , request ) ; if ( isRememberMeAuthentication ( requestContext ) ) { LOGGER . trace ( "Creating cookie [{}] for remember-me authentication" , getCookieName ( ) ) ; val cookie = createCookie ( theCookieValue ) ; cookie . setMaxAge ( cookieGenerationContext . getRememberMeMaxAge ( ) ) ; cookie . setSecure ( isCookieSecure ( ) ) ; cookie . setHttpOnly ( isCookieHttpOnly ( ) ) ; cookie . setComment ( "CAS Cookie w/ Remember-Me" ) ; response . addCookie ( cookie ) ; } else { LOGGER . trace ( "Creating cookie [{}]" , getCookieName ( ) ) ; super . addCookie ( response , theCookieValue ) ; } }
|
private static void raiseOverflowException ( Number number , Class targetClass ) { throw new IllegalArgumentException ( "Could not convert '" + number + "' of type " + number . getClass ( ) . getName ( ) + " to target class " + targetClass . getName ( ) + ": overflow" ) ; }
|
public static ClassLoader getClassLoader ( Class < ? > clazz ) { ClassLoader loader = Thread . currentThread ( ) . getContextClassLoader ( ) ; if ( loader != null ) { return loader ; } if ( clazz != null ) { loader = clazz . getClassLoader ( ) ; if ( loader != null ) { return loader ; } } return ClassLoader . getSystemClassLoader ( ) ; }
|
ANNOTATION_1 public TYPE_1 < TYPE_2 > METHOD_1 ( ) { TYPE_3 VAR_1 = new TYPE_3 ( ) ; VAR_1 . VAR_2 = METHOD_2 ( ) ; String VAR_2 = VAR_1 . VAR_2 ; if ( ( VAR_2 != null ) && ( VAR_2 . length ( ) == 0 ) ) { VAR_1 . VAR_2 = null ; } VAR_1 . VAR_3 = METHOD_3 ( ) ; VAR_1 . VAR_4 = METHOD_4 ( ) ; VAR_1 . VAR_5 = METHOD_5 ( ) ; VAR_1 . VAR_6 = METHOD_6 ( ) ; VAR_1 . VAR_7 = METHOD_7 ( ) ; String VAR_7 = VAR_1 . VAR_7 ; if ( ( VAR_7 != null ) && ( VAR_7 . length ( ) == 0 ) ) { VAR_1 . VAR_7 = null ; } TYPE_4 VAR_8 = METHOD_8 ( ) ; if ( VAR_8 != null ) { VAR_1 . VAR_8 = VAR_8 . getTime ( ) ; } else { VAR_1 . VAR_8 = VAR_9 . VAR_10 ; } TYPE_4 VAR_11 = METHOD_9 ( ) ; if ( VAR_11 != null ) { VAR_1 . VAR_11 = VAR_11 . getTime ( ) ; } else { VAR_1 . VAR_11 = VAR_9 . VAR_10 ; } VAR_1 . VAR_12 = METHOD_10 ( ) ; VAR_1 . VAR_13 = METHOD_11 ( ) ; VAR_1 . VAR_14 = METHOD_12 ( ) ; String VAR_14 = VAR_1 . VAR_14 ; if ( ( VAR_14 != null ) && ( VAR_14 . length ( ) == 0 ) ) { VAR_1 . VAR_14 = null ; } VAR_1 . VAR_15 = METHOD_13 ( ) ; String VAR_15 = VAR_1 . VAR_15 ; if ( ( VAR_15 != null ) && ( VAR_15 . length ( ) == 0 ) ) { VAR_1 . VAR_15 = null ; } VAR_1 . main = METHOD_14 ( ) ; return VAR_1 ; }
|
public String METHOD_1 ( TYPE_1 VAR_1 ) { String [ ] info = METHOD_2 ( VAR_1 ) ; assert info != null : STRING_1 + VAR_1 ; return info [ 1 ] ; }
|
private void METHOD_1 ( long VAR_1 ) { TYPE_1 . METHOD_2 ( VAR_2 , STRING_1 , VAR_1 ) ; METHOD_3 ( ) . METHOD_4 ( new TYPE_2 ( ) { ANNOTATION_1 protected void execute ( ) { VAR_3 . execute ( new TYPE_3 ( ) { ANNOTATION_1 public void METHOD_5 ( ) { try { TYPE_1 . METHOD_2 ( VAR_2 , STRING_2 ) ; METHOD_6 ( ) ; } catch ( TYPE_4 VAR_4 ) { TYPE_1 . METHOD_7 ( VAR_2 , VAR_4 , STRING_3 ) ; METHOD_8 ( VAR_4 ) ; } } } ) ; } } , VAR_1 ) ; }
|
ANNOTATION_1 protected TYPE_1 METHOD_1 ( final TYPE_2 VAR_1 , final String key , final String value ) { final TYPE_1 VAR_2 = VAR_3 . METHOD_2 ( VAR_4 . METHOD_3 ( ) . METHOD_4 ( VAR_1 . getId ( ) ) . key ( key ) . value ( value ) . METHOD_5 ( VAR_5 . getValue ( ) ) ) ; METHOD_6 ( VAR_2 . METHOD_7 ( ) ) ; return VAR_2 ; }
|
public TYPE_1 METHOD_1 ( ) throws TYPE_2 { TYPE_3 VAR_1 = METHOD_2 ( ) ; if ( VAR_1 == null ) { return VAR_2 . METHOD_1 ( TYPE_4 . getString ( VAR_3 , STRING_1 ) ) ; } return VAR_2 . METHOD_1 ( VAR_1 . METHOD_3 ( ) ) ; }
|
private Class < T > exploreTypeInfo ( ) { List < Type > types = Generics . typeParamImplementations ( getClass ( ) , ActProvider . class ) ; int sz = types . size ( ) ; E . illegalStateIf ( 1 != sz , "generic type number not match" ) ; Type type = types . get ( 0 ) ; E . illegalArgumentIf ( ! ( type instanceof Class ) , "generic type is not a class: %s" , type ) ; return ( Class ) type ; }
|
private void countSpecials ( Matcher matcher ) { if ( matcher instanceof ProxyMatcher ) { proxyMatchers . add ( ( ProxyMatcher ) matcher ) ; } else if ( matcher instanceof VarFramingMatcher ) { varFramingMatchers . add ( ( VarFramingMatcher ) matcher ) ; } else if ( matcher instanceof MemoMismatchesMatcher ) { memoMismatchesMatchers . add ( ( MemoMismatchesMatcher ) matcher ) ; } }
|
public void mapStartsWith ( Map < String , String > map , String startsWith , boolean includeSubProperties ) { for ( Properties props : this . props ) { mapStartsWith ( map , props , startsWith , includeSubProperties ) ; } for ( Properties props : this . defaultProps ) { mapStartsWith ( map , props , startsWith , includeSubProperties ) ; } }
|
ANNOTATION_1 public boolean METHOD_1 ( final String VAR_1 , final TYPE_1 < String , TYPE_2 > VAR_2 ) { if ( METHOD_2 ( VAR_2 ) ) { if ( ! METHOD_3 ( ) ) { return false ; } return METHOD_4 ( VAR_2 ) ; } return super . METHOD_1 ( VAR_1 , VAR_2 ) ; }
|
private void setContext ( String str , int pos ) { setPreContext ( str , pos ) ; setPostContext ( str , pos ) ; }
|
private static List < String > classNameTokensReversed ( Class theClass ) { return C . list ( Keyword . of ( theClass . getSimpleName ( ) ) . tokens ( ) ) . reverse ( ) ; }
|
@ Override public Iterator < SamlProvider > iterator ( ) { return new StandardResourceIterator < SamlProvider > ( impl . iterator ( ) , SamlProviderImpl . CODEC ) ; }
|
protected TYPE_1 execute ( TYPE_2 VAR_1 , String VAR_2 , String VAR_3 ) { TYPE_3 VAR_4 = null ; try { if ( VAR_5 ) { TYPE_4 VAR_6 = new TYPE_4 ( new TYPE_5 ( ) ) ; VAR_4 = new TYPE_3 ( VAR_6 . METHOD_1 ( ) , VAR_7 , VAR_8 ) ; } else { VAR_4 = new TYPE_3 ( VAR_7 , VAR_8 ) ; } if ( VAR_2 != null ) { TYPE_6 VAR_9 = VAR_4 . METHOD_2 ( VAR_2 , VAR_3 ) ; if ( ! VAR_9 . METHOD_3 ( ) . METHOD_4 ( ) ) { VAR_10 . error ( STRING_1 + VAR_9 . METHOD_3 ( ) ) ; } } return VAR_4 . METHOD_5 ( VAR_1 ) ; } catch ( TYPE_7 VAR_11 ) { String message = VAR_11 . METHOD_6 ( ) ; if ( ! message . startsWith ( STRING_2 ) ) { VAR_10 . error ( VAR_11 . METHOD_6 ( ) , VAR_11 ) ; } } catch ( TYPE_8 VAR_12 ) { VAR_10 . error ( VAR_12 . METHOD_6 ( ) , VAR_12 ) ; } finally { if ( null != VAR_4 ) { VAR_4 . METHOD_7 ( ) ; } } return null ; }
|
private static boolean hasTz ( String pattern ) { boolean inQuote = false ; for ( int i = 0 ; i < pattern . length ( ) ; i ++ ) { char ch = pattern . charAt ( i ) ; // If inside quote, except two quote connected, just copy or exit. if ( inQuote ) { if ( ch == ' ' ) { if ( i + 1 < pattern . length ( ) && pattern . charAt ( i + 1 ) == ' ' ) { // Quote appeared twice continuously, interpret as one quote. ++ i ; } else { inQuote = false ; } } continue ; } // Outside quote now. if ( "Zzv" . indexOf ( ch ) >= 0 ) { return true ; } // Two consecutive quotes is a quote literal, inside or outside of quotes. if ( ch == ' ' ) { if ( i + 1 < pattern . length ( ) && pattern . charAt ( i + 1 ) == ' ' ) { i ++ ; } else { inQuote = true ; } } } return false ; }
|
@ Override public void writePrologue ( XMLOutput xmlOutput ) throws IOException { xmlOutput . beginDocument ( ) ; xmlOutput . openTag ( ROOT_ELEMENT_NAME , new XMLAttributeList ( ) . addAttribute ( "version" , analysisVersion ) . addAttribute ( "sequence" , String . valueOf ( getSequenceNumber ( ) ) ) . addAttribute ( "timestamp" , String . valueOf ( getTimestamp ( ) ) ) . addAttribute ( "analysisTimestamp" , String . valueOf ( getAnalysisTimestamp ( ) ) ) . addAttribute ( "release" , getReleaseName ( ) ) ) ; project . writeXML ( xmlOutput , null , this ) ; }
|
public static void start ( final Cache < Serializable , Object > cache ) { PrivilegedAction < Object > action = new PrivilegedAction < Object > ( ) { public Object run ( ) { cache . start ( ) ; return null ; } } ; SecurityHelper . doPrivilegedAction ( action ) ; }
|
ANNOTATION_1 public void METHOD_1 ( String VAR_1 , TYPE_1 value ) throws TYPE_2 { super . METHOD_1 ( VAR_1 , value ) ; TYPE_3 type = value . getType ( ) ; if ( type == TYPE_3 . VAR_2 ) { String VAR_3 = TYPE_4 . METHOD_2 ( value ) . METHOD_3 ( ) ; TYPE_5 VAR_4 = VAR_5 . get ( VAR_3 ) ; if ( VAR_4 == null ) { try { VAR_4 = TYPE_6 . METHOD_4 ( VAR_6 , VAR_3 . toUpperCase ( VAR_7 . VAR_8 ) ) ; } catch ( TYPE_7 VAR_9 ) { throw TYPE_8 . METHOD_5 ( VAR_3 , VAR_1 , VAR_5 . METHOD_6 ( ) ) ; } } if ( ! VAR_10 . contains ( VAR_4 ) ) { throw TYPE_8 . METHOD_5 ( VAR_3 , VAR_1 , VAR_5 . METHOD_6 ( ) ) ; } if ( ! value . VAR_11 ( ) ) { value . set ( VAR_4 . toString ( ) ) ; } } }
|
@ Override public DataBucket combineBuckets ( final DataBucket [ ] pBuckets ) { checkArgument ( pBuckets . length > 0 , "At least one DataBucket must be provided" ) ; // create entire bucket.. final DataBucket returnVal = new DataBucket ( pBuckets [ 0 ] . getBucketKey ( ) , pBuckets [ 0 ] . getLastBucketPointer ( ) ) ; // ...iterate through the datas and check if it is stored.. for ( int i = 0 ; i < pBuckets [ 0 ] . getDatas ( ) . length ; i ++ ) { boolean bucketSkip = false ; // ... form the newest version to the oldest one.. for ( int j = 0 ; ! bucketSkip && j < pBuckets . length ; j ++ ) { // if the data is not set yet but existing in the current version.. if ( pBuckets [ j ] . getData ( i ) != null ) { // ...break out the loop the next time and.. bucketSkip = true ; // ...set it returnVal . setData ( i , pBuckets [ j ] . getData ( i ) ) ; } } } return returnVal ; }
|
public DataSet getTwiceShallowClone ( ) { DataSet clone = shallowClone ( ) ; for ( int i = 0 ; i < clone . size ( ) ; i ++ ) { DataPoint d = getDataPoint ( i ) ; DataPoint sd = new DataPoint ( d . getNumericalValues ( ) , d . getCategoricalValues ( ) , d . getCategoricalData ( ) ) ; clone . setDataPoint ( i , sd ) ; } return clone ; }
|
public Pair < Pair < String , ByteBuffer > , List < Pair < ByteBuffer , AbstractType > > > getUserTypeParameters ( ) throws SyntaxException , ConfigurationException { if ( isEOS ( ) || str . charAt ( idx ) != ' ' ) throw new IllegalStateException ( ) ; ++ idx ; // skipping '(' skipBlankAndComma ( ) ; String keyspace = readNextIdentifier ( ) ; skipBlankAndComma ( ) ; ByteBuffer typeName = fromHex ( readNextIdentifier ( ) ) ; List < Pair < ByteBuffer , AbstractType > > defs = new ArrayList <> ( ) ; while ( skipBlankAndComma ( ) ) { if ( str . charAt ( idx ) == ' ' ) { ++ idx ; return Pair . create ( Pair . create ( keyspace , typeName ) , defs ) ; } ByteBuffer name = fromHex ( readNextIdentifier ( ) ) ; skipBlank ( ) ; if ( str . charAt ( idx ) != ' ' ) throwSyntaxError ( "expecting ':' token" ) ; ++ idx ; skipBlank ( ) ; try { AbstractType type = parse ( ) ; defs . add ( Pair . create ( name , type ) ) ; } catch ( SyntaxException e ) { SyntaxException ex = new SyntaxException ( String . format ( "Exception while parsing '%s' around char %d" , str , idx ) ) ; ex . initCause ( e ) ; throw ex ; } } throw new SyntaxException ( String . format ( "Syntax error parsing '%s' at char %d: unexpected end of string" , str , idx ) ) ; }
|
@ Override public Long zRem ( byte [ ] key , byte [ ] ... values ) { try { if ( isPipelined ( ) ) { pipeline ( new JedisResult ( pipeline . zrem ( key , values ) ) ) ; return null ; } return this . client . zrem ( key , values ) ; } catch ( Exception ex ) { throw convertException ( ex ) ; } }
|
public static void registerVariableDataType ( Class < ? > type , DataTypeFactory dataTypeFactory , String name , Class < ? > valueEditorClass , Class < ? > dataTypeEditorClass ) { variableDataTypeRegistry . registerDataType ( type , dataTypeFactory , name , valueEditorClass , dataTypeEditorClass ) ; }
|
public String METHOD_1 ( String VAR_1 ) { TYPE_1 buf = new TYPE_1 ( super . METHOD_1 ( VAR_1 ) ) ; buf . append ( VAR_2 . METHOD_1 ( VAR_1 + STRING_1 ) ) ; return buf . toString ( ) ; }
|
public TYPE_1 METHOD_1 ( final String VAR_1 ) { METHOD_2 ( ) . METHOD_3 ( VAR_2 . VAR_3 . VAR_4 , VAR_5 . VAR_6 ) ; METHOD_4 ( ) ; try { final TYPE_2 VAR_7 = METHOD_2 ( ) ; final String VAR_8 = String . format ( STRING_1 , name , VAR_1 ) ; VAR_7 . METHOD_5 ( VAR_8 ) ; } finally { METHOD_6 ( ) ; } return this ; }
|
ANNOTATION_1 ( "unchecked" ) static < TYPE_1 > void METHOD_1 ( TYPE_1 VAR_1 , TYPE_2 field , TYPE_3 < String > VAR_2 ) { TYPE_4 VAR_3 = null ; if ( ( field . getType ( ) == TYPE_5 . class ) || ( field . getType ( ) == TYPE_5 . VAR_4 ) ) { VAR_3 = ( byte ) ( VAR_2 . METHOD_2 ( ) & INT_1 ) ; } else if ( ( field . getType ( ) == TYPE_6 . class ) || ( field . getType ( ) == TYPE_6 . VAR_4 ) ) { VAR_3 = VAR_2 . METHOD_3 ( ) ; } else if ( ( field . getType ( ) == TYPE_7 . class ) || ( field . getType ( ) == TYPE_7 . VAR_4 ) ) { VAR_3 = ( short ) ( VAR_2 . METHOD_2 ( ) & INT_1 ) ; } else if ( ( field . getType ( ) == TYPE_8 . class ) || ( field . getType ( ) == TYPE_8 . VAR_4 ) ) { VAR_3 = VAR_2 . METHOD_2 ( ) ; } else if ( ( field . getType ( ) == TYPE_9 . class ) || ( field . getType ( ) == TYPE_9 . VAR_4 ) ) { VAR_3 = VAR_2 . METHOD_4 ( ) ; } else if ( ( field . getType ( ) == TYPE_10 . class ) || ( field . getType ( ) == TYPE_10 . VAR_4 ) ) { VAR_3 = ( float ) VAR_2 . METHOD_5 ( ) ; } else if ( ( field . getType ( ) == TYPE_11 . class ) || ( field . getType ( ) == TYPE_11 . VAR_4 ) ) { VAR_3 = VAR_2 . METHOD_5 ( ) ; } else if ( field . getType ( ) == TYPE_12 . class ) { VAR_3 = VAR_2 . METHOD_6 ( ) ; } else if ( field . getType ( ) == String . VAR_5 ) { VAR_3 = VAR_2 . METHOD_7 ( ) ; } else if ( field . getType ( ) == byte [ ] . class ) { VAR_3 = VAR_2 . METHOD_8 ( ) ; } else if ( field . getType ( ) == TYPE_13 . VAR_5 ) { VAR_3 = VAR_2 . METHOD_9 ( ) ; } else if ( field . getType ( ) . METHOD_10 ( ) ) { VAR_3 = TYPE_14 . METHOD_11 ( ( TYPE_15 < ? extends TYPE_14 > ) field . getType ( ) , VAR_2 . METHOD_7 ( ) ) ; } if ( VAR_3 == null ) { throw new TYPE_16 ( STRING_1 + field . getType ( ) . METHOD_12 ( ) ) ; } try { field . set ( VAR_1 , VAR_3 ) ; } catch ( TYPE_17 VAR_6 ) { throw new TYPE_18 ( VAR_6 ) ; } }
|
@ Override public Tensor forward ( ) { Beliefs b = inf . getOutput ( ) ; n = guessNumWords ( b ) ; y = new Tensor ( s , n , n ) ; for ( int v = 0 ; v < b . varBeliefs . length ; v ++ ) { Var var = b . varBeliefs [ v ] . getVars ( ) . get ( 0 ) ; if ( var instanceof LinkVar ) { LinkVar link = ( LinkVar ) var ; int p = link . getParent ( ) ; int c = link . getChild ( ) ; int pp = EdgeScores . getTensorParent ( p , c ) ; assert p < n && c < n ; // Set the marginal p(e_{p,c} = True). y . set ( b . varBeliefs [ v ] . getValue ( LinkVar . TRUE ) , pp , c ) ; } } return y ; }
|
public synchronized void METHOD_1 ( ) { if ( TYPE_1 . METHOD_2 ( ) && VAR_1 . METHOD_3 ( ) ) TYPE_2 . METHOD_4 ( this , VAR_1 , STRING_1 ) ; if ( VAR_2 ) { VAR_3 = true ; this . METHOD_5 ( ) ; } else { if ( TYPE_1 . METHOD_2 ( ) && VAR_1 . METHOD_6 ( ) ) TYPE_2 . event ( this , VAR_1 , STRING_2 ) ; if ( TYPE_1 . METHOD_2 ( ) && VAR_1 . METHOD_3 ( ) ) TYPE_2 . METHOD_7 ( this , VAR_1 , STRING_1 ) ; throw new TYPE_3 ( STRING_2 ) ; } if ( TYPE_1 . METHOD_2 ( ) && VAR_1 . METHOD_3 ( ) ) TYPE_2 . METHOD_7 ( this , VAR_1 , STRING_1 ) ; }
|
ANNOTATION_1 public TYPE_1 METHOD_1 ( String name ) throws TYPE_2 { TYPE_3 . METHOD_2 ( "name" , name ) ; return TYPE_4 . METHOD_3 ( name , this . VAR_1 , null ) ; }
|
private void parse ( KXmlParser parser , DocumentImpl document , Node node , int endToken ) throws XmlPullParserException , IOException { int token = parser . getEventType ( ) ; /* * The main parsing loop. The precondition is that we are already on the * token to be processed. This holds for each iteration of the loop, so * the inner statements have to ensure that (in particular the recursive * call). */ while ( token != endToken && token != XmlPullParser . END_DOCUMENT ) { if ( token == XmlPullParser . PROCESSING_INSTRUCTION ) { /* * Found a processing instructions. We need to split the token * text at the first whitespace character. */ String text = parser . getText ( ) ; int dot = text . indexOf ( ' ' ) ; String target = ( dot != - 1 ? text . substring ( 0 , dot ) : text ) ; String data = ( dot != - 1 ? text . substring ( dot + 1 ) : "" ) ; node . appendChild ( document . createProcessingInstruction ( target , data ) ) ; } else if ( token == XmlPullParser . DOCDECL ) { String name = parser . getRootElementName ( ) ; String publicId = parser . getPublicId ( ) ; String systemId = parser . getSystemId ( ) ; document . appendChild ( new DocumentTypeImpl ( document , name , publicId , systemId ) ) ; } else if ( token == XmlPullParser . COMMENT ) { /* * Found a comment. We simply take the token text, but we only * create a node if the client wants to see comments at all. */ if ( ! ignoreComments ) { node . appendChild ( document . createComment ( parser . getText ( ) ) ) ; } } else if ( token == XmlPullParser . IGNORABLE_WHITESPACE ) { /* * Found some ignorable whitespace. We only add it if the client * wants to see whitespace. Whitespace before and after the * document element is always ignored. */ if ( ! ignoreElementContentWhitespace && document != node ) { appendText ( document , node , token , parser . getText ( ) ) ; } } else if ( token == XmlPullParser . TEXT || token == XmlPullParser . CDSECT ) { /* * Found a piece of text (possibly encoded as a CDATA section). * That's the easiest case. We simply take it and create a new text node, * or merge with an adjacent text node. */ appendText ( document , node , token , parser . getText ( ) ) ; } else if ( token == XmlPullParser . ENTITY_REF ) { /* * Found an entity reference. If an entity resolver is * installed, we replace it by text (if possible). Otherwise we * add an entity reference node. */ String entity = parser . getName ( ) ; if ( entityResolver != null ) { // TODO Implement this... } String resolved = resolvePredefinedOrCharacterEntity ( entity ) ; if ( resolved != null ) { appendText ( document , node , token , resolved ) ; } else { node . appendChild ( document . createEntityReference ( entity ) ) ; } } else if ( token == XmlPullParser . START_TAG ) { /* * Found an element start tag. We create an element node with * the proper info and attributes. We then invoke parse() * recursively to handle the next level of nesting. When we * return from this call, we check that we are on the proper * element end tag. The whole handling differs somewhat * depending on whether the parser is namespace-aware or not. */ if ( namespaceAware ) { // Collect info for element node String namespace = parser . getNamespace ( ) ; String name = parser . getName ( ) ; String prefix = parser . getPrefix ( ) ; if ( "" . equals ( namespace ) ) { namespace = null ; } // Create element node and wire it correctly Element element = document . createElementNS ( namespace , name ) ; element . setPrefix ( prefix ) ; node . 
appendChild ( element ) ; for ( int i = 0 ; i < parser . getAttributeCount ( ) ; i ++ ) { // Collect info for a single attribute node String attrNamespace = parser . getAttributeNamespace ( i ) ; String attrPrefix = parser . getAttributePrefix ( i ) ; String attrName = parser . getAttributeName ( i ) ; String attrValue = parser . getAttributeValue ( i ) ; if ( "" . equals ( attrNamespace ) ) { attrNamespace = null ; } // Create attribute node and wire it correctly Attr attr = document . createAttributeNS ( attrNamespace , attrName ) ; attr . setPrefix ( attrPrefix ) ; attr . setValue ( attrValue ) ; element . setAttributeNodeNS ( attr ) ; } // Recursive descent token = parser . nextToken ( ) ; parse ( parser , document , element , XmlPullParser . END_TAG ) ; // Expect the element's end tag here parser . require ( XmlPullParser . END_TAG , namespace , name ) ; } else { // Collect info for element node String name = parser . getName ( ) ; // Create element node and wire it correctly Element element = document . createElement ( name ) ; node . appendChild ( element ) ; for ( int i = 0 ; i < parser . getAttributeCount ( ) ; i ++ ) { // Collect info for a single attribute node String attrName = parser . getAttributeName ( i ) ; String attrValue = parser . getAttributeValue ( i ) ; // Create attribute node and wire it correctly Attr attr = document . createAttribute ( attrName ) ; attr . setValue ( attrValue ) ; element . setAttributeNode ( attr ) ; } // Recursive descent token = parser . nextToken ( ) ; parse ( parser , document , element , XmlPullParser . END_TAG ) ; // Expect the element's end tag here parser . require ( XmlPullParser . END_TAG , "" , name ) ; } } token = parser . nextToken ( ) ; } }
|
public void commit ( ) throws TYPE_1 { VAR_1 . METHOD_1 ( ) ; METHOD_2 ( ) ; try { VAR_2 . commit ( ) ; } catch ( TYPE_2 VAR_3 ) { throw new TYPE_1 ( STRING_1 , VAR_3 ) ; } try { VAR_2 . METHOD_3 ( true ) ; } catch ( TYPE_2 VAR_3 ) { throw new TYPE_1 ( STRING_2 ) ; } }
|
ANNOTATION_1 ( "unchecked" ) public TYPE_1 put ( int key , TYPE_1 value ) { try { if ( value == null ) return remove ( key ) ; int VAR_1 = METHOD_1 ( key ) ; int index = METHOD_2 ( key , VAR_1 ) ; if ( index < 0 ) { size ++ ; METHOD_3 ( ) ; index = METHOD_4 ( key , VAR_1 ) ; keys [ index ] = key ; } TYPE_1 VAR_2 = ( TYPE_1 ) values [ index ] ; values [ index ] = value ; return VAR_2 ; } catch ( TYPE_2 VAR_3 ) { throw new TYPE_2 ( String . VAR_4 ( key ) , VAR_3 ) ; } }
|
public void setAreaTextVisible ( final boolean VISIBLE ) { if ( null == areaTextVisible ) { _areaTextVisible = VISIBLE ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { areaTextVisible . set ( VISIBLE ) ; } }
|
public static TimeOfDay fromMillisOfDay ( long millisOfDay , Chronology chrono ) { chrono = DateTimeUtils . getChronology ( chrono ) ; chrono = chrono . withUTC ( ) ; return new TimeOfDay ( millisOfDay , chrono ) ; }
|
ANNOTATION_1 public TYPE_1 METHOD_1 ( TYPE_2 out , String VAR_1 ) throws TYPE_3 { if ( out == null ) { throw new TYPE_4 ( STRING_1 ) ; } return new TYPE_5 ( METHOD_2 ( out , null , VAR_1 , false ) ) ; }
|
ANNOTATION_1 public synchronized void METHOD_1 ( TYPE_1 ... VAR_1 ) { VAR_1 = TYPE_2 . METHOD_2 ( VAR_1 ) ; TYPE_3 < TYPE_4 < ? > > VAR_2 = TYPE_5 . list ( ) ; for ( TYPE_1 VAR_3 : VAR_1 ) { boolean VAR_4 = METHOD_3 ( VAR_3 ) ; TYPE_4 < ? > VAR_5 = TYPE_6 . METHOD_4 ( VAR_3 ) ; if ( TYPE_7 . METHOD_5 ( VAR_5 ) ) { for ( TYPE_4 < ? > VAR_6 : TYPE_6 . METHOD_6 ( VAR_5 ) ) { METHOD_7 ( VAR_6 , VAR_3 ) ; } if ( VAR_4 ) { TYPE_8 . METHOD_8 ( STRING_1 , STRING_2 , VAR_3 ) ; state . VAR_7 . add ( VAR_5 ) ; if ( ! VAR_5 . METHOD_9 ( ) && ! VAR_5 . METHOD_10 ( ) && ! VAR_5 . METHOD_11 ( ) ) { if ( VAR_5 . METHOD_12 ( TYPE_9 . class ) != null ) { VAR_2 . add ( VAR_5 ) ; } } } else { TYPE_1 VAR_8 = VAR_3 ; TYPE_8 . METHOD_8 ( STRING_3 , STRING_4 , VAR_8 ) ; METHOD_7 ( VAR_5 , VAR_8 ) ; state . VAR_9 . add ( VAR_8 ) ; state . VAR_10 . add ( VAR_8 ) ; } } } for ( TYPE_4 < ? > VAR_5 : VAR_2 ) { METHOD_13 ( VAR_5 ) ; } }
|
public List < FacesConfigManagedBeanType < FacesConfigType < T > > > getAllManagedBean ( ) { List < FacesConfigManagedBeanType < FacesConfigType < T >>> list = new ArrayList < FacesConfigManagedBeanType < FacesConfigType < T > > > ( ) ; List < Node > nodeList = childNode . get ( "managed-bean" ) ; for ( Node node : nodeList ) { FacesConfigManagedBeanType < FacesConfigType < T >> type = new FacesConfigManagedBeanTypeImpl < FacesConfigType < T > > ( this , "managed-bean" , childNode , node ) ; list . add ( type ) ; } return list ; }
|
ANNOTATION_1 public TYPE_1 METHOD_1 ( ANNOTATION_2 ANNOTATION_3 String name ) throws TYPE_2 { TYPE_3 VAR_1 = METHOD_2 ( ) ; TYPE_4 VAR_2 = new TYPE_4 ( VAR_1 . getText ( ) ) ; String VAR_3 = new TYPE_4 ( VAR_2 . METHOD_3 ( ) , name ) . toString ( ) ; TYPE_5 VAR_4 = TYPE_6 . METHOD_4 ( this . METHOD_5 ( ) , VAR_3 ) ; VAR_1 . replace ( VAR_4 . METHOD_2 ( ) ) ; return this ; }
|
public final void addMemberAndGroup ( AnalyzedToken token ) { if ( patternToken . hasAndGroup ( ) ) { List < PatternTokenMatcher > andGroupList = andGroup ; for ( int i = 0 ; i < andGroupList . size ( ) ; i ++ ) { if ( ! andGroupCheck [ i + 1 ] ) { PatternTokenMatcher testAndGroup = andGroupList . get ( i ) ; if ( testAndGroup . isMatched ( token ) ) { andGroupCheck [ i + 1 ] = true ; } } } } }
|
@ Override public < RET extends OCommandRequest > RET setFetchPlan ( final String iFetchPlan ) { command . setFetchPlan ( iFetchPlan ) ; return null ; }
|
@ Override public void serialize ( BserWriter writer ) throws IOException { writer . writeInt ( 1 , this . uid ) ; writer . writeLong ( 2 , this . date ) ; if ( this . deviceType != null ) { writer . writeInt ( 3 , this . deviceType . getValue ( ) ) ; } if ( this . deviceCategory != null ) { writer . writeString ( 4 , this . deviceCategory ) ; } }
|
public Set < RoleRelation > getRoleRelations ( ) throws PropertyException { Set < String > relationNames = getRelationNameList ( ) ; Set < RoleRelation > result = new HashSet < RoleRelation > ( ) ; for ( String relationName : relationNames ) { String propertyValue = props . getProperty ( relationName ) ; if ( propertyValue == null ) throw new PropertyException ( RBACModelProperty . ROLE_RELATION , propertyValue , "Cannot extract role relation property \"" + propertyValue + "\"" ) ; int separatorIndex = propertyValue . lastIndexOf ( "->" ) ; if ( separatorIndex == - 1 || ( relationName . length ( ) == separatorIndex + 1 ) ) throw new PropertyException ( RBACModelProperty . ROLE_RELATION , relationName , "Corrupted property file (invalid role relation value)" ) ; String dominatingRole = null ; String dominatedRole = null ; try { dominatingRole = propertyValue . substring ( 0 , separatorIndex ) ; dominatedRole = propertyValue . substring ( separatorIndex + 2 ) ; } catch ( Exception e ) { throw new PropertyException ( RBACModelProperty . ROLE_RELATION , relationName , "Corrupted property file (invalid role relation value)" ) ; } result . add ( new RoleRelation ( dominatingRole , dominatedRole ) ) ; } return result ; }
|
@ Override public void dryRun ( ) { crossReferencing . dryRun ( ) ; if ( multiThreading ) { delayedMethods . add ( new DelayedMethod ( dryRun ) ) ; } else { delegate . dryRun ( ) ; } }
|
@ Override public void createInitialLayout ( IPageLayout layout ) { final IFolderLayout consoleFolder = layout . createFolder ( IInternalDebugUIConstants . ID_CONSOLE_FOLDER_VIEW , IPageLayout . BOTTOM , BOTTOM_PANEL_RATIO , layout . getEditorArea ( ) ) ; consoleFolder . addView ( IConsoleConstants . ID_CONSOLE_VIEW ) ; consoleFolder . addView ( IPageLayout . ID_TASK_LIST ) ; consoleFolder . addPlaceholder ( IPageLayout . ID_BOOKMARKS ) ; consoleFolder . addPlaceholder ( IPageLayout . ID_PROP_SHEET ) ; final IFolderLayout navFolder = layout . createFolder ( IInternalDebugUIConstants . ID_NAVIGATOR_FOLDER_VIEW , IPageLayout . TOP , NAVIGATION_PANEL_RATIO , layout . getEditorArea ( ) ) ; navFolder . addView ( IDebugUIConstants . ID_DEBUG_VIEW ) ; navFolder . addPlaceholder ( IPageLayout . ID_PROJECT_EXPLORER ) ; final IFolderLayout toolsFolder = layout . createFolder ( IInternalDebugUIConstants . ID_TOOLS_FOLDER_VIEW , IPageLayout . RIGHT , TOOL_PANEL_RATIO , IInternalDebugUIConstants . ID_NAVIGATOR_FOLDER_VIEW ) ; toolsFolder . addView ( IDebugUIConstants . ID_VARIABLE_VIEW ) ; toolsFolder . addView ( IDebugUIConstants . ID_BREAKPOINT_VIEW ) ; toolsFolder . addPlaceholder ( IDebugUIConstants . ID_EXPRESSION_VIEW ) ; toolsFolder . addPlaceholder ( IDebugUIConstants . ID_REGISTER_VIEW ) ; final IFolderLayout outlineFolder = layout . createFolder ( IInternalDebugUIConstants . ID_OUTLINE_FOLDER_VIEW , IPageLayout . RIGHT , OUTLINE_PANEL_RATIO , layout . getEditorArea ( ) ) ; outlineFolder . addView ( IPageLayout . ID_OUTLINE ) ; layout . addActionSet ( IDebugUIConstants . LAUNCH_ACTION_SET ) ; layout . addActionSet ( IDebugUIConstants . DEBUG_ACTION_SET ) ; // Set the view shortcuts layout . addShowViewShortcut ( IDebugUIConstants . ID_DEBUG_VIEW ) ; layout . addShowViewShortcut ( IDebugUIConstants . ID_VARIABLE_VIEW ) ; layout . addShowViewShortcut ( IDebugUIConstants . ID_BREAKPOINT_VIEW ) ; layout . addShowViewShortcut ( IDebugUIConstants . ID_EXPRESSION_VIEW ) ; layout . addShowViewShortcut ( IPageLayout . ID_OUTLINE ) ; layout . addShowViewShortcut ( IConsoleConstants . ID_CONSOLE_VIEW ) ; layout . addShowViewShortcut ( IPageLayout . ID_TASK_LIST ) ; // 'Window' > 'Open Perspective' contributions //--- Add the SARL debug perspective layout . addPerspectiveShortcut ( SARLEclipseConfig . ID_SARL_PERSPECTIVE ) ; }
|
public SerializationConfig setClassDefinitions ( Set < ClassDefinition > classDefinitions ) { isNotNull ( classDefinitions , "classDefinitions" ) ; this . classDefinitions . clear ( ) ; this . classDefinitions . addAll ( classDefinitions ) ; return this ; }
|
@ Override public boolean add ( String e ) { Object o = map . put ( e , null ) ; return o != null ; }
|
private void mergePropertyDescriptorWithSuperClass ( PojoPropertyDescriptorImpl propertyDescriptor , PojoPropertyDescriptorImpl superPropertyDescriptor ) { for ( PojoPropertyAccessor superPropertyAccessor : superPropertyDescriptor . getAccessors ( ) ) { PojoPropertyAccessor propertyAccessor = propertyDescriptor . getAccessor ( superPropertyAccessor . getMode ( ) ) ; if ( propertyAccessor == null ) { propertyDescriptor . putAccessor ( superPropertyAccessor ) ; } } Field field = superPropertyDescriptor . getField ( ) ; if ( ( field != null ) && ( propertyDescriptor . getField ( ) == null ) ) { propertyDescriptor . setField ( field ) ; } }
|
@ io . grpc . ExperimentalApi ( "https://github.com/grpc/grpc-java/issues/1901" ) public static io . grpc . MethodDescriptor < com . google . cloud . automl . v1beta1 . ListTableSpecsRequest , com . google . cloud . automl . v1beta1 . ListTableSpecsResponse > getListTableSpecsMethod ( ) { return getListTableSpecsMethodHelper ( ) ; }
|
@ InternalFunction ( operator = "!" , precedence = 22 ) public Boolean logicalNot ( Boolean value ) { if ( value == null ) { return Boolean . TRUE ; } return ! value ; }
|
private < TYPE_1 > TYPE_2 METHOD_1 ( String VAR_1 , TYPE_3 < String , TYPE_2 > context , TYPE_4 < TYPE_1 > VAR_2 ) throws TYPE_5 { return null ; }
|
public static boolean isEmailValid ( String email ) { if ( email == null ) { return false ; } Pattern p = Pattern . compile ( ".+@.+\\.[a-z]+" ) ; Matcher m = p . matcher ( email ) ; return m . matches ( ) ; }
|
public void METHOD_1 ( int VAR_1 , String ... VAR_2 ) { VAR_3 . put ( VAR_1 , VAR_2 ) ; for ( String VAR_4 : VAR_2 ) { VAR_5 . put ( VAR_4 , VAR_1 ) ; } }
|
public String compileClasses ( File [ ] sourceFiles , String classpath ) throws JRException { String [ ] source = new String [ sourceFiles . length + 4 ] ; for ( int i = 0 ; i < sourceFiles . length ; i ++ ) { source [ i ] = sourceFiles [ i ] . getPath ( ) ; } source [ sourceFiles . length ] = "-classpath" ; source [ sourceFiles . length + 1 ] = classpath ; source [ sourceFiles . length + 2 ] = "-encoding" ; source [ sourceFiles . length + 3 ] = System . getProperty ( "file.encoding" ) ; String errors = null ; try { Class clazz = JRClassLoader . loadClassForName ( "com.sun.tools.javac.Main" ) ; Object compiler = clazz . newInstance ( ) ; try { Method compileMethod = clazz . getMethod ( "compile" , String [ ] . class , PrintWriter . class ) ; ByteArrayOutputStream baos = new ByteArrayOutputStream ( ) ; int result = ( Integer ) compileMethod . invoke ( compiler , source , new PrintWriter ( baos ) ) ; if ( result != MODERN_COMPILER_SUCCESS ) { errors = baos . toString ( ) ; } } catch ( NoSuchMethodException ex ) { Method compileMethod = clazz . getMethod ( "compile" , String [ ] . class ) ; int result = ( Integer ) compileMethod . invoke ( compiler , new Object [ ] { source } ) ; if ( result != MODERN_COMPILER_SUCCESS ) { errors = "See error messages above." ; } } } catch ( Exception e ) { StringBuilder files = new StringBuilder ( ) ; for ( File sourceFile : sourceFiles ) { files . append ( sourceFile . getPath ( ) ) ; files . append ( ' ' ) ; } throw new JRException ( "Error compiling report java source files : " + files , e ) ; } return errors ; }
|
public static PathNode createPathNode ( CharSequence path , boolean existsCheck , boolean shouldExists ) { return new PathNode ( path , existsCheck , shouldExists ) ; }
|
public Field getFirstUniqueField ( ) { Field returnValue = null ; for ( Entry < String , Field > entry : getAllFields ( ) . entrySet ( ) ) { if ( isUnique ( entry . getKey ( ) ) ) { returnValue = entry . getValue ( ) ; break ; } } return returnValue ; }
|
protected void doSubscribe ( SocializeSession session , Entity entity , NotificationType type , SubscriptionListener listener , boolean subscribed ) { Subscription c = new Subscription ( ) ; c . setUser ( session . getUser ( ) ) ; c . setEntity ( entity ) ; c . setSubscribed ( subscribed ) ; c . setNotificationType ( type ) ; List < Subscription > list = new ArrayList < Subscription > ( 1 ) ; list . add ( c ) ; postAsync ( session , ENDPOINT , list , listener ) ; }
|
public static void METHOD_1 ( String name , TYPE_1 VAR_1 , TYPE_2 VAR_2 ) { int num = VAR_2 . METHOD_2 ( VAR_3 , 0 ) + 1 ; VAR_2 . METHOD_3 ( VAR_3 , num ) ; VAR_2 . METHOD_4 ( VAR_4 + num , name ) ; VAR_2 . METHOD_4 ( VAR_5 + num , VAR_1 . filePath ) ; VAR_2 . METHOD_5 ( VAR_6 + num , VAR_1 . VAR_7 || new TYPE_3 ( VAR_1 . filePath ) . METHOD_6 ( ) ) ; VAR_2 . METHOD_5 ( VAR_8 + num , VAR_1 . VAR_9 || new TYPE_3 ( VAR_1 . filePath ) . METHOD_7 ( ) ) ; if ( VAR_1 . VAR_10 != null ) { VAR_2 . METHOD_8 ( VAR_11 + num , VAR_1 . VAR_10 ) ; } }
|
public synchronized void start ( ) { if ( future == null ) { Runnable task = new Runnable ( ) { public void run ( ) { try { accumulator . publish ( ) ; } catch ( Exception e ) { handleException ( e ) ; } } } ; future = getExecutor ( ) . scheduleWithFixedDelay ( task , delayMillis , delayMillis , TimeUnit . MILLISECONDS ) ; } }
|
@ Override public java . util . concurrent . Future < CancelCommandResult > cancelCommandAsync ( CancelCommandRequest request , com . amazonaws . handlers . AsyncHandler < CancelCommandRequest , CancelCommandResult > asyncHandler ) { throw new java . lang . UnsupportedOperationException ( ) ; }
|