signature | implementation
---|---
public class ExampleImageStitching { /** * Using abstracted code , find a transform which minimizes the difference between corresponding features
* in both images . This code is completely model independent and is the core algorithms . */
public static < T extends ImageGray < T > , FD extends TupleDesc > Homography2D_F64 computeTransform ( T imageA , T imageB , DetectDescribePoint < T , FD > detDesc , AssociateDescription < FD > associate , ModelMatcher < Homography2D_F64 , AssociatedPair > modelMatcher ) { } } | // get the length of the description
List < Point2D_F64 > pointsA = new ArrayList < > ( ) ; FastQueue < FD > descA = UtilFeature . createQueue ( detDesc , 100 ) ; List < Point2D_F64 > pointsB = new ArrayList < > ( ) ; FastQueue < FD > descB = UtilFeature . createQueue ( detDesc , 100 ) ; // extract feature locations and descriptions from each image
describeImage ( imageA , detDesc , pointsA , descA ) ; describeImage ( imageB , detDesc , pointsB , descB ) ; // Associate features between the two images
associate . setSource ( descA ) ; associate . setDestination ( descB ) ; associate . associate ( ) ; // create a list of AssociatedPairs that tell the model matcher how a feature moved
FastQueue < AssociatedIndex > matches = associate . getMatches ( ) ; List < AssociatedPair > pairs = new ArrayList < > ( ) ; for ( int i = 0 ; i < matches . size ( ) ; i ++ ) { AssociatedIndex match = matches . get ( i ) ; Point2D_F64 a = pointsA . get ( match . src ) ; Point2D_F64 b = pointsB . get ( match . dst ) ; pairs . add ( new AssociatedPair ( a , b , false ) ) ; } // find the best fit model to describe the change between these images
if ( ! modelMatcher . process ( pairs ) ) throw new RuntimeException ( "Model Matcher failed!" ) ; // return the found image transform
return modelMatcher . getModelParameters ( ) . copy ( ) ; |
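
A hypothetical caller for the row above, shown only as a sketch: the detector, associator, and model matcher are assumed to be constructed elsewhere (for example via BoofCV's factory classes), only `computeTransform` itself comes from the snippet, and the import paths may differ between BoofCV versions.

```java
import boofcv.abst.feature.associate.AssociateDescription;
import boofcv.abst.feature.detdesc.DetectDescribePoint;
import boofcv.struct.feature.TupleDesc;
import boofcv.struct.geo.AssociatedPair;
import boofcv.struct.image.ImageGray;
import georegression.struct.homography.Homography2D_F64;
import org.ddogleg.fitting.modelset.ModelMatcher;

final class StitchDemo {
    // Estimate the homography that maps pixel coordinates of imageA into imageB's frame.
    static <T extends ImageGray<T>, FD extends TupleDesc> Homography2D_F64 stitch(
            T imageA, T imageB,
            DetectDescribePoint<T, FD> detDesc,
            AssociateDescription<FD> associate,
            ModelMatcher<Homography2D_F64, AssociatedPair> modelMatcher) {
        return ExampleImageStitching.computeTransform(imageA, imageB, detDesc, associate, modelMatcher);
    }
}
```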
public class MarkerComparator { /** * Compare two { @ link Marker } objects under the { @ link PartitionStrategy } .
* All comparisons are with respect to the partition ordering defined by
* this comparator ' s { @ code PartitionStrategy } . Under a
* { @ code PartitionStrategy } , a { @ code Marker } contains a set of one or
* more partitions . A { @ code Marker } is strictly less than another if all of
* the partitions it contains are less than the partitions of the other .
* Similarly , if all partitions are greater than the partitions of the other ,
* then the { @ code Marker } is greater . Two { @ code Markers } are equal if they
* contain the same set of partitions .
* This method implements strictly exclusive comparison : if either
* { @ code Marker } contains the other , then this throws
* { @ code IllegalStateException } . This is because there is at least one
* partition in the containing { @ code Marker } that is less than or equal to
* all partitions in the contained { @ code Marker } and at least one partition
* that is greater than or equal to all partitions in the contained
* { @ code Marker } .
* Alternatively , the comparison methods { @ link # leftCompare ( Marker , Marker ) }
* and { @ link # rightCompare ( Marker , Marker ) } consider contained { @ code Marker }
* objects to be greater - than and less - than respectively .
* Note : Because { @ code Marker } objects are hierarchical , they are either
* completely disjoint or one marker contains the other . If one contains the
* other and the two are not equal , this method throws
* { @ code IllegalStateException } .
* TODO : catch wildcard to concrete comparisons and throw an Exception
* @ param m1 a { @ code Marker }
* @ param m2 a { @ code Marker }
* @ return
* - 1 If all partitions in m1 are less than the partitions in m2
* 0 If m1 and m2 contain the same set of partitions
* 1 If all partitions of m1 are greater than the partitions in m2
* @ throws IllegalStateException
* If either { @ code Marker } is a proper subset of the other
* @ see MarkerComparator # leftCompare ( Marker , Marker )
* @ see MarkerComparator # rightCompare ( Marker , Marker )
* @ since 0.9.0 */
@ Override @ SuppressWarnings ( "unchecked" ) public int compare ( Marker m1 , Marker m2 ) { } } | for ( FieldPartitioner field : Accessor . getDefault ( ) . getFieldPartitioners ( strategy ) ) { Object m1Value = m1 . valueFor ( field ) ; Object m2Value = m2 . valueFor ( field ) ; // if either is null , but not both , then they are Incomparable
if ( m1Value == null ) { if ( m2Value != null ) { // m1 contains m2
throw new IllegalStateException ( "Incomparable" ) ; } } else if ( m2Value == null ) { // m2 contains m1
throw new IllegalStateException ( "Incomparable" ) ; } else { int cmp = field . compare ( m1Value , m2Value ) ; if ( cmp != 0 ) { return cmp ; } } } return 0 ; |
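
A self-contained sketch of the comparison rule described above, using plain maps of field name to partition value instead of Kite's `Marker` and `FieldPartitioner` types (which are assumptions here); it reproduces the "throw on containment, first differing partition decides" behaviour.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

final class MarkerCompareSketch {
    static int compare(Map<String, Integer> m1, Map<String, Integer> m2, Iterable<String> fields) {
        for (String field : fields) {
            Integer v1 = m1.get(field);
            Integer v2 = m2.get(field);
            if (v1 == null && v2 == null) {
                continue;                                        // neither marker constrains this field
            }
            if (v1 == null || v2 == null) {
                throw new IllegalStateException("Incomparable"); // one marker contains the other
            }
            int cmp = v1.compareTo(v2);
            if (cmp != 0) {
                return cmp;                                      // first differing partition decides
            }
        }
        return 0;                                                // same set of partitions
    }

    public static void main(String[] args) {
        Map<String, Integer> a = new HashMap<>();
        a.put("year", 2015);
        a.put("month", 3);
        Map<String, Integer> b = new HashMap<>();
        b.put("year", 2015);
        b.put("month", 7);
        System.out.println(compare(a, b, Arrays.asList("year", "month"))); // negative: a < b
    }
}
```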
public class AbstractSpringActivator { /** * { @ inheritDoc } */
@ Override protected void destroy ( ) throws Exception { } } | try { destroyApplicationContext ( ) ; } catch ( Exception e ) { LOGGER . error ( e . getMessage ( ) , e ) ; } |
public class HBaseSchemaManager { /** * drop schema method drop the table */
public void dropSchema ( ) { } } | if ( operation != null && operation . equalsIgnoreCase ( "create-drop" ) ) { try { HTableDescriptor hTableDescriptor = null ; if ( admin . isTableAvailable ( databaseName ) ) { if ( admin . isTableEnabled ( databaseName ) ) { hTableDescriptor = admin . getTableDescriptor ( databaseName . getBytes ( ) ) ; admin . disableTable ( databaseName ) ; } for ( TableInfo tableInfo : tableInfos ) { if ( tableInfo != null && tableInfo . getTableName ( ) != null && hTableDescriptor . getFamily ( tableInfo . getTableName ( ) . getBytes ( ) ) != null ) { admin . deleteColumn ( databaseName , tableInfo . getTableName ( ) ) ; } } } } catch ( TableNotFoundException tnfe ) { logger . error ( "Table doesn't exist, Caused by " , tnfe ) ; throw new SchemaGenerationException ( tnfe , "Hbase" ) ; } catch ( IOException ioe ) { logger . error ( "Table isn't in enabled state, Caused by" , ioe ) ; throw new SchemaGenerationException ( ioe , "Hbase" ) ; } finally { try { admin . enableTable ( databaseName ) ; } catch ( IOException ioe ) { logger . error ( "Table isn't in enabled state, Caused by" , ioe ) ; throw new SchemaGenerationException ( ioe , "Hbase" ) ; } } } operation = null ; admin = null ; |
public class CmsSearchFieldConfiguration { /** * Extends the given document by a field that contains the extracted content blob . < p >
* @ param document the document to extend
* @ param cms the OpenCms context used for building the search index
* @ param resource the resource that is indexed
* @ param extractionResult the plain text extraction result from the resource
* @ param properties the list of all properties directly attached to the resource ( not searched )
* @ param propertiesSearched the list of all searched properties of the resource
* @ return the document extended by a field that contains the extracted content blob */
protected I_CmsSearchDocument appendContentBlob ( I_CmsSearchDocument document , CmsObject cms , CmsResource resource , I_CmsExtractionResult extractionResult , List < CmsProperty > properties , List < CmsProperty > propertiesSearched ) { } } | if ( extractionResult != null ) { byte [ ] data = extractionResult . getBytes ( ) ; if ( data != null ) { document . addContentField ( data ) ; } } return document ; |
public class KeyStoreFactory { /** * Save X509Certificate in KeyStore file . */
private static KeyStore saveCertificateAsKeyStore ( KeyStore existingKeyStore , boolean deleteOnExit , String keyStoreFileName , String certificationAlias , Key privateKey , char [ ] keyStorePassword , Certificate [ ] chain , X509Certificate caCert ) { } } | try { KeyStore keyStore = existingKeyStore ; if ( keyStore == null ) { // create new key store
keyStore = KeyStore . getInstance ( KeyStore . getDefaultType ( ) ) ; keyStore . load ( null , keyStorePassword ) ; } // add certificate
try { keyStore . deleteEntry ( certificationAlias ) ; } catch ( KeyStoreException kse ) { // ignore as may not exist in keystore yet
} keyStore . setKeyEntry ( certificationAlias , privateKey , keyStorePassword , chain ) ; // add CA certificate
try { keyStore . deleteEntry ( KEY_STORE_CA_ALIAS ) ; } catch ( KeyStoreException kse ) { // ignore as may not exist in keystore yet
} keyStore . setCertificateEntry ( KEY_STORE_CA_ALIAS , caCert ) ; // save as JKS file
String keyStoreFileAbsolutePath = new File ( keyStoreFileName ) . getAbsolutePath ( ) ; try ( FileOutputStream fileOutputStream = new FileOutputStream ( keyStoreFileAbsolutePath ) ) { keyStore . store ( fileOutputStream , keyStorePassword ) ; MOCK_SERVER_LOGGER . trace ( "Saving key store to file [" + keyStoreFileAbsolutePath + "]" ) ; } if ( deleteOnExit ) { new File ( keyStoreFileAbsolutePath ) . deleteOnExit ( ) ; } return keyStore ; } catch ( Exception e ) { throw new RuntimeException ( "Exception while saving KeyStore" , e ) ; } |
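
A minimal sketch of the same keystore round-trip using only the standard JDK API; the alias, file name, and password are illustrative, and only a trusted CA certificate entry is stored.

```java
import java.io.FileOutputStream;
import java.security.KeyStore;
import java.security.cert.Certificate;

// caCert is assumed to be obtained elsewhere (e.g. from a CertificateFactory).
static void saveCaOnly(Certificate caCert) throws Exception {
    char[] password = "changeit".toCharArray();
    KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
    keyStore.load(null, password);                   // initialise an empty store
    keyStore.setCertificateEntry("ca-cert", caCert); // add the trusted CA entry
    try (FileOutputStream out = new FileOutputStream("demo-keystore.jks")) {
        keyStore.store(out, password);               // persist to disk
    }
}
```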
public class JdbcTypesHelper { /** * Returns a string representation of the given { @ link java . sql . Types } value . */
public static String getSqlTypeAsString ( int jdbcType ) { } } | String statusName = "*can't find String representation for sql type '" + jdbcType + "'*" ; try { Field [ ] fields = Types . class . getDeclaredFields ( ) ; for ( int i = 0 ; i < fields . length ; i ++ ) { if ( fields [ i ] . getInt ( null ) == jdbcType ) { statusName = fields [ i ] . getName ( ) ; break ; } } } catch ( Exception ignore ) { // ignore it
} return statusName ; |
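
A standalone version of the reflection lookup above over the `java.sql.Types` constants, with a small main method showing the expected output.

```java
import java.lang.reflect.Field;
import java.sql.Types;

final class SqlTypeNameDemo {
    static String nameOf(int jdbcType) {
        for (Field f : Types.class.getDeclaredFields()) {
            try {
                // All Types constants are public static int fields.
                if (f.getType() == int.class && f.getInt(null) == jdbcType) {
                    return f.getName();
                }
            } catch (IllegalAccessException ignore) {
                // public static fields are accessible; skip anything that is not
            }
        }
        return "unknown(" + jdbcType + ")";
    }

    public static void main(String[] args) {
        System.out.println(nameOf(Types.VARCHAR)); // prints "VARCHAR"
    }
}
```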
public class ApiOvhMe { /** * Get this object properties
* REST : GET / me / installationTemplate / { templateName } / partitionScheme / { schemeName }
* @ param templateName [ required ] This template name
* @ param schemeName [ required ] name of this partitioning scheme */
public OvhTemplatePartitioningSchemes installationTemplate_templateName_partitionScheme_schemeName_GET ( String templateName , String schemeName ) throws IOException { } } | String qPath = "/me/installationTemplate/{templateName}/partitionScheme/{schemeName}" ; StringBuilder sb = path ( qPath , templateName , schemeName ) ; String resp = exec ( qPath , "GET" , sb . toString ( ) , null ) ; return convertTo ( resp , OvhTemplatePartitioningSchemes . class ) ; |
public class PackageManagerUtils { /** * Checks if the device has a device administrative feature .
* @ param manager the package manager .
* @ return { @ code true } if the device has a device administrative feature . */
@ TargetApi ( Build . VERSION_CODES . KITKAT ) public static boolean hasDeviceAdminFeature ( PackageManager manager ) { } } | return manager . hasSystemFeature ( PackageManager . FEATURE_DEVICE_ADMIN ) ; |
public class CurvedArrow { /** * Returns the bounds .
* @ return the rectangular bounds for this curved arrow */
public Rectangle2D getBounds ( ) { } } | if ( needsRefresh ) refreshCurve ( ) ; Rectangle2D b = curve . getBounds ( ) ; Area area = new Area ( bounds ) ; area . transform ( affineToText ) ; b . add ( area . getBounds ( ) ) ; return b ; |
public class NetworkSadnessTransformer { /** * Possibly duplicate and delay by some random amount . */
void transformAndQueue ( T event , long systemCurrentTimeMillis ) { } } | // if you ' re super unlucky , this blows up the stack
if ( rand . nextDouble ( ) < 0.05 ) { // duplicate this message ( note recursion means maybe more than duped )
transformAndQueue ( event , systemCurrentTimeMillis ) ; } long delayms = nextZipfDelay ( ) ; delayed . add ( systemCurrentTimeMillis + delayms , event ) ; |
public class Gauge { /** * Defines the color that will be used to colorize the average
* indicator of the gauge .
* @ param COLOR */
public void setAverageColor ( final Color COLOR ) { } } | if ( null == averageColor ) { _averageColor = COLOR ; fireUpdateEvent ( REDRAW_EVENT ) ; } else { averageColor . set ( COLOR ) ; } |
public class EmbeddedGobblin { /** * Use a { @ link org . apache . gobblin . runtime . api . GobblinInstancePlugin } identified by name . */
public EmbeddedGobblin usePlugin ( String pluginAlias ) throws ClassNotFoundException , IllegalAccessException , InstantiationException { } } | return usePlugin ( GobblinInstancePluginUtils . instantiatePluginByAlias ( pluginAlias ) ) ; |
public class AbstractProtoRealization { /** * / * ( non - Javadoc )
* @ see com . ibm . ws . sib . processor . impl . interfaces . DestinationHandler # registerControlAdapters ( ) */
public void registerControlAdapters ( ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "registerControlAdapters" ) ; // do nothing
if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "registerControlAdapters" ) ; |
public class DateRangeQuery { /** * Sets the lower boundary of the range , inclusive or not depending on the second parameter .
* Works with a { @ link Date } object , which is converted to RFC 3339 format using
* { @ link SearchUtils # toFtsUtcString ( Date ) } , so you shouldn ' t use a non - default { @ link # dateTimeParser ( String ) }
* after that . */
public DateRangeQuery start ( Date start , boolean inclusive ) { } } | this . start = SearchUtils . toFtsUtcString ( start ) ; this . inclusiveStart = inclusive ; return this ; |
public class SchemeInformationBuilder { /** * { @ inheritDoc } */
@ Override public SchemeInformation buildObject ( String namespaceURI , String localName , String namespacePrefix ) { } } | return new SchemeInformationImpl ( namespaceURI , localName , namespacePrefix ) ; |
public class AbstractLinearClassifierFactory { /** * Takes a { @ link Collection } of { @ link Datum } objects and gives you back a
* { @ link Classifier } trained on it .
* @ param examples { @ link Collection } of { @ link Datum } objects to train the
* classifier on
* @ return A { @ link Classifier } trained on it . */
public LinearClassifier < L , F > trainClassifier ( Collection < Datum < L , F > > examples ) { } } | Dataset < L , F > dataset = new Dataset < L , F > ( ) ; dataset . addAll ( examples ) ; return trainClassifier ( dataset ) ; |
public class LogMetadata { /** * Gets the LedgerMetadata for the ledger with given ledger Id .
* @ param ledgerId The Ledger Id to search .
* @ return The sought LedgerMetadata , or null if not found . */
LedgerMetadata getLedger ( long ledgerId ) { } } | int index = getLedgerMetadataIndex ( ledgerId ) ; if ( index >= 0 ) { return this . ledgers . get ( index ) ; } return null ; |
public class ClassGraph { /** * Blacklist classpath elements based on resource paths . Classpath elements that contain resources with paths
* matching the blacklist will not be scanned .
* @ param resourcePaths
* The resource paths which cause a classpath element not to be scanned if any of them are
* present in that classpath element . May contain a wildcard glob ( { @ code ' * ' } ) .
* @ return this ( for method chaining ) . */
public ClassGraph blacklistClasspathElementsContainingResourcePath ( final String ... resourcePaths ) { } } | for ( final String resourcePath : resourcePaths ) { final String resourcePathNormalized = WhiteBlackList . normalizePath ( resourcePath ) ; scanSpec . classpathElementResourcePathWhiteBlackList . addToBlacklist ( resourcePathNormalized ) ; } return this ; |
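
A hypothetical usage sketch of the method above; the resource path is illustrative, and the rest of the chain follows ClassGraph's documented builder style.

```java
import io.github.classgraph.ClassGraph;
import io.github.classgraph.ScanResult;

// Skip any classpath element that bundles its own log4j configuration.
try (ScanResult scanResult = new ClassGraph()
        .blacklistClasspathElementsContainingResourcePath("log4j.properties")
        .enableClassInfo()
        .scan()) {
    System.out.println("Scanned classes: " + scanResult.getAllClasses().size());
}
```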
public class RpcRequest { /** * ( non - Javadoc )
* @ see
* com . emc . ecs . nfsclient . nfs . NfsRequest # marshalling ( com . emc . ecs . nfsclient .
* rpc . Xdr ) */
public void marshalling ( Xdr xdr ) { } } | xdr . setXid ( _xid ) ; xdr . putInt ( _xid ) ; xdr . putInt ( _direction ) ; xdr . putInt ( _rpcVersion ) ; xdr . putInt ( _serviceProgram ) ; xdr . putInt ( _serviceVersion ) ; xdr . putInt ( _serviceProcedure ) ; _credential . marshalling ( xdr ) ; |
public class Traverson { /** * Follow the { @ link Link } s of the current resource , selected by its link - relation type and returns a { @ link Stream }
* containing the returned { @ link HalRepresentation HalRepresentations } .
* Templated links are expanded to URIs using the specified template variables .
* If the current node has { @ link Embedded embedded } items with the specified { @ code rel } ,
* these items are used instead of resolving the associated { @ link Link } .
* @ param type the specific type of the returned HalRepresentations
* @ param < T > type of the returned HalRepresentations
* @ return a { @ link Stream } of the returned HalRepresentations
* @ throws IOException if a low - level I / O problem ( unexpected end - of - input , network error ) occurs .
* @ throws JsonParseException if the json document can not be parsed by Jackson ' s ObjectMapper
* @ throws JsonMappingException if the input JSON structure can not be mapped to the specified HalRepresentation type
* @ since 1.0.0 */
public < T extends HalRepresentation > Stream < T > streamAs ( final Class < T > type ) throws IOException { } } | return streamAs ( type , null ) ; |
public class PostTools { /** * Use Jackson to convert Map to JSON string .
* @ param map Map to convert to json
* @ return json string
* @ throws MovieDbException exception */
private String convertToJson ( Map < String , ? > map ) throws MovieDbException { } } | try { return MAPPER . writeValueAsString ( map ) ; } catch ( JsonProcessingException ex ) { throw new MovieDbException ( ApiExceptionType . MAPPING_FAILED , "JSON conversion failed" , "" , ex ) ; } |
public class ByteHandlerPipeline { /** * { @ inheritDoc } */
public OutputStream serialize ( final OutputStream pToSerialize ) throws TTByteHandleException { } } | OutputStream lastOutput = pToSerialize ; for ( IByteHandler part : mParts ) { lastOutput = part . serialize ( lastOutput ) ; } return lastOutput ; |
public class Throttle { /** * Updates the number of operations for this throttle to a new maximum , retaining the current
* history of operations if the limit is being increased and truncating the oldest operations
* if the limit is decreased .
* @ param operations the new maximum number of operations .
* @ param period the new period . */
public void reinit ( int operations , long period ) { } } | _period = period ; if ( operations != _ops . length ) { long [ ] ops = new long [ operations ] ; if ( operations > _ops . length ) { // copy to a larger buffer , leaving zeroes at the beginning
int lastOp = _lastOp + operations - _ops . length ; System . arraycopy ( _ops , 0 , ops , 0 , _lastOp ) ; System . arraycopy ( _ops , _lastOp , ops , lastOp , _ops . length - _lastOp ) ; } else { // if we ' re truncating , copy the first ( oldest ) stamps into ops [ 0 . . ]
int endCount = Math . min ( operations , _ops . length - _lastOp ) ; System . arraycopy ( _ops , _lastOp , ops , 0 , endCount ) ; System . arraycopy ( _ops , 0 , ops , endCount , operations - endCount ) ; _lastOp = 0 ; } _ops = ops ; } |
public class Matrix4f { /** * / * ( non - Javadoc )
* @ see org . joml . Matrix4fc # unprojectRay ( float , float , int [ ] , org . joml . Vector3f , org . joml . Vector3f ) */
public Matrix4f unprojectRay ( float winX , float winY , int [ ] viewport , Vector3f originDest , Vector3f dirDest ) { } } | float a = m00 * m11 - m01 * m10 ; float b = m00 * m12 - m02 * m10 ; float c = m00 * m13 - m03 * m10 ; float d = m01 * m12 - m02 * m11 ; float e = m01 * m13 - m03 * m11 ; float f = m02 * m13 - m03 * m12 ; float g = m20 * m31 - m21 * m30 ; float h = m20 * m32 - m22 * m30 ; float i = m20 * m33 - m23 * m30 ; float j = m21 * m32 - m22 * m31 ; float k = m21 * m33 - m23 * m31 ; float l = m22 * m33 - m23 * m32 ; float det = a * l - b * k + c * j + d * i - e * h + f * g ; det = 1.0f / det ; float im00 = ( m11 * l - m12 * k + m13 * j ) * det ; float im01 = ( - m01 * l + m02 * k - m03 * j ) * det ; float im02 = ( m31 * f - m32 * e + m33 * d ) * det ; float im03 = ( - m21 * f + m22 * e - m23 * d ) * det ; float im10 = ( - m10 * l + m12 * i - m13 * h ) * det ; float im11 = ( m00 * l - m02 * i + m03 * h ) * det ; float im12 = ( - m30 * f + m32 * c - m33 * b ) * det ; float im13 = ( m20 * f - m22 * c + m23 * b ) * det ; float im20 = ( m10 * k - m11 * i + m13 * g ) * det ; float im21 = ( - m00 * k + m01 * i - m03 * g ) * det ; float im22 = ( m30 * e - m31 * c + m33 * a ) * det ; float im23 = ( - m20 * e + m21 * c - m23 * a ) * det ; float im30 = ( - m10 * j + m11 * h - m12 * g ) * det ; float im31 = ( m00 * j - m01 * h + m02 * g ) * det ; float im32 = ( - m30 * d + m31 * b - m32 * a ) * det ; float im33 = ( m20 * d - m21 * b + m22 * a ) * det ; float ndcX = ( winX - viewport [ 0 ] ) / viewport [ 2 ] * 2.0f - 1.0f ; float ndcY = ( winY - viewport [ 1 ] ) / viewport [ 3 ] * 2.0f - 1.0f ; float px = im00 * ndcX + im10 * ndcY + im30 ; float py = im01 * ndcX + im11 * ndcY + im31 ; float pz = im02 * ndcX + im12 * ndcY + im32 ; float invNearW = 1.0f / ( im03 * ndcX + im13 * ndcY - im23 + im33 ) ; float nearX = ( px - im20 ) * invNearW ; float nearY = ( py - im21 ) * invNearW ; float nearZ = ( pz - im22 ) * invNearW ; float invFarW = 1.0f / ( im03 * ndcX + im13 * ndcY + im23 + im33 ) ; float farX = ( px + im20 ) * invFarW ; float farY = ( py + im21 ) * invFarW ; float farZ = ( pz + im22 ) * invFarW ; originDest . x = nearX ; originDest . y = nearY ; originDest . z = nearZ ; dirDest . x = farX - nearX ; dirDest . y = farY - nearY ; dirDest . z = farZ - nearZ ; return this ; |
public class AnimaQuery { /** * Execute sql statement
* @ param sql sql statement
* @ param params params
* @ return the number of affected rows */
public int execute ( String sql , Object ... params ) { } } | Connection conn = getConn ( ) ; try { return conn . createQuery ( sql ) . withParams ( params ) . executeUpdate ( ) . getResult ( ) ; } finally { this . closeConn ( conn ) ; this . clean ( conn ) ; } |
public class SimpleExpressionsPackageImpl { /** * Creates , registers , and initializes the < b > Package < / b > for this model , and for any others upon which it depends .
* < p > This method is used to initialize { @ link SimpleExpressionsPackage # eINSTANCE } when that field is accessed .
* Clients should not invoke it directly . Instead , they should simply access that field to obtain the package .
* < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ see # eNS _ URI
* @ see # createPackageContents ( )
* @ see # initializePackageContents ( )
* @ generated */
public static SimpleExpressionsPackage init ( ) { } } | if ( isInited ) return ( SimpleExpressionsPackage ) EPackage . Registry . INSTANCE . getEPackage ( SimpleExpressionsPackage . eNS_URI ) ; // Obtain or create and register package
SimpleExpressionsPackageImpl theSimpleExpressionsPackage = ( SimpleExpressionsPackageImpl ) ( EPackage . Registry . INSTANCE . get ( eNS_URI ) instanceof SimpleExpressionsPackageImpl ? EPackage . Registry . INSTANCE . get ( eNS_URI ) : new SimpleExpressionsPackageImpl ( ) ) ; isInited = true ; // Create package meta - data objects
theSimpleExpressionsPackage . createPackageContents ( ) ; // Initialize created meta - data
theSimpleExpressionsPackage . initializePackageContents ( ) ; // Mark meta - data to indicate it can ' t be changed
theSimpleExpressionsPackage . freeze ( ) ; // Update the registry and return the package
EPackage . Registry . INSTANCE . put ( SimpleExpressionsPackage . eNS_URI , theSimpleExpressionsPackage ) ; return theSimpleExpressionsPackage ; |
public class HtmlDataTable { /** * < p > Return the value of the < code > cellpadding < / code > property . < / p >
* < p > Contents : Definition of how much space the user agent should
* leave between the border of each cell and its contents . */
public java . lang . String getCellpadding ( ) { } } | return ( java . lang . String ) getStateHelper ( ) . eval ( PropertyKeys . cellpadding ) ; |
public class DefaultSqlConfig { /** * Obtains a SqlConfig using the specified DB connection information
* @ param url JDBC connection URL
* @ param user JDBC user
* @ param password JDBC password
* @ param schema JDBC schema name
* @ param autoCommit whether to auto - commit . < code > true < / code > if auto - commit is enabled
* @ param readOnly whether the connection is read - only . < code > true < / code > if read - only
* @ return the SqlConfig object */
public static SqlConfig getConfig ( final String url , final String user , final String password , final String schema , final boolean autoCommit , final boolean readOnly ) { } } | return new DefaultSqlConfig ( new JdbcConnectionSupplierImpl ( url , user , password , schema , autoCommit , readOnly ) , null ) ; |
public class PreferenceFragment { /** * Initializes the preference , which allows to show the custom dialog . */
private void initializeShowCustomDialogPreference ( ) { } } | Preference preference = findPreference ( getString ( R . string . show_custom_dialog_preference_key ) ) ; preference . setOnPreferenceClickListener ( new OnPreferenceClickListener ( ) { @ Override public boolean onPreferenceClick ( final Preference preference ) { initializeCustomDialog ( ) ; customDialog . setShowAnimation ( createRectangularRevealAnimation ( preference ) ) ; customDialog . setDismissAnimation ( createRectangularRevealAnimation ( preference ) ) ; customDialog . setCancelAnimation ( createRectangularRevealAnimation ( preference ) ) ; customDialog . show ( ) ; return true ; } } ) ; |
public class DateTimeUtils { /** * Parses a fraction , multiplying the first character by { @ code multiplier } ,
* the second character by { @ code multiplier / 10 } ,
* the third character by { @ code multiplier / 100 } , and so forth .
* < p > For example , { @ code parseFraction ( " 1234 " , 100 ) } yields { @ code 123 } . */
private static int parseFraction ( String v , int multiplier ) { } } | int r = 0 ; for ( int i = 0 ; i < v . length ( ) ; i ++ ) { char c = v . charAt ( i ) ; int x = c < '0' || c > '9' ? 0 : ( c - '0' ) ; r += multiplier * x ; if ( multiplier < 10 ) { // We ' re at the last digit . Check for rounding .
if ( i + 1 < v . length ( ) && v . charAt ( i + 1 ) >= '5' ) { ++ r ; } break ; } multiplier /= 10 ; } return r ; |
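
A standalone copy of the parsing and rounding rule described above, handy for checking the Javadoc example by hand.

```java
static int parseFractionSketch(String v, int multiplier) {
    int r = 0;
    for (int i = 0; i < v.length(); i++) {
        char c = v.charAt(i);
        int x = (c < '0' || c > '9') ? 0 : (c - '0');
        r += multiplier * x;
        if (multiplier < 10) {
            // last significant digit: round up if the next character is >= '5'
            if (i + 1 < v.length() && v.charAt(i + 1) >= '5') {
                ++r;
            }
            break;
        }
        multiplier /= 10;
    }
    return r;
}

// parseFractionSketch("1234", 100) == 123, as in the Javadoc example;
// parseFractionSketch("1235", 100) == 124 because of the rounding step.
```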
public class Search { /** * Remove the given search listener . If the search listener had not been added , < code > false < / code > is returned .
* Note that this method may only be called when the search is idle .
* @ param listener search listener to be removed
* @ return < code > true < / code > if the listener was present and has now been removed
* @ throws SearchException if the search is not idle */
public boolean removeSearchListener ( SearchListener < ? super SolutionType > listener ) { } } | // acquire status lock
synchronized ( statusLock ) { // assert idle
assertIdle ( "Cannot remove search listener." ) ; // remove listener
if ( searchListeners . remove ( listener ) ) { // log
LOGGER . debug ( "{}: removed search listener {}" , this , listener ) ; return true ; } else { return false ; } } |
public class Filter { /** * Adds an array of keys to < i > this < / i > filter .
* @ param keys The array of keys . */
public void add ( Key [ ] keys ) { } } | if ( keys == null ) { throw new IllegalArgumentException ( "Key[] may not be null" ) ; } for ( int i = 0 ; i < keys . length ; i ++ ) { add ( keys [ i ] ) ; } |
public class MockRepository { /** * When a mock framework API needs to store additional state not applicable
* for the other methods , it may use this method to do so .
* @ param key
* The key under which the < tt > value < / tt > is stored .
* @ param value
* The value to store under the specified < tt > key < / tt > .
* @ return The previous object under the specified < tt > key < / tt > or
* { @ code null } . */
public static synchronized Object putAdditionalState ( String key , Object value ) { } } | return additionalState . put ( key , value ) ; |
public class FieldInfo { /** * Initialise the field ( used by ' Alias ' constructor ) . */
public void writeFieldInit ( MethodVisitor mv ) { } } | Label l10 = new Label ( ) ; mv . visitLabel ( l10 ) ; mv . visitLineNumber ( 3 , l10 ) ; mv . visitVarInsn ( ALOAD , 0 ) ; mv . visitTypeInsn ( NEW , internalName ) ; mv . visitInsn ( DUP ) ; mv . visitLdcInsn ( name ) ; mv . visitVarInsn ( ALOAD , 0 ) ; mv . visitMethodInsn ( INVOKESPECIAL , internalName , "<init>" , "(Ljava/lang/String;Ljava/lang/Object;)V" , false ) ; mv . visitFieldInsn ( PUTFIELD , classInfo . getClassName ( ) , name , desc ) ; |
public class ICalendar { /** * Marshals this iCalendar object to its traditional , plain - text
* representation .
* If this iCalendar object contains user - defined property or component
* objects , you must use the { @ link Biweekly } or { @ link ICalWriter } classes
* instead in order to register the scribe classes .
* @ param writer the writer to write to
* @ throws IllegalArgumentException if this iCalendar object contains
* user - defined property or component objects
* @ throws IOException if there ' s a problem writing to the writer */
public void write ( Writer writer ) throws IOException { } } | ICalVersion version = ( this . version == null ) ? ICalVersion . V2_0 : this . version ; Biweekly . write ( this ) . version ( version ) . go ( writer ) ; |
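
A minimal hypothetical usage of `write(Writer)`: an empty calendar written to a file, assuming only standard biweekly properties so no custom scribes are needed; the output path is illustrative.

```java
import java.io.FileWriter;
import java.io.IOException;
import java.io.Writer;
import biweekly.ICalendar;

static void writeEmptyCalendar() throws IOException {
    ICalendar ical = new ICalendar();
    try (Writer writer = new FileWriter("calendar.ics")) {
        ical.write(writer);   // marshals to the traditional plain-text form
    }
}
```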
public class BeanConfig { /** * bind .
* @ param beanName a { @ link java . lang . String } object .
* @ param clazz a { @ link java . lang . Class } object .
* @ return a { @ link org . beangle . commons . inject . bind . BeanConfig . DefinitionBinder } object . */
public DefinitionBinder bind ( String beanName , Class < ? > clazz ) { } } | return new DefinitionBinder ( this ) . bind ( beanName , clazz ) ; |
public class CmsLoginManager { /** * Returns the key to use for looking up the user in the invalid login storage . < p >
* @ param userName the name of the user
* @ param remoteAddress the remote address ( IP ) from which the login attempt was made
* @ return the key to use for looking up the user in the invalid login storage */
private static String createStorageKey ( String userName , String remoteAddress ) { } } | StringBuffer result = new StringBuffer ( ) ; result . append ( userName ) ; result . append ( KEY_SEPARATOR ) ; result . append ( remoteAddress ) ; return result . toString ( ) ; |
public class DisableOnFieldHandler { /** * The Field has Changed .
* If the target string matches this field , disable the target field .
* @ param bDisplayOption If true , display the change .
* @ param iMoveMode The type of move being done ( init / read / screen ) .
* @ return The error code ( or NORMAL _ RETURN if okay ) .
* Disable field if criteria met . */
public int fieldChanged ( boolean bDisplayOption , int iMoveMode ) { } } | boolean bFlag = this . compareFieldToString ( ) ; if ( m_bDisableIfMatch ) bFlag = ! bFlag ; m_fldToDisable . setEnabled ( bFlag ) ; return DBConstants . NORMAL_RETURN ; |
public class SREConfigurationBlock { /** * Replies the specific SARL runtime environment .
* @ return the SARL runtime environment or < code > null < / code > if
* there is no selected SRE .
* @ see # isSystemWideDefaultSRE ( ) */
public ISREInstall getSpecificSRE ( ) { } } | final int index = this . runtimeEnvironmentCombo . getSelectionIndex ( ) ; if ( index >= 0 && index < this . runtimeEnvironments . size ( ) ) { return this . runtimeEnvironments . get ( index ) ; } return null ; |
public class VirtualMachineScaleSetsInner { /** * Gets the status of a VM scale set instance .
* @ param resourceGroupName The name of the resource group .
* @ param vmScaleSetName The name of the VM scale set .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the VirtualMachineScaleSetInstanceViewInner object */
public Observable < VirtualMachineScaleSetInstanceViewInner > getInstanceViewAsync ( String resourceGroupName , String vmScaleSetName ) { } } | return getInstanceViewWithServiceResponseAsync ( resourceGroupName , vmScaleSetName ) . map ( new Func1 < ServiceResponse < VirtualMachineScaleSetInstanceViewInner > , VirtualMachineScaleSetInstanceViewInner > ( ) { @ Override public VirtualMachineScaleSetInstanceViewInner call ( ServiceResponse < VirtualMachineScaleSetInstanceViewInner > response ) { return response . body ( ) ; } } ) ; |
public class HtmlDataSourceTag { /** * This method will create the name of the form element ( HTML Control ) that has a name . The
* < b > name < / b > attribute represents the " control name " for the control . This name is scoped
* into the form element . In addition , a control may have a < b > id < / b > attribute which is
* specified by setting the < b > tagId < / b > . These two values are set in this routine . The name
* is always the expression mapping the data to its backing element and is controlled
* by the optional naming chain provided by the tag . The < b > tagId < / b > specifies the < b > id < / b >
* attribute . If this is present then we write out a JavaScript that allows mapping
* the tagId set on the tag to both the real < b > id < / b > value and also the < b > name < / b > value .
* The < b > id < / b > is formed by passing the < b > tagId < / b > to the URL rewritter service .
* @ param state The tag state structure . This contains both the name and id attributes . The
* id attribute should be set with the initial value from the tagId .
* @ param javaScript A ByRef element that will contain any JavaScript that should be written out
* by the calling tag . A value is returned only if tagId is set and no IScriptReporter is
* found .
* @ throws JspException Pass through the exception from applyNamingChain . */
protected void nameHtmlControl ( AbstractHtmlControlState state , ByRef javaScript ) throws JspException { } } | assert ( javaScript != null ) : "paramater 'javaScript' may not be null" ; assert ( state != null ) : "parameter 'state' may not be null" ; assert ( _dataSource != null ) : "dataSource is Null" ; // create the expression ( name )
String datasource = "{" + _dataSource + "}" ; state . name = applyNamingChain ( datasource ) ; Form parentForm = getNearestForm ( ) ; String idScript = renderNameAndId ( ( HttpServletRequest ) pageContext . getRequest ( ) , state , parentForm ) ; if ( idScript != null ) javaScript . setRef ( idScript ) ; |
public class Admin { /** * @ throws PageException */
private void doUpdateSerial ( ) throws PageException { } } | admin . updateSerial ( getString ( "admin" , action , "serial" ) ) ; store ( ) ; pageContext . serverScope ( ) . reload ( ) ; |
public class Datamodel { /** * Creates an { @ link SenseIdValue } .
* @ param id
* a string of the form Ln . . . - Sm . . . where n . . . and m . . . are the string
* representation of a positive integer number
* @ param siteIri
* IRI to identify the site , usually the first part of the entity
* IRI of the site this belongs to , e . g . ,
* " http : / / www . wikidata . org / entity / "
* @ return an { @ link SenseIdValue } corresponding to the input */
public static SenseIdValue makeSenseIdValue ( String id , String siteIri ) { } } | return factory . getSenseIdValue ( id , siteIri ) ; |
public class AbstractTypedProperties { /** * - - - - - Default Properties - - - - - */
@ Override protected void loadDefaultProperties ( ) { } } | for ( P property : propertyClass . getEnumConstants ( ) ) { try { setProperty ( property , property . getPropertyCharacteristics ( ) . getDefaultValue ( ) , true ) ; } catch ( PropertyException e ) { e . printStackTrace ( ) ; } } |
public class FileUtils { /** * Close a { @ code DirectByteBuffer } - - in particular , will unmap a { @ link MappedByteBuffer } .
* @ param byteBuffer
* The { @ link ByteBuffer } to close / unmap .
* @ param log
* The log .
* @ return True if the byteBuffer was closed / unmapped . */
public static boolean closeDirectByteBuffer ( final ByteBuffer byteBuffer , final LogNode log ) { } } | if ( byteBuffer != null && byteBuffer . isDirect ( ) ) { return AccessController . doPrivileged ( new PrivilegedAction < Boolean > ( ) { @ Override public Boolean run ( ) { return closeDirectByteBufferPrivileged ( byteBuffer , log ) ; } } ) ; } else { // Nothing to unmap
return false ; } |
public class Reflections { /** * Gets a method by its name ; note that only public methods can be retrieved this way
* @ param cls
* @ param methodName
* @ return */
public static Method getMethod ( final Class cls , final String methodName ) { } } | return getMethod ( cls , methodName , false ) ; |
public class Graphics { /** * Draw a string to the screen using the current font
* @ param str
* The string to draw
* @ param x
* The x coordinate to draw the string at
* @ param y
* The y coordinate to draw the string at */
public void drawString ( String str , float x , float y ) { } } | predraw ( ) ; font . drawString ( x , y , str , currentColor ) ; postdraw ( ) ; |
public class NamespaceChangeStreamListener { /** * Read and store the next event from an open stream . This is a blocking method . */
void storeNextEvent ( ) { } } | try { if ( currentStream != null && currentStream . isOpen ( ) ) { final StitchEvent < ChangeEvent < BsonDocument > > event = currentStream . nextEvent ( ) ; if ( event == null ) { return ; } if ( event . getError ( ) != null ) { throw event . getError ( ) ; } if ( event . getData ( ) == null ) { return ; } logger . debug ( String . format ( Locale . US , "NamespaceChangeStreamListener::stream ns=%s event found: op=%s id=%s" , nsConfig . getNamespace ( ) , event . getData ( ) . getOperationType ( ) , event . getData ( ) . getId ( ) ) ) ; nsLock . writeLock ( ) . lockInterruptibly ( ) ; try { events . put ( BsonUtils . getDocumentId ( event . getData ( ) . getDocumentKey ( ) ) , event . getData ( ) ) ; } finally { nsLock . writeLock ( ) . unlock ( ) ; } for ( final Callback < ChangeEvent < BsonDocument > , Object > watcher : watchers ) { watcher . onComplete ( OperationResult . successfulResultOf ( event . getData ( ) ) ) ; } } } catch ( final InterruptedException | IOException ex ) { logger . info ( String . format ( Locale . US , "NamespaceChangeStreamListener::stream ns=%s interrupted on " + "fetching next event: %s" , nsConfig . getNamespace ( ) , ex ) ) ; logger . info ( "stream END – INTERRUPTED" ) ; Thread . currentThread ( ) . interrupt ( ) ; } catch ( final Exception ex ) { // TODO : Emit error through DataSynchronizer as an ifc
logger . error ( String . format ( Locale . US , "NamespaceChangeStreamListener::stream ns=%s exception on fetching next event: %s" , nsConfig . getNamespace ( ) , ex ) , ex ) ; logger . info ( "stream END – EXCEPTION" ) ; final boolean wasInterrupted = Thread . currentThread ( ) . isInterrupted ( ) ; this . close ( ) ; if ( wasInterrupted ) { Thread . currentThread ( ) . interrupt ( ) ; } } |
public class LoggingOperations { /** * Adds a { @ link Stage # RUNTIME runtime } step to the context that will commit or rollback any logging changes . Also
* if not a logging profile writes the { @ code logging . properties } file .
* Note the commit step will only be added if process type is a
* { @ linkplain org . jboss . as . controller . ProcessType # isServer ( ) server } .
* @ param context the context to add the step to
* @ param configurationPersistence the configuration to commit */
static void addCommitStep ( final OperationContext context , final ConfigurationPersistence configurationPersistence ) { } } | // This should only check that it ' s a server for the commit step . The logging . properties may need to be written
// in ADMIN _ ONLY mode
if ( context . getProcessType ( ) . isServer ( ) ) { context . addStep ( new CommitOperationStepHandler ( configurationPersistence ) , Stage . RUNTIME ) ; } |
public class IssueManager { /** * Deprecated . Use issue . delete ( ) instead .
* @ throws RedmineException */
@ Deprecated public void deleteIssue ( Integer id ) throws RedmineException { } } | transport . deleteObject ( Issue . class , Integer . toString ( id ) ) ; |
public class SoftwareSystem { /** * Sets the location of this software system .
* @ param location a Location instance */
public void setLocation ( Location location ) { } } | if ( location != null ) { this . location = location ; } else { this . location = Location . Unspecified ; } |
public class FileTracker { /** * Create a new tracked file
* @ param dir file directory
* @ param filename filename
* @ return file new file
* @ throws DataUtilException data util exception */
public File createFile ( File dir , String filename ) throws DataUtilException { } } | if ( dir == null ) { throw new DataUtilException ( "The directory parameter can't be a null value" ) ; } try { File file = new File ( dir , filename ) ; return createFile ( file ) ; } catch ( Exception e ) { throw new DataUtilException ( "Invalid file: " + filename ) ; } |
public class WsByteBufferUtils { /** * Clear each buffer in an array of them .
* @ param list */
public static void clearBufferArray ( WsByteBuffer [ ] list ) { } } | if ( list != null ) { for ( int i = 0 ; i < list . length ; ++ i ) { if ( list [ i ] != null ) list [ i ] . clear ( ) ; } } |
public class ProfileService { /** * < p > Creates a new instance of an implementation of { @ link ProfileManager } .
* @ param context
* the invocation context
* @ return a new instance of { @ link ProfileManager } .
* @ since 1.1.1 */
public static final synchronized ProfileManager getInstance ( Object context ) { } } | return ( instance == null ) ? ( instance = new ProfileService ( ContextUtils . asApplication ( context ) ) ) : instance ; |
public class DataWriterUtil { /** * The function to convert a percent element into a string .
* @ param value the input numeric value to convert .
* @ param decimalFormat the formatter to convert percentage element into string .
* @ return the string with the text presentation of the input numeric value . */
private static String convertPercentElementToString ( Object value , DecimalFormat decimalFormat ) { } } | Double doubleValue = value instanceof Long ? ( ( Long ) value ) . doubleValue ( ) : ( Double ) value ; return decimalFormat . format ( doubleValue ) ; |
public class PatchedRuntimeEnvironmentBuilder { /** * Sets entityManagerFactory .
* @ param emf entityManagerFactory
* @ return this RuntimeEnvironmentBuilder */
public RuntimeEnvironmentBuilder entityManagerFactory ( Object emf ) { } } | if ( emf == null ) { return this ; } if ( ! ( emf instanceof EntityManagerFactory ) ) { throw new IllegalArgumentException ( "Argument is not of type EntityManagerFactory" ) ; } _runtimeEnvironment . setEmf ( ( EntityManagerFactory ) emf ) ; return this ; |
public class TypesREST { /** * Get the relationship definition for the given guid
* @ param guid relationship guid
* @ return relationship definition
* @ throws AtlasBaseException
* @ HTTP 200 On successful lookup of the relationship definition by its guid
* @ HTTP 404 On Failed lookup for the given guid */
@ GET @ Path ( "/relationshipdef/guid/{guid}" ) @ Produces ( Servlets . JSON_MEDIA_TYPE ) public AtlasRelationshipDef getRelationshipDefByGuid ( @ PathParam ( "guid" ) String guid ) throws AtlasBaseException { } } | AtlasRelationshipDef ret = typeDefStore . getRelationshipDefByGuid ( guid ) ; return ret ; |
public class HBaseReader { /** * Load all .
* @ param hTable
* the h table
* @ param rows
* the rows
* @ param columnFamily
* the column family
* @ param columns
* the columns
* @ return the list
* @ throws IOException
* Signals that an I / O exception has occurred . */
public List < HBaseDataWrapper > loadAll ( final Table hTable , final List < Object > rows , final String columnFamily , final String [ ] columns ) throws IOException { } } | setTableName ( hTable ) ; List < HBaseDataWrapper > results = new ArrayList < HBaseDataWrapper > ( ) ; List < Get > getRequest = new ArrayList < Get > ( ) ; for ( Object rowKey : rows ) { if ( rowKey != null ) { byte [ ] rowKeyBytes = HBaseUtils . getBytes ( rowKey ) ; Get request = new Get ( rowKeyBytes ) ; getRequest . add ( request ) ; } } Result [ ] rawResult = hTable . get ( getRequest ) ; for ( Result result : rawResult ) { List < Cell > cells = result . listCells ( ) ; if ( cells != null ) { HBaseDataWrapper data = new HBaseDataWrapper ( tableName , result . getRow ( ) ) ; data . setColumns ( cells ) ; results . add ( data ) ; } } return results ; |
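
A hypothetical stripped-down batch read against an already-opened HBase `Table`, showing the same Get-list pattern as `loadAll`; the row keys are illustrative.

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

static void batchGet(Table hTable) throws IOException {
    List<Get> gets = new ArrayList<>();
    gets.add(new Get(Bytes.toBytes("row-1")));
    gets.add(new Get(Bytes.toBytes("row-2")));
    Result[] results = hTable.get(gets);   // one Result per Get, empty if the row is missing
    for (Result result : results) {
        int cells = result.listCells() == null ? 0 : result.listCells().size();
        System.out.println("cells: " + cells);
    }
}
```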
public class MembershipTypeHandlerImpl { /** * Creates and returns membership type node . If node already exists it will be returned
* otherwise the new one will be created . */
private Node getOrCreateMembershipTypeNode ( Session session , MembershipTypeImpl mType ) throws Exception { } } | try { return mType . getInternalId ( ) != null ? session . getNodeByUUID ( mType . getInternalId ( ) ) : utils . getMembershipTypeNode ( session , mType . getName ( ) ) ; } catch ( ItemNotFoundException e ) { return createNewMembershipTypeNode ( session , mType ) ; } catch ( PathNotFoundException e ) { return createNewMembershipTypeNode ( session , mType ) ; } |
public class TimeUtils { /** * Renders a time period in human readable form
* @ param time the time in milliseconds
* @ return the time in human readable form */
public static String asTime ( long time ) { } } | if ( time > HOUR ) { return String . format ( "%.1f h" , ( time / HOUR ) ) ; } else if ( time > MINUTE ) { return String . format ( "%.1f m" , ( time / MINUTE ) ) ; } else if ( time > SECOND ) { return String . format ( "%.1f s" , ( time / SECOND ) ) ; } else { return String . format ( "%d ms" , time ) ; } |
public class UnionPayApi { /** * Sends a back - end ( server - to - server ) request and returns the result as a Map
* @ param reqData
* request parameters
* @ return { Map < String , String > } */
public static Map < String , String > backRequestByMap ( Map < String , String > reqData ) { } } | return SDKUtil . convertResultStringToMap ( backRequest ( reqData ) ) ; |
public class DeviceProxyDAODefaultImpl { public AttributeInfo [ ] get_attribute_info ( final DeviceProxy deviceProxy , final String [ ] attributeNames ) throws DevFailed { } } | build_connection ( deviceProxy ) ; final int retries = deviceProxy . transparent_reconnection ? 2 : 1 ; for ( int oneTry = 1 ; oneTry <= retries ; oneTry ++ ) { try { AttributeInfo [ ] result ; AttributeConfig [ ] ac = new AttributeConfig [ 0 ] ; AttributeConfig_2 [ ] ac_2 = null ; if ( deviceProxy . url . protocol == TANGO ) { // Check IDL version
if ( deviceProxy . device_2 != null ) { ac_2 = deviceProxy . device_2 . get_attribute_config_2 ( attributeNames ) ; } else { ac = deviceProxy . device . get_attribute_config ( attributeNames ) ; } } else { ac = deviceProxy . taco_device . get_attribute_config ( attributeNames ) ; } // Convert AttributeConfig ( _ 2 ) object to AttributeInfo object
final int size = ac_2 != null ? ac_2 . length : ac . length ; result = new AttributeInfo [ size ] ; for ( int i = 0 ; i < size ; i ++ ) { if ( ac_2 != null ) { result [ i ] = new AttributeInfo ( ac_2 [ i ] ) ; } else { result [ i ] = new AttributeInfo ( ac [ i ] ) ; } } return result ; } catch ( final DevFailed e ) { if ( oneTry >= retries ) { throw e ; } } catch ( final Exception e ) { if ( oneTry >= retries ) { ApiUtilDAODefaultImpl . removePendingRepliesOfDevice ( deviceProxy ) ; throw_dev_failed ( deviceProxy , e , "get_attribute_config" , true ) ; } } } return null ; |
public class GlobalNamespace { /** * Determines whether a variable name reference in a particular scope is a global variable
* reference .
* @ param name A variable name ( e . g . " a " )
* @ param s The scope in which the name is referenced
* @ return Whether the name reference is a global variable reference */
private boolean isGlobalVarReference ( String name , Scope s ) { } } | Var v = s . getVar ( name ) ; if ( v == null && externsScope != null ) { v = externsScope . getVar ( name ) ; } return v != null && ! v . isLocal ( ) ; |
public class Enhancements { /** * Returns the { @ link Collection } of extracted { @ link KeywordAnnotation } s
* @ return */
@ SuppressWarnings ( "unchecked" ) public Collection < KeywordAnnotation > getKeywordAnnotations ( ) { } } | Collection < ? extends Enhancement > result = enhancements . get ( KeywordAnnotation . class ) ; return result == null ? Collections . < KeywordAnnotation > emptySet ( ) : Collections . unmodifiableCollection ( ( Collection < KeywordAnnotation > ) result ) ; |
public class PersonGroupsImpl { /** * Retrieve the training status of a person group ( completed or ongoing ) .
* @ param personGroupId Id referencing a particular person group .
* @ throws IllegalArgumentException thrown if parameters fail the validation
* @ return the observable to the TrainingStatus object */
public Observable < TrainingStatus > getTrainingStatusAsync ( String personGroupId ) { } } | return getTrainingStatusWithServiceResponseAsync ( personGroupId ) . map ( new Func1 < ServiceResponse < TrainingStatus > , TrainingStatus > ( ) { @ Override public TrainingStatus call ( ServiceResponse < TrainingStatus > response ) { return response . body ( ) ; } } ) ; |
public class Http2Connection { /** * Callers of this method are not thread safe , and sometimes on application threads . Most often ,
* this method will be called to send a buffer worth of data to the peer .
* < p > Writes are subject to the write window of the stream and the connection . Until there is a
* window sufficient to send { @ code byteCount } , the caller will block . For example , a user of
* { @ code HttpURLConnection } who flushes more bytes to the output stream than the connection ' s
* write window will block .
* < p > Zero { @ code byteCount } writes are not subject to flow control and will not block . The only
* use case for zero { @ code byteCount } is closing a flushed output stream . */
public void writeData ( int streamId , boolean outFinished , Buffer buffer , long byteCount ) throws IOException { } } | if ( byteCount == 0 ) { // Empty data frames are not flow - controlled .
writer . data ( outFinished , streamId , buffer , 0 ) ; return ; } while ( byteCount > 0 ) { int toWrite ; synchronized ( Http2Connection . this ) { try { while ( bytesLeftInWriteWindow <= 0 ) { // Before blocking , confirm that the stream we ' re writing is still open . It ' s possible
// that the stream has since been closed ( such as if this write timed out . )
if ( ! streams . containsKey ( streamId ) ) { throw new IOException ( "stream closed" ) ; } Http2Connection . this . wait ( ) ; // Wait until we receive a WINDOW _ UPDATE .
} } catch ( InterruptedException e ) { Thread . currentThread ( ) . interrupt ( ) ; // Retain interrupted status .
throw new InterruptedIOException ( ) ; } toWrite = ( int ) Math . min ( byteCount , bytesLeftInWriteWindow ) ; toWrite = Math . min ( toWrite , writer . maxDataLength ( ) ) ; bytesLeftInWriteWindow -= toWrite ; } byteCount -= toWrite ; writer . data ( outFinished && byteCount == 0 , streamId , buffer , toWrite ) ; } |
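
A simplified, self-contained illustration of the windowed-write loop described above; the `window` field stands in for `bytesLeftInWriteWindow` and `onWindowUpdate` for the WINDOW_UPDATE handler, so this is only the blocking pattern, not OkHttp's implementation.

```java
final class WindowedWriter {
    private long window = 65_535;          // hypothetical initial flow-control window

    synchronized void onWindowUpdate(long increment) {
        window += increment;
        notifyAll();                       // wake writers blocked in write()
    }

    synchronized void write(long byteCount) throws InterruptedException {
        while (byteCount > 0) {
            while (window <= 0) {
                wait();                    // block until a WINDOW_UPDATE arrives
            }
            long toWrite = Math.min(byteCount, window);
            window -= toWrite;
            byteCount -= toWrite;
            // a real implementation would emit a DATA frame of size toWrite here
        }
    }
}
```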
public class Ifc2x3tc1FactoryImpl { /** * < ! - - begin - user - doc - - >
* < ! - - end - user - doc - - >
* @ generated */
public String convertIfcCableSegmentTypeEnumToString ( EDataType eDataType , Object instanceValue ) { } } | return instanceValue == null ? null : instanceValue . toString ( ) ; |
public class CommerceNotificationAttachmentPersistenceImpl { /** * Returns the number of commerce notification attachments where uuid = & # 63 ; .
* @ param uuid the uuid
* @ return the number of matching commerce notification attachments */
@ Override public int countByUuid ( String uuid ) { } } | FinderPath finderPath = FINDER_PATH_COUNT_BY_UUID ; Object [ ] finderArgs = new Object [ ] { uuid } ; Long count = ( Long ) finderCache . getResult ( finderPath , finderArgs , this ) ; if ( count == null ) { StringBundler query = new StringBundler ( 2 ) ; query . append ( _SQL_COUNT_COMMERCENOTIFICATIONATTACHMENT_WHERE ) ; boolean bindUuid = false ; if ( uuid == null ) { query . append ( _FINDER_COLUMN_UUID_UUID_1 ) ; } else if ( uuid . equals ( "" ) ) { query . append ( _FINDER_COLUMN_UUID_UUID_3 ) ; } else { bindUuid = true ; query . append ( _FINDER_COLUMN_UUID_UUID_2 ) ; } String sql = query . toString ( ) ; Session session = null ; try { session = openSession ( ) ; Query q = session . createQuery ( sql ) ; QueryPos qPos = QueryPos . getInstance ( q ) ; if ( bindUuid ) { qPos . add ( uuid ) ; } count = ( Long ) q . uniqueResult ( ) ; finderCache . putResult ( finderPath , finderArgs , count ) ; } catch ( Exception e ) { finderCache . removeResult ( finderPath , finderArgs ) ; throw processException ( e ) ; } finally { closeSession ( session ) ; } } return count . intValue ( ) ; |
public class DbPreparedStatement { /** * Sets int value for all fields matched by name . If value is null calls setNull for all fields . */
@ NotNull public DbPreparedStatement < T > set ( @ NotNull String name , @ Nullable DbInt value ) throws SQLException { } } | return value == null ? setNull ( name , JDBCType . INTEGER ) : set ( name , value . getDbValue ( ) ) ; |
public class Sets { /** * Create a keyset from a set using a { @ code KeyExtractor }
* @ param set original set
* @ param keyExtractor key extractor
* @ param < T > type
* @ return a new keyset extracted from the original set */
public static < T > Set < T > keyset ( Set < T > set , KeyExtractor < T > keyExtractor ) { } } | Set < T > keyset = new HashSet < T > ( ) ; for ( T item : set ) { keyset . add ( keyExtractor . extractKey ( item ) ) ; } return keyset ; |
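
A hypothetical usage sketch, assuming `KeyExtractor` is a simple interface whose only method is the `extractKey` seen in the implementation above; each string's first character acts as its key.

```java
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

// keys ends up as {"a", "b"}: "alpha" and "apex" collapse onto the same key.
Set<String> names = new HashSet<>(Arrays.asList("alpha", "beta", "apex"));
Set<String> keys = Sets.keyset(names, new KeyExtractor<String>() {
    @Override
    public String extractKey(String item) {
        return item.substring(0, 1);   // first character as the key
    }
});
```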
public class CheckRequestAggregator { /** * Obtains the { @ code HashCode } for the contents of { @ code value } .
* @ param value a { @ code CheckRequest } to be signed
* @ return the { @ code HashCode } corresponding to { @ code value } */
public static HashCode sign ( CheckRequest value ) { } } | Hasher h = Hashing . md5 ( ) . newHasher ( ) ; Operation o = value . getOperation ( ) ; if ( o == null || Strings . isNullOrEmpty ( o . getConsumerId ( ) ) || Strings . isNullOrEmpty ( o . getOperationName ( ) ) ) { throw new IllegalArgumentException ( "CheckRequest should have a valid operation" ) ; } h . putString ( o . getConsumerId ( ) , StandardCharsets . UTF_8 ) ; h . putChar ( '\0' ) ; h . putString ( o . getOperationName ( ) , StandardCharsets . UTF_8 ) ; h . putChar ( '\0' ) ; Signing . putLabels ( h , o . getLabels ( ) ) ; for ( MetricValueSet mvSet : o . getMetricValueSetsList ( ) ) { h . putString ( mvSet . getMetricName ( ) , StandardCharsets . UTF_8 ) ; h . putChar ( '\0' ) ; for ( MetricValue metricValue : mvSet . getMetricValuesList ( ) ) { MetricValues . putMetricValue ( h , metricValue ) ; } } return h . hash ( ) ; |
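
A standalone sketch of the core of the signing scheme above: the consumer id and operation name are hashed with a `'\0'` separator using Guava's `Hasher`; labels and metric values are omitted for brevity.

```java
import com.google.common.hash.HashCode;
import com.google.common.hash.Hasher;
import com.google.common.hash.Hashing;
import java.nio.charset.StandardCharsets;

static HashCode signSketch(String consumerId, String operationName) {
    Hasher h = Hashing.md5().newHasher();
    h.putString(consumerId, StandardCharsets.UTF_8);
    h.putChar('\0');                       // field separator, as in the aggregator
    h.putString(operationName, StandardCharsets.UTF_8);
    h.putChar('\0');
    return h.hash();
}
```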
public class DeleteException { /** * Converts a Throwable to a DeleteException with the specified detail message . If the
* Throwable is a DeleteException and if the Throwable ' s message is identical to the
* one supplied , the Throwable will be passed through unmodified ; otherwise , it will be wrapped in
* a new DeleteException with the detail message .
* @ param cause the Throwable to convert
* @ param message the specified detail message
* @ return a DeleteException */
public static DeleteException fromThrowable ( String message , Throwable cause ) { } } | return ( cause instanceof DeleteException && Objects . equals ( message , cause . getMessage ( ) ) ) ? ( DeleteException ) cause : new DeleteException ( message , cause ) ; |
public class FileOperations { /** * Create a new directory . The direct parent directory already needs to exist .
* @ param aDir
* The directory to be created . May not be < code > null < / code > .
* @ return A non - < code > null < / code > error code . */
@ Nonnull public static FileIOError createDir ( @ Nonnull final File aDir ) { } } | ValueEnforcer . notNull ( aDir , "Directory" ) ; // Does the directory already exist ?
if ( aDir . exists ( ) ) return EFileIOErrorCode . TARGET_ALREADY_EXISTS . getAsIOError ( EFileIOOperation . CREATE_DIR , aDir ) ; // Is the parent directory writable ?
final File aParentDir = aDir . getParentFile ( ) ; if ( aParentDir != null && aParentDir . exists ( ) && ! aParentDir . canWrite ( ) ) return EFileIOErrorCode . SOURCE_PARENT_NOT_WRITABLE . getAsIOError ( EFileIOOperation . CREATE_DIR , aDir ) ; try { final EFileIOErrorCode eError = aDir . mkdir ( ) ? EFileIOErrorCode . NO_ERROR : EFileIOErrorCode . OPERATION_FAILED ; return eError . getAsIOError ( EFileIOOperation . CREATE_DIR , aDir ) ; } catch ( final SecurityException ex ) { return EFileIOErrorCode . getSecurityAsIOError ( EFileIOOperation . CREATE_DIR , ex ) ; } |
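
A hypothetical usage sketch; the directory path is illustrative and the returned error code is simply printed rather than interpreted.

```java
import java.io.File;

// createDir requires the parent directory ("target") to already exist.
File dir = new File("target/demo-dir");
System.out.println("createDir(" + dir + ") -> " + FileOperations.createDir(dir));
```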
public class PlaceViewUtil { /** * Dispatches a call to { @ link PlaceView # didLeavePlace } to all UI elements in the hierarchy
* rooted at the component provided via the < code > root < / code > parameter .
* @ param root the component at which to start traversing the UI hierarchy .
* @ param plobj the place object that is about to be entered . */
public static void dispatchDidLeavePlace ( Object root , PlaceObject plobj ) { } } | // dispatch the call on this component if it implements PlaceView
if ( root instanceof PlaceView ) { try { ( ( PlaceView ) root ) . didLeavePlace ( plobj ) ; } catch ( Exception e ) { log . warning ( "Component choked on didLeavePlace()" , "component" , root , "plobj" , plobj , e ) ; } } // now traverse all of this component ' s children
if ( root instanceof Container ) { Container cont = ( Container ) root ; int ccount = cont . getComponentCount ( ) ; for ( int ii = 0 ; ii < ccount ; ii ++ ) { dispatchDidLeavePlace ( cont . getComponent ( ii ) , plobj ) ; } } |
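A hedged sketch of how a client might call dispatchDidLeavePlace(); the PlaceObject and PlaceViewUtil imports from the hosting toolkit are assumed to be in scope.
import javax.swing.JFrame;

public class LeavePlaceExample {
    // Hypothetical hook invoked when the client leaves a place; frame is the top-level UI.
    static void placeLeft(JFrame frame, PlaceObject plobj) {
        // Walks the entire Swing hierarchy under the content pane and notifies every PlaceView.
        PlaceViewUtil.dispatchDidLeavePlace(frame.getContentPane(), plobj);
    }
}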
public class TypeDelta { void setTypeSuperNameChange ( String oldSuperName , String newSuperName ) { } } | this . oSuperName = oldSuperName ; this . nSuperName = newSuperName ; this . changed |= CHANGED_SUPERNAME ; |
public class StructureUtil { /** * Find linkages between two groups within tolerance of bond length ,
* from potential atoms .
* @ param group1 the first { @ link Group } .
* @ param group2 the second { @ link Group } .
* @ param potentialNamesOfAtomOnGroup1 potential names of the atom on the first group .
* If null , search all atoms on the first group .
* @ param potentialNamesOfAtomOnGroup2 potential names of the atom on the second group .
* If null , search all atoms on the second group .
* @ param ignoreNCLinkage true to ignore all N - C linkages
* @ param bondLengthTolerance bond length error tolerance .
* @ return a list , each element of which is an array of two Atoms that form a bond
* with each other . */
public static List < Atom [ ] > findAtomLinkages ( final Group group1 , final Group group2 , List < String > potentialNamesOfAtomOnGroup1 , List < String > potentialNamesOfAtomOnGroup2 , final boolean ignoreNCLinkage , final double bondLengthTolerance ) { } } | if ( group1 == null || group2 == null ) { throw new IllegalArgumentException ( "Null group(s)." ) ; } if ( bondLengthTolerance < 0 ) { throw new IllegalArgumentException ( "bondLengthTolerance cannot be negative." ) ; } List < Atom [ ] > ret = new ArrayList < Atom [ ] > ( ) ; if ( potentialNamesOfAtomOnGroup1 == null ) { // if empty name , search for all atoms
potentialNamesOfAtomOnGroup1 = getAtomNames ( group1 ) ; } if ( potentialNamesOfAtomOnGroup2 == null ) { // if empty name , search for all atoms
potentialNamesOfAtomOnGroup2 = getAtomNames ( group2 ) ; } for ( String namesOfAtomOnGroup1 : potentialNamesOfAtomOnGroup1 ) { for ( String namesOfAtomOnGroup2 : potentialNamesOfAtomOnGroup2 ) { Atom [ ] atoms = findLinkage ( group1 , group2 , namesOfAtomOnGroup1 , namesOfAtomOnGroup2 , bondLengthTolerance ) ; if ( atoms != null ) { if ( ignoreNCLinkage && ( ( atoms [ 0 ] . getName ( ) . equals ( "N" ) && atoms [ 1 ] . getName ( ) . equals ( "C" ) ) || ( atoms [ 0 ] . getName ( ) . equals ( "C" ) && atoms [ 1 ] . getName ( ) . equals ( "N" ) ) ) ) { continue ; } ret . add ( atoms ) ; } } } return ret ; |
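A usage sketch for findAtomLinkages(): the two Group instances are assumed to come from an already-parsed structure, the 0.4 Å tolerance is an arbitrary illustrative value, and the package of StructureUtil itself is assumed.
import java.util.List;
import org.biojava.nbio.structure.Atom;
import org.biojava.nbio.structure.Group;

public class LinkageExample {
    static void printLinkages(Group group1, Group group2) {
        // null name lists mean "consider every atom on the group"; N-C peptide linkages are skipped.
        List<Atom[]> linkages = StructureUtil.findAtomLinkages(group1, group2, null, null, true, 0.4);
        for (Atom[] pair : linkages) {
            System.out.println(pair[0].getName() + " - " + pair[1].getName());
        }
    }
}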
public class Gradient { /** * Resolve the gradient reference
* @ param diagram The diagram to resolve against */
public void resolve ( Diagram diagram ) { } } | if ( ref == null ) { return ; } Gradient other = diagram . getGradient ( ref ) ; for ( int i = 0 ; i < other . steps . size ( ) ; i ++ ) { steps . add ( other . steps . get ( i ) ) ; } |
public class BioPAXIOHandlerAdapter { /** * This method currently only fixes reusedPEPs if the option is set . As L2 is becoming obsolete this method will be
* slated for deprecation .
* @ param bpe to be bound
* @ param value to be assigned .
* @ return a " fixed " value . */
protected Object resourceFixes ( BioPAXElement bpe , Object value ) { } } | if ( this . isFixReusedPEPs ( ) && value instanceof physicalEntityParticipant ) { value = this . getReusedPEPHelper ( ) . fixReusedPEP ( ( physicalEntityParticipant ) value , bpe ) ; } return value ; |
public class RuntimeStepExecution { /** * Since this is a non - trivial behavior to support , let ' s keep it internal rather than exposing it . */
private MetricImpl getCommittedMetric ( MetricImpl . MetricType metricType ) { } } | return ( MetricImpl ) committedMetrics . get ( metricType . name ( ) ) ; |
public class ContactsApi { /** * Get alliance contact labels Return custom labels for an alliance ' s
* contacts - - - This route is cached for up to 300 seconds SSO Scope :
* esi - alliances . read _ contacts . v1
* @ param allianceId
* An EVE alliance ID ( required )
* @ param datasource
* The server name you would like data from ( optional , default to
* tranquility )
* @ param ifNoneMatch
* ETag from a previous request . A 304 will be returned if this
* matches the current ETag ( optional )
* @ param token
* Access token to use if unable to set a header ( optional )
* @ return List & lt ; AllianceContactsLabelsResponse & gt ;
* @ throws ApiException
* If fail to call the API , e . g . server error or cannot
* deserialize the response body */
public List < AllianceContactsLabelsResponse > getAlliancesAllianceIdContactsLabels ( Integer allianceId , String datasource , String ifNoneMatch , String token ) throws ApiException { } } | ApiResponse < List < AllianceContactsLabelsResponse > > resp = getAlliancesAllianceIdContactsLabelsWithHttpInfo ( allianceId , datasource , ifNoneMatch , token ) ; return resp . getData ( ) ; |
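A hedged call sketch for the contact-labels endpoint: the generated no-arg ContactsApi constructor and the model/exception imports from the generated client are assumptions, and the alliance id is hypothetical.
import java.util.List;
// plus imports for ContactsApi, AllianceContactsLabelsResponse and ApiException from the generated client

public class ContactLabelsExample {
    public static void main(String[] args) throws ApiException {
        ContactsApi api = new ContactsApi();          // assumed generated no-arg constructor
        Integer allianceId = 99000006;                // hypothetical alliance id
        String token = System.getenv("ESI_TOKEN");    // optional; may be null if a header is set instead
        List<AllianceContactsLabelsResponse> labels =
                api.getAlliancesAllianceIdContactsLabels(allianceId, null, null, token);
        System.out.println("labels returned: " + labels.size());
    }
}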
public class JmsJcaActivationSpecImpl { /** * ( non - Javadoc )
* @ see com . ibm . ws . sib . api . jmsra . JmsJcaActivationSpec # setPassword ( java . lang . String ) */
@ Override public void setPassword ( final String password ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && TRACE . isDebugEnabled ( ) ) { SibTr . debug ( this , TRACE , "setPassword" , "***" ) ; } _password = password ; |
public class AbstractFeatureSourceLayer { /** * Get the feature source ( either load from the supplier or return the cached source ) .
* @ param httpRequestFactory The factory for making http requests .
* @ param mapContext The map context . */
public final FeatureSource < ? , ? > getFeatureSource ( @ Nonnull final MfClientHttpRequestFactory httpRequestFactory , @ Nonnull final MapfishMapContext mapContext ) { } } | if ( this . featureSource == null ) { this . featureSource = this . featureSourceSupplier . load ( httpRequestFactory , mapContext ) ; } return this . featureSource ; |
public class AuthorizeSecurityGroupIngressRequest { /** * This method is intended for internal use only . Returns the marshaled request configured with additional
* parameters to enable operation dry - run . */
@ Override public Request < AuthorizeSecurityGroupIngressRequest > getDryRunRequest ( ) { } } | Request < AuthorizeSecurityGroupIngressRequest > request = new AuthorizeSecurityGroupIngressRequestMarshaller ( ) . marshall ( this ) ; request . addParameter ( "DryRun" , Boolean . toString ( true ) ) ; return request ; |
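A usage sketch showing how getDryRunRequest() is exercised indirectly through the EC2 client's dryRun() call; the security-group id and rule values are hypothetical, and the AWS SDK v1 client classes are assumed to be on the classpath.
import com.amazonaws.services.ec2.AmazonEC2;
import com.amazonaws.services.ec2.AmazonEC2ClientBuilder;
import com.amazonaws.services.ec2.model.AuthorizeSecurityGroupIngressRequest;
import com.amazonaws.services.ec2.model.DryRunResult;

public class DryRunExample {
    public static void main(String[] args) {
        AmazonEC2 ec2 = AmazonEC2ClientBuilder.defaultClient();
        AuthorizeSecurityGroupIngressRequest request = new AuthorizeSecurityGroupIngressRequest()
                .withGroupId("sg-0123456789abcdef0")   // hypothetical security group id
                .withIpProtocol("tcp")
                .withFromPort(443)
                .withToPort(443)
                .withCidrIp("203.0.113.0/24");
        // dryRun() invokes getDryRunRequest() under the covers and only checks authorization.
        DryRunResult<AuthorizeSecurityGroupIngressRequest> result = ec2.dryRun(request);
        System.out.println("authorized: " + result.isSuccessful());
    }
}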
public class CurrencyFormat { /** * { @ inheritDoc } */
@ Override public StringBuilder formatMeasures ( StringBuilder appendTo , FieldPosition fieldPosition , Measure ... measures ) { } } | return mf . formatMeasures ( appendTo , fieldPosition , measures ) ; |
public class MariaDbConnection { /** * < p > Creates a default < code > PreparedStatement < / code > object that has the capability to retrieve
* auto - generated keys . The given constant tells the driver whether it should make auto - generated
* keys available for retrieval . This parameter is ignored if the SQL statement is not an
* < code > INSERT < / code > statement , or an SQL statement able to return auto - generated keys ( the
* list of such statements is vendor - specific ) . < / p >
* < p > < B > Note : < / B > This method is optimized for handling parametric SQL statements that benefit
* from precompilation . If the driver supports precompilation , the method
* < code > prepareStatement < / code > will send the statement to the database for precompilation . Some
* drivers may not support precompilation . In this case , the statement may not be sent to the
* database until the < code > PreparedStatement < / code > object is executed . This has no direct
* effect on users ; however , it does affect which methods throw certain SQLExceptions . < / p >
* < p > Result sets created using the returned < code > PreparedStatement < / code > object will by
* default be type < code > TYPE _ FORWARD _ ONLY < / code > and have a concurrency level of
* < code > CONCUR _ READ _ ONLY < / code > . The holdability of the created result sets can be determined by
* calling { @ link # getHoldability } . < / p >
* @ param sql an SQL statement that may contain one or more ' ? ' IN parameter
* placeholders
* @ param autoGeneratedKeys a flag indicating whether auto - generated keys should be returned ; one
* of
* < code > Statement . RETURN _ GENERATED _ KEYS < / code >
* or < code > Statement . NO _ GENERATED _ KEYS < / code >
* @ return a new < code > PreparedStatement < / code > object , containing the pre - compiled SQL statement ,
* that will have the capability of returning auto - generated keys
* @ throws SQLException if a database access error occurs , this method is
* called on a closed connection or the given parameter is
* not a < code > Statement < / code > constant indicating
* whether auto - generated keys should be returned */
public PreparedStatement prepareStatement ( final String sql , final int autoGeneratedKeys ) throws SQLException { } } | return internalPrepareStatement ( sql , ResultSet . TYPE_FORWARD_ONLY , ResultSet . CONCUR_READ_ONLY , autoGeneratedKeys ) ; |
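A plain JDBC sketch of retrieving auto-generated keys through this overload; the connection URL, credentials, and table are placeholders.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GeneratedKeysExample {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:mariadb://localhost:3306/test", "user", "password");  // placeholder URL and credentials
             PreparedStatement ps = conn.prepareStatement(
                     "INSERT INTO users(name) VALUES (?)", Statement.RETURN_GENERATED_KEYS)) {
            ps.setString(1, "alice");
            ps.executeUpdate();
            try (ResultSet keys = ps.getGeneratedKeys()) {
                if (keys.next()) {
                    System.out.println("generated id = " + keys.getLong(1));
                }
            }
        }
    }
}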
public class TreeItem { /** * This method is called by the children tags of the TreeItem . If there is a < code > TreeHtmlAttribute < / code >
* it will set the attribute name and value .
* @ param attr */
public void setItemAttribute ( TreeHtmlAttributeInfo attr ) { } } | _child = true ; if ( _attributes == null ) _attributes = new ArrayList ( ) ; _attributes . add ( attr ) ; |
public class GeometryIndexService { /** * Given a certain geometry , get the edge the index points to . This only works if the index actually points to an
* edge .
* @ param geometry
* The geometry to search in .
* @ param index
* The index that points to an edge within the given geometry .
* @ return Returns the edge if it exists .
* @ throws GeometryIndexNotFoundException
* Thrown in case the index is of the wrong type , or if the edge could not be found within the given
* geometry . */
public Coordinate [ ] getEdge ( Geometry geometry , GeometryIndex index ) throws GeometryIndexNotFoundException { } } | if ( index . hasChild ( ) ) { if ( geometry . getGeometries ( ) != null && geometry . getGeometries ( ) . length > index . getValue ( ) ) { return getEdge ( geometry . getGeometries ( ) [ index . getValue ( ) ] , index . getChild ( ) ) ; } throw new GeometryIndexNotFoundException ( "Could not match index with given geometry" ) ; } if ( index . getType ( ) == GeometryIndexType . TYPE_EDGE && geometry . getCoordinates ( ) != null && geometry . getCoordinates ( ) . length > ( index . getValue ( ) - 1 ) ) { return new Coordinate [ ] { geometry . getCoordinates ( ) [ index . getValue ( ) ] , geometry . getCoordinates ( ) [ index . getValue ( ) + 1 ] } ; } throw new GeometryIndexNotFoundException ( "Could not match index with given geometry" ) ; |
public class NameSpace { /** * Get the methods defined in this namespace . ( This does not show methods
* in parent namespaces ) . Note : This will probably be renamed
* getDeclaredMethods ( )
* @ return the methods */
public BshMethod [ ] getMethods ( ) { } } | return this . methods . values ( ) . stream ( ) . flatMap ( v -> v . stream ( ) ) . toArray ( BshMethod [ ] :: new ) ; |
public class AmazonKinesisVideoPutMediaClient { /** * Create an { @ link javax . net . ssl . SSLContext } for the Netty Bootstrap .
* @ param uri URI of request .
* @ return Null if not over SSL , otherwise configured { @ link javax . net . ssl . SSLContext } to use . */
private SslContext getSslContext ( URI uri ) { } } | if ( ! "https" . equalsIgnoreCase ( uri . getScheme ( ) ) ) { return null ; } try { return SslContextBuilder . forClient ( ) . build ( ) ; } catch ( SSLException e ) { throw new SdkClientException ( "Could not create SSL context" , e ) ; } |
public class SocketResourceFactory { /** * Log relevant socket creation details */
private void recordSocketCreation ( SocketDestination dest , Socket socket ) throws SocketException { } } | int numCreated = created . incrementAndGet ( ) ; logger . debug ( "Created socket " + numCreated + " for " + dest . getHost ( ) + ":" + dest . getPort ( ) + " using protocol " + dest . getRequestFormatType ( ) . getCode ( ) ) ; // check buffer sizes - - you often don ' t get out what you put in !
int sendBufferSize = socket . getSendBufferSize ( ) ; int receiveBufferSize = socket . getReceiveBufferSize ( ) ; if ( receiveBufferSize != this . socketBufferSize ) logger . debug ( "Requested socket receive buffer size was " + this . socketBufferSize + " bytes but actual size is " + receiveBufferSize + " bytes." ) ; if ( sendBufferSize != this . socketBufferSize ) logger . debug ( "Requested socket send buffer size was " + this . socketBufferSize + " bytes but actual size is " + sendBufferSize + " bytes." ) ; |
public class DefaultGroovyMethods { /** * Allows the usage of addShutdownHook without getting the runtime first .
* @ param self the object the method is called on ( ignored )
* @ param closure the shutdown hook action
* @ since 1.5.0 */
public static void addShutdownHook ( Object self , Closure closure ) { } } | Runtime . getRuntime ( ) . addShutdownHook ( new Thread ( closure ) ) ; |
public class IOGroovyMethods { /** * Filter lines from an input stream using a closure predicate . The closure
* will be passed each line as a String , and it should return
* < code > true < / code > if the line should be passed to the writer .
* @ param self an input stream
* @ param charset opens the stream with a specified charset
* @ param predicate a closure which returns boolean and takes a line
* @ return a writable which writes out the filtered lines
* @ throws UnsupportedEncodingException if the encoding specified is not supported
* @ see # filterLine ( java . io . Reader , groovy . lang . Closure )
* @ since 1.6.8 */
public static Writable filterLine ( InputStream self , String charset , @ ClosureParams ( value = SimpleType . class , options = "java.lang.String" ) Closure predicate ) throws UnsupportedEncodingException { } } | return filterLine ( newReader ( self , charset ) , predicate ) ; |
public class CPDefinitionUtil { /** * Returns the first cp definition in the ordered set where groupId = ? .
* @ param groupId the group ID
* @ param orderByComparator the comparator to order the set by ( optionally < code > null < / code > )
* @ return the first matching cp definition
* @ throws NoSuchCPDefinitionException if a matching cp definition could not be found */
public static CPDefinition findByGroupId_First ( long groupId , OrderByComparator < CPDefinition > orderByComparator ) throws com . liferay . commerce . product . exception . NoSuchCPDefinitionException { } } | return getPersistence ( ) . findByGroupId_First ( groupId , orderByComparator ) ; |
public class AccumulatorHelper { /** * Compare both classes and throw { @ link UnsupportedOperationException } if
* they differ . */
@ SuppressWarnings ( "rawtypes" ) public static void compareAccumulatorTypes ( Object name , Class < ? extends Accumulator > first , Class < ? extends Accumulator > second ) throws UnsupportedOperationException { } } | if ( first == null || second == null ) { throw new NullPointerException ( ) ; } if ( first != second ) { if ( ! first . getName ( ) . equals ( second . getName ( ) ) ) { throw new UnsupportedOperationException ( "The accumulator object '" + name + "' was created with two different types: " + first . getName ( ) + " and " + second . getName ( ) ) ; } else { // damn , name is the same , but different classloaders
throw new UnsupportedOperationException ( "The accumulator object '" + name + "' was created with two different classes: " + first + " and " + second + " Both have the same type (" + first . getName ( ) + ") but different classloaders: " + first . getClassLoader ( ) + " and " + second . getClassLoader ( ) ) ; } } |
public class Message { /** * Sets the buffer < p / >
* Note that the byte [ ] buffer passed as argument must not be modified . Reason : if we retransmit the
* message , it would still have a ref to the original byte [ ] buffer passed in as argument , and so we would
* retransmit a changed byte [ ] buffer ! */
public Message setBuffer ( Buffer buf ) { } } | if ( buf != null ) { this . buf = buf . getBuf ( ) ; this . offset = buf . getOffset ( ) ; this . length = buf . getLength ( ) ; } return this ; |
public class Encoding { /** * Returns the Java name for the given locale .
* @ param locale the locale to use
* @ return Java encoding name */
public static String getJavaName ( Locale locale ) { } } | if ( locale == null ) return null ; return getJavaName ( getMimeName ( locale ) ) ; |
public class IntervalTree { /** * Returns the successor of the specified Entry , or null if no such .
* @ param < V > the value type */
private static < V > TreeEntry < V > successor ( TreeEntry < V > t ) { } } | if ( t == null ) { return null ; } else if ( t . right != null ) { TreeEntry < V > p = t . right ; while ( p . left != null ) { p = p . left ; } return p ; } else { TreeEntry < V > p = t . parent ; TreeEntry < V > ch = t ; while ( p != null && ch == p . right ) { ch = p ; p = p . parent ; } return p ; } |
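A self-contained sketch of the same in-order successor logic on a minimal node class, to make the two cases (right subtree present vs. climbing to an ancestor) easy to trace; it is not the library's TreeEntry type.
public class SuccessorSketch {
    static final class Node {
        final int key;
        Node left, right, parent;
        Node(int key, Node parent) { this.key = key; this.parent = parent; }
    }

    // Case 1: leftmost node of the right subtree. Case 2: climb until we arrive from a left child.
    static Node successor(Node t) {
        if (t == null) return null;
        if (t.right != null) {
            Node p = t.right;
            while (p.left != null) p = p.left;
            return p;
        }
        Node p = t.parent, ch = t;
        while (p != null && ch == p.right) { ch = p; p = p.parent; }
        return p;
    }

    public static void main(String[] args) {
        Node root = new Node(2, null);
        root.left = new Node(1, root);
        root.right = new Node(3, root);
        System.out.println(successor(root).key);       // 3
        System.out.println(successor(root.left).key);  // 2
        System.out.println(successor(root.right));     // null: 3 has no successor
    }
}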
public class MPIO { /** * Remove an MPConnection from the cache
* @ param conn The MEConnection of the MPConnection to be removed
* @ return The MPConnection which was removed */
public MPConnection removeConnection ( MEConnection conn ) { } } | if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . entry ( tc , "removeConnection" , new Object [ ] { conn } ) ; MPConnection mpConn ; synchronized ( _mpConnectionsByMEConnection ) { // remove the MPConnection from the ' by MEConnection ' cache
mpConn = _mpConnectionsByMEConnection . remove ( conn ) ; if ( mpConn != null ) { // remove it from the ' by cellule ' cache also
_mpConnectionsByMEUuid . remove ( mpConn . getRemoteMEUuid ( ) ) ; } } if ( TraceComponent . isAnyTracingEnabled ( ) && tc . isEntryEnabled ( ) ) SibTr . exit ( tc , "removeConnection" , mpConn ) ; return mpConn ; |
public class AbstractHistogram { /** * Produce textual representation of the value distribution of histogram data by percentile . The distribution is
* output with exponentially increasing resolution , with each exponentially decreasing half - distance containing
* < i > percentileTicksPerHalfDistance < / i > percentile reporting tick points .
* @ param printStream Stream into which the distribution will be output
* @ param percentileTicksPerHalfDistance The number of reporting points per exponentially decreasing half - distance
* @ param outputValueUnitScalingRatio The scaling factor by which to divide histogram recorded values units in
* output */
public void outputPercentileDistributionVolt ( final PrintStream printStream , final int percentileTicksPerHalfDistance , final Double outputValueUnitScalingRatio ) { } } | outputPercentileDistributionVolt ( printStream , percentileTicksPerHalfDistance , outputValueUnitScalingRatio , false ) ; |
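A hedged sketch of calling the Volt-specific report method: it assumes a concrete Histogram subclass in the same fork inherits outputPercentileDistributionVolt and uses the standard HdrHistogram constructor and recordValue signature; the recorded values are synthetic.
import java.io.PrintStream;

public class PercentileOutputExample {
    public static void main(String[] args) {
        // Assumption: Histogram is a concrete AbstractHistogram subclass in this fork.
        Histogram histogram = new Histogram(3_600_000_000L, 3);  // track up to 1 hour in ns, 3 significant digits
        for (int i = 1; i <= 10_000; i++) {
            histogram.recordValue(i * 1_000L);                   // synthetic latencies in nanoseconds
        }
        PrintStream out = System.out;
        // 5 ticks per half-distance; divide recorded nanoseconds by 1000 to report microseconds.
        histogram.outputPercentileDistributionVolt(out, 5, 1000.0);
    }
}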