Dataset columns (name: type, observed value range):

method:               string (lengths 13 to 441k)
clean_method:         string (lengths 7 to 313k)
doc:                  string (lengths 17 to 17.3k)
comment:              string (lengths 3 to 1.42k)
method_name:          string (lengths 1 to 273)
extra:                dict
imports:              sequence
imports_info:         string (lengths 19 to 34.8k)
cluster_imports_info: string (lengths 15 to 3.66k)
libraries:            sequence
libraries_info:       string (lengths 6 to 661)
id:                   int64 (0 to 2.92M)
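The records listed below follow this schema. As a rough illustration only, here is a minimal sketch of reading such records in Java with Jackson; the file name "records.jsonl", the JSON-lines serialization, and the field access by column name are assumptions based on the column list above, not something this page specifies.

```java
// Hypothetical sketch: iterate records that carry the columns listed above.
// Assumes a JSON-lines file named "records.jsonl"; the actual storage format
// of the dataset is not stated on this page.
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;

public class RecordReader {
    public static void main(String[] args) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        List<String> lines = Files.readAllLines(Path.of("records.jsonl")); // assumed file
        for (String line : lines) {
            JsonNode record = mapper.readTree(line);
            // Column names taken from the schema above.
            String methodName = record.get("method_name").asText();
            String doc = record.get("doc").asText();
            String method = record.get("method").asText();
            long id = record.get("id").asLong();
            System.out.printf("%d %s (doc: %d chars, code: %d chars)%n",
                    id, methodName, doc.length(), method.length());
        }
    }
}
```

Each record pairs a raw Java method (method), a normalized form with literals replaced by STR placeholders (clean_method), its Javadoc (doc) and a plain-text comment (comment), plus import and library metadata, as in the samples that follow.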
public void updateRemoteMetadata(int remoteNodeId, String key, Versioned<String> value) { if(key.equals(SystemStoreConstants.STORES_VERSION_KEY)) { List<StoreDefinition> storeDefs = storeMapper.readStoreList(new StringReader(value.getValue())); // Check for backwards compatibility StoreDefinitionUtils.validateSchemasAsNeeded(storeDefs); } ByteArray keyBytes = new ByteArray(ByteUtils.getBytes(key, "UTF-8")); Versioned<byte[]> valueBytes = new Versioned<byte[]>(ByteUtils.getBytes(value.getValue(), "UTF-8"), value.getVersion()); VAdminProto.VoldemortAdminRequest request = VAdminProto.VoldemortAdminRequest.newBuilder() .setType(VAdminProto.AdminRequestType.UPDATE_METADATA) .setUpdateMetadata(VAdminProto.UpdateMetadataRequest.newBuilder() .setKey(ByteString.copyFrom(keyBytes.get())) .setVersioned(ProtoUtils.encodeVersioned(valueBytes)) .build()) .build(); VAdminProto.UpdateMetadataResponse.Builder response = rpcOps.sendAndReceive(remoteNodeId, request, VAdminProto.UpdateMetadataResponse.newBuilder()); if(response.hasError()) helperOps.throwException(response.getError()); }
void function(int remoteNodeId, String key, Versioned<String> value) { if(key.equals(SystemStoreConstants.STORES_VERSION_KEY)) { List<StoreDefinition> storeDefs = storeMapper.readStoreList(new StringReader(value.getValue())); StoreDefinitionUtils.validateSchemasAsNeeded(storeDefs); } ByteArray keyBytes = new ByteArray(ByteUtils.getBytes(key, "UTF-8")); Versioned<byte[]> valueBytes = new Versioned<byte[]>(ByteUtils.getBytes(value.getValue(), "UTF-8"), value.getVersion()); VAdminProto.VoldemortAdminRequest request = VAdminProto.VoldemortAdminRequest.newBuilder() .setType(VAdminProto.AdminRequestType.UPDATE_METADATA) .setUpdateMetadata(VAdminProto.UpdateMetadataRequest.newBuilder() .setKey(ByteString.copyFrom(keyBytes.get())) .setVersioned(ProtoUtils.encodeVersioned(valueBytes)) .build()) .build(); VAdminProto.UpdateMetadataResponse.Builder response = rpcOps.sendAndReceive(remoteNodeId, request, VAdminProto.UpdateMetadataResponse.newBuilder()); if(response.hasError()) helperOps.throwException(response.getError()); }
/** * Update metadata at the given remoteNodeId. * <p> * * Metadata keys can be one of {@link MetadataStore#METADATA_KEYS}<br> * eg.<br> * <li>cluster metadata (cluster.xml as string) * <li>stores definitions (stores.xml as string) * <li>Server states <br <br> * See {@link voldemort.store.metadata.MetadataStore} for more * information. * * @param remoteNodeId Id of the node * @param key Metadata key to update * @param value Value for the metadata key */
Update metadata at the given remoteNodeId. Metadata keys can be one of <code>MetadataStore#METADATA_KEYS</code> eg. cluster metadata (cluster.xml as string) stores definitions (stores.xml as string) Server states <br See <code>voldemort.store.metadata.MetadataStore</code> for more information
updateRemoteMetadata
{ "repo_name": "null-exception/voldemort", "path": "src/java/voldemort/client/protocol/admin/AdminClient.java", "license": "apache-2.0", "size": 240012 }
[ "com.google.protobuf.ByteString", "java.io.StringReader", "java.util.List" ]
import com.google.protobuf.ByteString; import java.io.StringReader; import java.util.List;
import com.google.protobuf.*; import java.io.*; import java.util.*;
[ "com.google.protobuf", "java.io", "java.util" ]
com.google.protobuf; java.io; java.util;
2,499,520
public static void startHttpDownload(DownloadListener listener, Context context, String url) { new HttpDownloadTask(listener, context, url).execute(); }
static void function(DownloadListener listener, Context context, String url) { new HttpDownloadTask(listener, context, url).execute(); }
/** * Start downloading the Crosswalk runtime in background via HTTP connection * * <p>This method must be invoked on the UI thread. * * @param listener The {@link DownloadListener} to use * @param context The context to get DownloadManager * @param url The URL of the Crosswalk runtime */
Start downloading the Crosswalk runtime in background via HTTP connection This method must be invoked on the UI thread
startHttpDownload
{ "repo_name": "darktears/crosswalk", "path": "runtime/android/core/src/org/xwalk/core/XWalkLibraryLoader.java", "license": "bsd-3-clause", "size": 22336 }
[ "android.content.Context" ]
import android.content.Context;
import android.content.*;
[ "android.content" ]
android.content;
2,033,789
private boolean isDateTimeColumn(int columnType) { return (columnType == Types.TIMESTAMP) || (columnType == Types.DATE) || (columnType == Types.TIME); }
boolean function(int columnType) { return (columnType == Types.TIMESTAMP) (columnType == Types.DATE) (columnType == Types.TIME); }
/** * Determine if a column is date/time. * @return true if column type is TIMESTAMP, DATE, or TIME. */
Determine if a column is date/time
isDateTimeColumn
{ "repo_name": "sahilsehgal81/Sqoop", "path": "src/java/org/apache/sqoop/tool/ImportTool.java", "license": "apache-2.0", "size": 39652 }
[ "java.sql.Types" ]
import java.sql.Types;
import java.sql.*;
[ "java.sql" ]
java.sql;
1,532,474
public void removeNote(String noteId, AuthenticationInfo subject) throws IOException { String notePath = this.notesInfo.remove(noteId); Folder folder = getOrCreateFolder(getFolderName(notePath)); folder.removeNote(getNoteName(notePath)); this.notebookRepo.remove(noteId, notePath, subject); }
void function(String noteId, AuthenticationInfo subject) throws IOException { String notePath = this.notesInfo.remove(noteId); Folder folder = getOrCreateFolder(getFolderName(notePath)); folder.removeNote(getNoteName(notePath)); this.notebookRepo.remove(noteId, notePath, subject); }
/** * Remove note from NotebookRepo and NoteManager * * @param noteId * @param subject * @throws IOException */
Remove note from NotebookRepo and NoteManager
removeNote
{ "repo_name": "cquptEthan/incubator-zeppelin", "path": "zeppelin-zengine/src/main/java/org/apache/zeppelin/notebook/NoteManager.java", "license": "apache-2.0", "size": 16179 }
[ "java.io.IOException", "org.apache.zeppelin.user.AuthenticationInfo" ]
import java.io.IOException; import org.apache.zeppelin.user.AuthenticationInfo;
import java.io.*; import org.apache.zeppelin.user.*;
[ "java.io", "org.apache.zeppelin" ]
java.io; org.apache.zeppelin;
2,130,522
public boolean matchRestMethod(String method, String restrict) { if (restrict == null) { return true; } // always match OPTIONS as some REST clients uses that prior to calling the service if ("OPTIONS".equals(method)) { return true; } return restrict.toLowerCase(Locale.US).contains(method.toLowerCase(Locale.US)); }
boolean function(String method, String restrict) { if (restrict == null) { return true; } if (STR.equals(method)) { return true; } return restrict.toLowerCase(Locale.US).contains(method.toLowerCase(Locale.US)); }
/** * Matches the given request HTTP method with the configured HTTP method of the consumer * * @param method the request HTTP method * @param restrict the consumer configured HTTP restrict method * @return <tt>true</tt> if matched, <tt>false</tt> otherwise */
Matches the given request HTTP method with the configured HTTP method of the consumer
matchRestMethod
{ "repo_name": "logzio/camel", "path": "components/camel-servlet/src/main/java/org/apache/camel/component/servlet/ServletRestServletResolveConsumerStrategy.java", "license": "apache-2.0", "size": 5590 }
[ "java.util.Locale" ]
import java.util.Locale;
import java.util.*;
[ "java.util" ]
java.util;
1,036,696
public void writeHeaderPart(AnnotatedOutput out) { throwIfNotPrepared(); int sz = protoIds.size(); int offset = (sz == 0) ? 0 : getFileOffset(); if (sz > 65536) { throw new UnsupportedOperationException("too many proto ids"); } if (out.annotates()) { out.annotate(4, "proto_ids_size: " + Hex.u4(sz)); out.annotate(4, "proto_ids_off: " + Hex.u4(offset)); } out.writeInt(sz); out.writeInt(offset); }
void function(AnnotatedOutput out) { throwIfNotPrepared(); int sz = protoIds.size(); int offset = (sz == 0) ? 0 : getFileOffset(); if (sz > 65536) { throw new UnsupportedOperationException(STR); } if (out.annotates()) { out.annotate(4, STR + Hex.u4(sz)); out.annotate(4, STR + Hex.u4(offset)); } out.writeInt(sz); out.writeInt(offset); }
/** * Writes the portion of the file header that refers to this instance. * * @param out {@code non-null;} where to write */
Writes the portion of the file header that refers to this instance
writeHeaderPart
{ "repo_name": "RyanTech/DexHunter", "path": "dalvik/dexgen/src/com/android/dexgen/dex/file/ProtoIdsSection.java", "license": "apache-2.0", "size": 3896 }
[ "com.android.dexgen.util.AnnotatedOutput", "com.android.dexgen.util.Hex" ]
import com.android.dexgen.util.AnnotatedOutput; import com.android.dexgen.util.Hex;
import com.android.dexgen.util.*;
[ "com.android.dexgen" ]
com.android.dexgen;
1,726,122
List<Attribute> getAllAttributesStartWithNameWithoutNullValue(PerunSession sess, Resource resource, String startPartOfName);
List<Attribute> getAllAttributesStartWithNameWithoutNullValue(PerunSession sess, Resource resource, String startPartOfName);
/** * Get all <b>non-empty</b> attributes associated with the resource starts with name startPartOfName. * Get only nonvirtual attributes with notNull value. * * @param sess perun session * @param resource to get the attributes from * @param startPartOfName attribute name start with this part * @return list of attributes which name start with startPartOfName * * @throws InternalErrorException if an exception raise in concrete implementation, the exception is wrapped in InternalErrorException */
Get all non-empty attributes associated with the resource starts with name startPartOfName. Get only nonvirtual attributes with notNull value
getAllAttributesStartWithNameWithoutNullValue
{ "repo_name": "zlamalp/perun", "path": "perun-core/src/main/java/cz/metacentrum/perun/core/bl/AttributesManagerBl.java", "license": "bsd-2-clause", "size": 244560 }
[ "cz.metacentrum.perun.core.api.Attribute", "cz.metacentrum.perun.core.api.PerunSession", "cz.metacentrum.perun.core.api.Resource", "java.util.List" ]
import cz.metacentrum.perun.core.api.Attribute; import cz.metacentrum.perun.core.api.PerunSession; import cz.metacentrum.perun.core.api.Resource; import java.util.List;
import cz.metacentrum.perun.core.api.*; import java.util.*;
[ "cz.metacentrum.perun", "java.util" ]
cz.metacentrum.perun; java.util;
1,813,914
default Function4<T13, T14, T15, T16, R> applyPartially(Tuple12<? extends T1, ? extends T2, ? extends T3, ? extends T4, ? extends T5, ? extends T6, ? extends T7, ? extends T8, ? extends T9, ? extends T10, ? extends T11, ? extends T12> args) { return (v13, v14, v15, v16) -> apply(args.v1, args.v2, args.v3, args.v4, args.v5, args.v6, args.v7, args.v8, args.v9, args.v10, args.v11, args.v12, v13, v14, v15, v16); }
default Function4<T13, T14, T15, T16, R> applyPartially(Tuple12<? extends T1, ? extends T2, ? extends T3, ? extends T4, ? extends T5, ? extends T6, ? extends T7, ? extends T8, ? extends T9, ? extends T10, ? extends T11, ? extends T12> args) { return (v13, v14, v15, v16) -> apply(args.v1, args.v2, args.v3, args.v4, args.v5, args.v6, args.v7, args.v8, args.v9, args.v10, args.v11, args.v12, v13, v14, v15, v16); }
/** * Partially apply this function to the arguments. */
Partially apply this function to the arguments
applyPartially
{ "repo_name": "jOOQ/jOOL", "path": "jOOL/src/main/java/org/jooq/lambda/function/Function16.java", "license": "apache-2.0", "size": 31845 }
[ "org.jooq.lambda.tuple.Tuple12" ]
import org.jooq.lambda.tuple.Tuple12;
import org.jooq.lambda.tuple.*;
[ "org.jooq.lambda" ]
org.jooq.lambda;
2,343,830
void onMigrationStart(PartitionMigrationEvent event);
void onMigrationStart(PartitionMigrationEvent event);
/** * Indicates that migration started for a given partition * * @param event the migration event */
Indicates that migration started for a given partition
onMigrationStart
{ "repo_name": "mdogan/hazelcast", "path": "hazelcast/src/main/java/com/hazelcast/wan/WanMigrationAwarePublisher.java", "license": "apache-2.0", "size": 4035 }
[ "com.hazelcast.internal.partition.PartitionMigrationEvent" ]
import com.hazelcast.internal.partition.PartitionMigrationEvent;
import com.hazelcast.internal.partition.*;
[ "com.hazelcast.internal" ]
com.hazelcast.internal;
2,405,672
public static List<Method> getAccessors(Class<?> type) { List<Method> accessors = null; if (type == null) { accessors = Collections.emptyList(); } else { accessors = new ArrayList<Method>(); Class<?> currentType = type; while (!Object.class.equals(currentType)) { for (Method currentMethod : currentType.getDeclaredMethods()) { Matcher matcher = ACCESSOR_PATTERN.matcher(currentMethod.getName()); if (matcher.matches() && matcher.groupCount() == 3) { accessors.add(currentMethod); } } currentType = currentType.getSuperclass(); } } return accessors; }
static List<Method> function(Class<?> type) { List<Method> accessors = null; if (type == null) { accessors = Collections.emptyList(); } else { accessors = new ArrayList<Method>(); Class<?> currentType = type; while (!Object.class.equals(currentType)) { for (Method currentMethod : currentType.getDeclaredMethods()) { Matcher matcher = ACCESSOR_PATTERN.matcher(currentMethod.getName()); if (matcher.matches() && matcher.groupCount() == 3) { accessors.add(currentMethod); } } currentType = currentType.getSuperclass(); } } return accessors; }
/** * Retrieves all accessors by walking up the inheritance tree and aggregating all methods * whose name mathches one of the following: * * - getSomething() * - isSomething() * - hasSomething() * - canSomethingBeDone() * * The algorithm stops before it inspects {@code Object.class}, so methods such as * {@code getClass()} will not be included. * * @param type the most specific type in the inheritance tree to inspect * @return all accessors that were found. */
Retrieves all accessors by walking up the inheritance tree and aggregating all methods whose name mathches one of the following: - getSomething() - isSomething() - hasSomething() - canSomethingBeDone() The algorithm stops before it inspects Object.class, so methods such as getClass() will not be included
getAccessors
{ "repo_name": "damiancarrillo/agave-framework", "path": "agave-core/src/main/java/co/cdev/agave/util/ReflectionUtils.java", "license": "bsd-3-clause", "size": 12120 }
[ "java.lang.reflect.Method", "java.util.ArrayList", "java.util.Collections", "java.util.List", "java.util.regex.Matcher" ]
import java.lang.reflect.Method; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.regex.Matcher;
import java.lang.reflect.*; import java.util.*; import java.util.regex.*;
[ "java.lang", "java.util" ]
java.lang; java.util;
2,910,614
private static StringBuilder buildHtml(Viewable viewable, boolean prependDivider) { StringBuilder html = new StringBuilder(); if (viewable instanceof Textual) { Part part = ((Textual)viewable).getPart(); addHtmlDivider(html, part, prependDivider); String t = MessageExtractor.getTextFromPart(part); if (t == null) { t = ""; } else if (viewable instanceof Text) { t = HtmlConverter.textToHtml(t); } html.append(t); } else if (viewable instanceof Alternative) { // That's odd - an Alternative as child of an Alternative; go ahead and try to use the // text/html child; fall-back to the text/plain part. Alternative alternative = (Alternative) viewable; List<Viewable> htmlAlternative = alternative.getHtml().isEmpty() ? alternative.getText() : alternative.getHtml(); boolean divider = prependDivider; for (Viewable htmlViewable : htmlAlternative) { html.append(buildHtml(htmlViewable, divider)); divider = true; } } return html; }
static StringBuilder function(Viewable viewable, boolean prependDivider) { StringBuilder html = new StringBuilder(); if (viewable instanceof Textual) { Part part = ((Textual)viewable).getPart(); addHtmlDivider(html, part, prependDivider); String t = MessageExtractor.getTextFromPart(part); if (t == null) { t = ""; } else if (viewable instanceof Text) { t = HtmlConverter.textToHtml(t); } html.append(t); } else if (viewable instanceof Alternative) { Alternative alternative = (Alternative) viewable; List<Viewable> htmlAlternative = alternative.getHtml().isEmpty() ? alternative.getText() : alternative.getHtml(); boolean divider = prependDivider; for (Viewable htmlViewable : htmlAlternative) { html.append(buildHtml(htmlViewable, divider)); divider = true; } } return html; }
/** * Use the contents of a {@link com.fsck.k9.mail.internet.Viewable} to create the HTML to be displayed. * * <p> * This will use {@link com.fsck.k9.helper.HtmlConverter#textToHtml(String)} to convert plain text parts * to HTML if necessary. * </p> * * @param viewable * The viewable part to build the HTML from. * @param prependDivider * {@code true}, if the HTML divider should be inserted as first element. * {@code false}, otherwise. * * @return The contents of the supplied viewable instance as HTML. */
Use the contents of a <code>com.fsck.k9.mail.internet.Viewable</code> to create the HTML to be displayed. This will use <code>com.fsck.k9.helper.HtmlConverter#textToHtml(String)</code> to convert plain text parts to HTML if necessary.
buildHtml
{ "repo_name": "bashrc/k-9", "path": "k9mail/src/main/java/com/fsck/k9/mailstore/LocalMessageExtractor.java", "license": "bsd-3-clause", "size": 24007 }
[ "com.fsck.k9.helper.HtmlConverter", "com.fsck.k9.mail.Part", "com.fsck.k9.mail.internet.MessageExtractor", "com.fsck.k9.mail.internet.Viewable", "java.util.List" ]
import com.fsck.k9.helper.HtmlConverter; import com.fsck.k9.mail.Part; import com.fsck.k9.mail.internet.MessageExtractor; import com.fsck.k9.mail.internet.Viewable; import java.util.List;
import com.fsck.k9.helper.*; import com.fsck.k9.mail.*; import com.fsck.k9.mail.internet.*; import java.util.*;
[ "com.fsck.k9", "java.util" ]
com.fsck.k9; java.util;
1,319,655
@ApiDocItem public Collection<File> filterRuntimeJars (final Collection<File> allUrls) { ClasspathUtils classpathUtils = new ClasspathUtils(); Collection<File> runtimeJars = classpathUtils.getDefaultClasspathAsFileCollection(); List<File> filtered = new ArrayList<File>(); for (File next: allUrls) { if (next.toString().contains("wrapper/dists")) continue; filtered.add(next); } filtered.removeAll(runtimeJars); return filtered; } /** * gets a classpath from a {@link FileCollection} * @param fileCollection collection, e.g. {@link Configuration}
Collection<File> function (final Collection<File> allUrls) { ClasspathUtils classpathUtils = new ClasspathUtils(); Collection<File> runtimeJars = classpathUtils.getDefaultClasspathAsFileCollection(); List<File> filtered = new ArrayList<File>(); for (File next: allUrls) { if (next.toString().contains(STR)) continue; filtered.add(next); } filtered.removeAll(runtimeJars); return filtered; } /** * gets a classpath from a {@link FileCollection} * @param fileCollection collection, e.g. {@link Configuration}
/** * filters the gradle runtime jars, means all files, that are located in wrapper/dists path * @param allUrls unfiltered collection of Files * @return filtered colelction of files */
filters the gradle runtime jars, means all files, that are located in wrapper/dists path
filterRuntimeJars
{ "repo_name": "moley/leguan", "path": "leguan-gradleutils/src/main/java/org/leguan/gradleutils/GradleClasspathUtils.java", "license": "apache-2.0", "size": 3789 }
[ "java.io.File", "java.util.ArrayList", "java.util.Collection", "java.util.List", "org.gradle.api.artifacts.Configuration", "org.gradle.api.file.FileCollection", "org.leguan.ClasspathUtils" ]
import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.gradle.api.artifacts.Configuration; import org.gradle.api.file.FileCollection; import org.leguan.ClasspathUtils;
import java.io.*; import java.util.*; import org.gradle.api.artifacts.*; import org.gradle.api.file.*; import org.leguan.*;
[ "java.io", "java.util", "org.gradle.api", "org.leguan" ]
java.io; java.util; org.gradle.api; org.leguan;
852,798
@Schema(required = true, description = "IPv4 or IPv6 address or host name") public String getServerIp() { return serverIp; }
@Schema(required = true, description = STR) String function() { return serverIp; }
/** * IPv4 or IPv6 address or host name * @return serverIp **/
IPv4 or IPv6 address or host name
getServerIp
{ "repo_name": "iterate-ch/cyberduck", "path": "dracoon/src/main/java/ch/cyberduck/core/sds/io/swagger/client/model/TestActiveDirectoryConfigResponse.java", "license": "gpl-3.0", "size": 7847 }
[ "io.swagger.v3.oas.annotations.media.Schema" ]
import io.swagger.v3.oas.annotations.media.Schema;
import io.swagger.v3.oas.annotations.media.*;
[ "io.swagger.v3" ]
io.swagger.v3;
1,249,577
protected void registerDefaultServices() { // adapter factories registerInjectActivateService(new ModelAdapterFactory()); // sling models injectors registerInjectActivateService(new BindingsInjector()); registerInjectActivateService(new ChildResourceInjector()); registerInjectActivateService(new OSGiServiceInjector()); registerInjectActivateService(new RequestAttributeInjector()); registerInjectActivateService(new ResourcePathInjector()); registerInjectActivateService(new SelfInjector()); registerInjectActivateService(new SlingObjectInjector()); registerInjectActivateService(new ValueMapInjector()); // sling models implementation pickers registerService(ImplementationPicker.class, new FirstImplementationPicker()); // other services registerService(SlingSettingsService.class, new MockSlingSettingService(DEFAULT_RUN_MODES)); registerService(MimeTypeService.class, new MockMimeTypeService()); }
void function() { registerInjectActivateService(new ModelAdapterFactory()); registerInjectActivateService(new BindingsInjector()); registerInjectActivateService(new ChildResourceInjector()); registerInjectActivateService(new OSGiServiceInjector()); registerInjectActivateService(new RequestAttributeInjector()); registerInjectActivateService(new ResourcePathInjector()); registerInjectActivateService(new SelfInjector()); registerInjectActivateService(new SlingObjectInjector()); registerInjectActivateService(new ValueMapInjector()); registerService(ImplementationPicker.class, new FirstImplementationPicker()); registerService(SlingSettingsService.class, new MockSlingSettingService(DEFAULT_RUN_MODES)); registerService(MimeTypeService.class, new MockMimeTypeService()); }
/** * Default services that should be available for every unit test */
Default services that should be available for every unit test
registerDefaultServices
{ "repo_name": "tteofili/sling", "path": "testing/mocks/sling-mock/src/main/java/org/apache/sling/testing/mock/sling/context/SlingContextImpl.java", "license": "apache-2.0", "size": 14007 }
[ "org.apache.sling.commons.mime.MimeTypeService", "org.apache.sling.models.impl.FirstImplementationPicker", "org.apache.sling.models.impl.ModelAdapterFactory", "org.apache.sling.models.impl.injectors.BindingsInjector", "org.apache.sling.models.impl.injectors.ChildResourceInjector", "org.apache.sling.models.impl.injectors.OSGiServiceInjector", "org.apache.sling.models.impl.injectors.RequestAttributeInjector", "org.apache.sling.models.impl.injectors.ResourcePathInjector", "org.apache.sling.models.impl.injectors.SelfInjector", "org.apache.sling.models.impl.injectors.SlingObjectInjector", "org.apache.sling.models.impl.injectors.ValueMapInjector", "org.apache.sling.models.spi.ImplementationPicker", "org.apache.sling.settings.SlingSettingsService", "org.apache.sling.testing.mock.sling.services.MockMimeTypeService", "org.apache.sling.testing.mock.sling.services.MockSlingSettingService" ]
import org.apache.sling.commons.mime.MimeTypeService; import org.apache.sling.models.impl.FirstImplementationPicker; import org.apache.sling.models.impl.ModelAdapterFactory; import org.apache.sling.models.impl.injectors.BindingsInjector; import org.apache.sling.models.impl.injectors.ChildResourceInjector; import org.apache.sling.models.impl.injectors.OSGiServiceInjector; import org.apache.sling.models.impl.injectors.RequestAttributeInjector; import org.apache.sling.models.impl.injectors.ResourcePathInjector; import org.apache.sling.models.impl.injectors.SelfInjector; import org.apache.sling.models.impl.injectors.SlingObjectInjector; import org.apache.sling.models.impl.injectors.ValueMapInjector; import org.apache.sling.models.spi.ImplementationPicker; import org.apache.sling.settings.SlingSettingsService; import org.apache.sling.testing.mock.sling.services.MockMimeTypeService; import org.apache.sling.testing.mock.sling.services.MockSlingSettingService;
import org.apache.sling.commons.mime.*; import org.apache.sling.models.impl.*; import org.apache.sling.models.impl.injectors.*; import org.apache.sling.models.spi.*; import org.apache.sling.settings.*; import org.apache.sling.testing.mock.sling.services.*;
[ "org.apache.sling" ]
org.apache.sling;
287,230
public double jointProbability(Scope projection, double[] variableAssignment, Scope observedVariables, double[] observation);
double function(Scope projection, double[] variableAssignment, Scope observedVariables, double[] observation);
/** * P(Y, E = e) */
P(Y, E = e)
jointProbability
{ "repo_name": "ThorbenLindhauer/graphical-models", "path": "inference-engine/src/main/java/com/github/thorbenlindhauer/inference/ContinuousModelInferencer.java", "license": "apache-2.0", "size": 1218 }
[ "com.github.thorbenlindhauer.variable.Scope" ]
import com.github.thorbenlindhauer.variable.Scope;
import com.github.thorbenlindhauer.variable.*;
[ "com.github.thorbenlindhauer" ]
com.github.thorbenlindhauer;
2,573,531
public static boolean expect(final PsiBuilder builder, final IElementType expectedType) { if (builder.getTokenType() == expectedType) { builder.advanceLexer(); return true; } return false; }
static boolean function(final PsiBuilder builder, final IElementType expectedType) { if (builder.getTokenType() == expectedType) { builder.advanceLexer(); return true; } return false; }
/** * Advances lexer if current token is of expected type, does nothing otherwise. * * @param builder PSI builder to operate on. * @param expectedType expected token. * @return true if token matches, false otherwise. */
Advances lexer if current token is of expected type, does nothing otherwise
expect
{ "repo_name": "jk1/intellij-community", "path": "platform/core-impl/src/com/intellij/lang/PsiBuilderUtil.java", "license": "apache-2.0", "size": 5333 }
[ "com.intellij.psi.tree.IElementType" ]
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.*;
[ "com.intellij.psi" ]
com.intellij.psi;
2,739,723
UserAndGroups getUserAndGroups(String identity) throws AuthorizationAccessException;
UserAndGroups getUserAndGroups(String identity) throws AuthorizationAccessException;
/** * Gets a user and their groups. Must be non null. If the user is not known the UserAndGroups.getUser() and * UserAndGroups.getGroups() should return null * * @return the UserAndGroups for the specified identity * @throws AuthorizationAccessException if there was an unexpected error performing the operation */
Gets a user and their groups. Must be non null. If the user is not known the UserAndGroups.getUser() and UserAndGroups.getGroups() should return null
getUserAndGroups
{ "repo_name": "MikeThomsen/nifi", "path": "nifi-framework-api/src/main/java/org/apache/nifi/authorization/UserGroupProvider.java", "license": "apache-2.0", "size": 5254 }
[ "org.apache.nifi.authorization.exception.AuthorizationAccessException" ]
import org.apache.nifi.authorization.exception.AuthorizationAccessException;
import org.apache.nifi.authorization.exception.*;
[ "org.apache.nifi" ]
org.apache.nifi;
701,149
@SuppressWarnings("unchecked") @Test public void testExecuteTask() { final String MODULE_ID = "some-module-id"; Object SOMETING = null; // something returned by the context. final String SOME_MESSAGE = "some-message"; // messages returned by cmd results Map<String, String> initCmdMessages = EasyMock.createMock(Map.class); EasyMock.expect(initCmdMessages.get(ExecutionResult.MSG_MESSAGE)).andReturn(SOME_MESSAGE).times(3); EasyMock.replay(initCmdMessages); // execution result returned by command runner for running // initialization commands ExecutionResult initCmdsResult = EasyMock.createMock(ExecutionResult.class); EasyMock.expect(initCmdsResult.getMessages()).andReturn(initCmdMessages).times(3); EasyMock.replay(initCmdsResult); // create mock execution result ExecutionResult operationResult = EasyMock.createMock(ExecutionResult.class); operationResult.addMessage(ExecutionResult.MSG_MESSAGE, SOME_MESSAGE); operationResult.addMessage(ExecutionResult.MSG_MESSAGE, SOME_MESSAGE); operationResult.addMessage(ExecutionResult.MSG_MESSAGE, SOME_MESSAGE); EasyMock.replay(operationResult); // create mock module info ModuleInfo moduleInfo = EasyMock.createMock(ModuleInfo.class); EasyMock.expect(moduleInfo.getId()).andReturn(MODULE_ID).times(2); EasyMock.expect(moduleInfo.isDescriptorDefined()).andReturn(true); EasyMock.replay(moduleInfo); // create mock execution info executionInfo = EasyMock.createMock(ExecutionInfo.class); EasyMock.expect(executionInfo.getModuleInfo()).andReturn(moduleInfo).times(3); EasyMock.expect(executionInfo.getResult()).andReturn(operationResult).times(4); EasyMock.replay(executionInfo); // create context Context context = EasyMock.createMock(Context.class); EasyMock.expect(context.get(InitializeOperationCommand.MODULE_FILE_KEY)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.FILE_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.PACKAGE_KEY, Module.class.getPackage())) .andReturn(SOMETING); EasyMock.expect(context.get(UnmarshallJAXBObjectsCommand.UNMARSHALLING_RESULT_KEY)).andReturn(SOMETING); EasyMock.expect(context.get(InitializeOperationCommand.MODULE_MODEL_FILE_KEY)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.FILE_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.PACKAGE_KEY, Models.class.getPackage())) .andReturn(SOMETING); EasyMock.expect(context.get(UnmarshallJAXBObjectsCommand.UNMARSHALLING_RESULT_KEY)).andReturn(SOMETING); EasyMock.expect(context.put(InvokePluginsCommand.MODULE_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(InvokePluginsCommand.MODULE_MODEL_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(InvokePluginsCommand.EXECUTION_INFO_KEY, executionInfo)).andReturn(SOMETING); EasyMock.replay(context); // create mock execution context repository executionContextRepository.register(executionInfo, context); executionContextRepository.unregister(context); EasyMock.replay(executionContextRepository); // complete mock command runner setup commandRunner.setExecutionResult((ExecutionResult) EasyMock.isA(ExecutionResult.class)); EasyMock.expect(commandRunner.createContext()).andReturn(context); EasyMock.expect(commandRunner.lastExecutionSucceeded()).andReturn(true).times(6); EasyMock.expect(commandRunner.run(initializeOperationCommand, messageProvider.getMessage("aot.initialize_operation_info"), context)).andReturn(initCmdsResult); 
EasyMock.expect(commandRunner.run(unmarshallJAXBObjectsCommand, messageProvider.getMessage("aot.load_module_info"), context)).andReturn(initCmdsResult); EasyMock.expect(commandRunner.run(unmarshallJAXBObjectsCommand, messageProvider.getMessage("aot.load_modulemodel_info"), context)).andReturn(initCmdsResult); EasyMock.expect(commandRunner.run(invokePluginsCommand, operationResult, context)).andReturn(initCmdsResult); EasyMock.replay(commandRunner); // execute operationTask.execute(executionInfo); verifyMocks(initCmdsResult, operationResult, context); }
@SuppressWarnings(STR) void function() { final String MODULE_ID = STR; Object SOMETING = null; final String SOME_MESSAGE = STR; Map<String, String> initCmdMessages = EasyMock.createMock(Map.class); EasyMock.expect(initCmdMessages.get(ExecutionResult.MSG_MESSAGE)).andReturn(SOME_MESSAGE).times(3); EasyMock.replay(initCmdMessages); ExecutionResult initCmdsResult = EasyMock.createMock(ExecutionResult.class); EasyMock.expect(initCmdsResult.getMessages()).andReturn(initCmdMessages).times(3); EasyMock.replay(initCmdsResult); ExecutionResult operationResult = EasyMock.createMock(ExecutionResult.class); operationResult.addMessage(ExecutionResult.MSG_MESSAGE, SOME_MESSAGE); operationResult.addMessage(ExecutionResult.MSG_MESSAGE, SOME_MESSAGE); operationResult.addMessage(ExecutionResult.MSG_MESSAGE, SOME_MESSAGE); EasyMock.replay(operationResult); ModuleInfo moduleInfo = EasyMock.createMock(ModuleInfo.class); EasyMock.expect(moduleInfo.getId()).andReturn(MODULE_ID).times(2); EasyMock.expect(moduleInfo.isDescriptorDefined()).andReturn(true); EasyMock.replay(moduleInfo); executionInfo = EasyMock.createMock(ExecutionInfo.class); EasyMock.expect(executionInfo.getModuleInfo()).andReturn(moduleInfo).times(3); EasyMock.expect(executionInfo.getResult()).andReturn(operationResult).times(4); EasyMock.replay(executionInfo); Context context = EasyMock.createMock(Context.class); EasyMock.expect(context.get(InitializeOperationCommand.MODULE_FILE_KEY)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.FILE_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.PACKAGE_KEY, Module.class.getPackage())) .andReturn(SOMETING); EasyMock.expect(context.get(UnmarshallJAXBObjectsCommand.UNMARSHALLING_RESULT_KEY)).andReturn(SOMETING); EasyMock.expect(context.get(InitializeOperationCommand.MODULE_MODEL_FILE_KEY)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.FILE_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(UnmarshallJAXBObjectsCommand.PACKAGE_KEY, Models.class.getPackage())) .andReturn(SOMETING); EasyMock.expect(context.get(UnmarshallJAXBObjectsCommand.UNMARSHALLING_RESULT_KEY)).andReturn(SOMETING); EasyMock.expect(context.put(InvokePluginsCommand.MODULE_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(InvokePluginsCommand.MODULE_MODEL_KEY, SOMETING)).andReturn(SOMETING); EasyMock.expect(context.put(InvokePluginsCommand.EXECUTION_INFO_KEY, executionInfo)).andReturn(SOMETING); EasyMock.replay(context); executionContextRepository.register(executionInfo, context); executionContextRepository.unregister(context); EasyMock.replay(executionContextRepository); commandRunner.setExecutionResult((ExecutionResult) EasyMock.isA(ExecutionResult.class)); EasyMock.expect(commandRunner.createContext()).andReturn(context); EasyMock.expect(commandRunner.lastExecutionSucceeded()).andReturn(true).times(6); EasyMock.expect(commandRunner.run(initializeOperationCommand, messageProvider.getMessage(STR), context)).andReturn(initCmdsResult); EasyMock.expect(commandRunner.run(unmarshallJAXBObjectsCommand, messageProvider.getMessage(STR), context)).andReturn(initCmdsResult); EasyMock.expect(commandRunner.run(unmarshallJAXBObjectsCommand, messageProvider.getMessage(STR), context)).andReturn(initCmdsResult); EasyMock.expect(commandRunner.run(invokePluginsCommand, operationResult, context)).andReturn(initCmdsResult); EasyMock.replay(commandRunner); operationTask.execute(executionInfo); verifyMocks(initCmdsResult, 
operationResult, context); }
/** * Test task executes with expected invocations. */
Test task executes with expected invocations
testExecuteTask
{ "repo_name": "athrane/pineapple", "path": "modules/pineapple-core/src/test/java/com/alpha/pineapple/execution/OperationTaskTest.java", "license": "gpl-3.0", "size": 20560 }
[ "com.alpha.pineapple.command.InitializeOperationCommand", "com.alpha.pineapple.command.InvokePluginsCommand", "com.alpha.pineapple.command.UnmarshallJAXBObjectsCommand", "com.alpha.pineapple.model.module.Module", "com.alpha.pineapple.model.module.model.Models", "com.alpha.pineapple.module.ModuleInfo", "java.util.Map", "org.apache.commons.chain.Context", "org.easymock.EasyMock" ]
import com.alpha.pineapple.command.InitializeOperationCommand; import com.alpha.pineapple.command.InvokePluginsCommand; import com.alpha.pineapple.command.UnmarshallJAXBObjectsCommand; import com.alpha.pineapple.model.module.Module; import com.alpha.pineapple.model.module.model.Models; import com.alpha.pineapple.module.ModuleInfo; import java.util.Map; import org.apache.commons.chain.Context; import org.easymock.EasyMock;
import com.alpha.pineapple.command.*; import com.alpha.pineapple.model.module.*; import com.alpha.pineapple.model.module.model.*; import com.alpha.pineapple.module.*; import java.util.*; import org.apache.commons.chain.*; import org.easymock.*;
[ "com.alpha.pineapple", "java.util", "org.apache.commons", "org.easymock" ]
com.alpha.pineapple; java.util; org.apache.commons; org.easymock;
329,132
public float getOptionalFloatAttribute(String attrName, HashMap<String, String> attrMap, float defaultValue) { String attrValue = attrMap.get(attrName); if (attrValue != null) { attrMap.remove(attrName); return Float.parseFloat(attrValue); } else { return defaultValue; } }
float function(String attrName, HashMap<String, String> attrMap, float defaultValue) { String attrValue = attrMap.get(attrName); if (attrValue != null) { attrMap.remove(attrName); return Float.parseFloat(attrValue); } else { return defaultValue; } }
/** * Like getOptionalAttribute, but with the result converted to a float. */
Like getOptionalAttribute, but with the result converted to a float
getOptionalFloatAttribute
{ "repo_name": "jankolkmeier/HmiCore", "path": "HmiXml/src/hmi/xml/XMLStructureAdapter.java", "license": "mit", "size": 97309 }
[ "java.util.HashMap" ]
import java.util.HashMap;
import java.util.*;
[ "java.util" ]
java.util;
1,900,950
@Test public void test020Connection() throws Exception { final String TEST_NAME = "test020Connection"; TestUtil.displayTestTile(TEST_NAME); // GIVEN Task task = createTask(TEST_NAME); OperationResult result = task.getResult(); // Some connector initialization and other things might happen in previous tests. // The monitor is static, not part of spring context, it will not be cleared rememberCounter(InternalCounters.RESOURCE_SCHEMA_FETCH_COUNT); rememberCounter(InternalCounters.CONNECTOR_SCHEMA_PARSE_COUNT); rememberCounter(InternalCounters.CONNECTOR_CAPABILITIES_FETCH_COUNT); rememberCounter(InternalCounters.CONNECTOR_INSTANCE_INITIALIZATION_COUNT); rememberCounter(InternalCounters.RESOURCE_SCHEMA_PARSE_COUNT); rememberResourceCacheStats(); // Check that there is no schema before test (pre-condition) PrismObject<ResourceType> resourceBefore = repositoryService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null, result); ResourceType resourceTypeBefore = resourceBefore.asObjectable(); rememberResourceVersion(resourceBefore.getVersion()); assertNotNull("No connector ref", resourceTypeBefore.getConnectorRef()); assertNotNull("No connector ref OID", resourceTypeBefore.getConnectorRef().getOid()); ConnectorType connector = repositoryService.getObject(ConnectorType.class, resourceTypeBefore.getConnectorRef().getOid(), null, result).asObjectable(); assertNotNull(connector); IntegrationTestTools.assertNoSchema("Found schema before test connection. Bad test setup?", resourceTypeBefore); // WHEN OperationResult testResult = provisioningService.testResource(RESOURCE_DUMMY_OID, task); // THEN display("Test result", testResult); OperationResult connectorResult = assertSingleConnectorTestResult(testResult); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_INITIALIZATION); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_CONFIGURATION); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_CONNECTION); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_CAPABILITIES); assertSuccess(connectorResult); assertTestResourceSuccess(testResult, ConnectorTestOperation.RESOURCE_SCHEMA); assertSuccess(testResult); PrismObject<ResourceType> resourceRepoAfter = repositoryService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null, result); ResourceType resourceTypeRepoAfter = resourceRepoAfter.asObjectable(); display("Resource after test", resourceTypeRepoAfter); XmlSchemaType xmlSchemaTypeAfter = resourceTypeRepoAfter.getSchema(); assertNotNull("No schema after test connection", xmlSchemaTypeAfter); Element resourceXsdSchemaElementAfter = ResourceTypeUtil.getResourceXsdSchema(resourceTypeRepoAfter); assertNotNull("No schema after test connection", resourceXsdSchemaElementAfter); IntegrationTestTools.displayXml("Resource XML", resourceRepoAfter); CachingMetadataType cachingMetadata = xmlSchemaTypeAfter.getCachingMetadata(); assertNotNull("No caching metadata", cachingMetadata); assertNotNull("No retrievalTimestamp", cachingMetadata.getRetrievalTimestamp()); assertNotNull("No serialNumber", cachingMetadata.getSerialNumber()); Element xsdElement = ObjectTypeUtil.findXsdElement(xmlSchemaTypeAfter); ResourceSchema parsedSchema = ResourceSchemaImpl.parse(xsdElement, resourceTypeBefore.toString(), prismContext); assertNotNull("No schema after parsing", parsedSchema); // schema will be checked in next test assertCounterIncrement(InternalCounters.RESOURCE_SCHEMA_FETCH_COUNT, 1); 
assertCounterIncrement(InternalCounters.CONNECTOR_SCHEMA_PARSE_COUNT, 0); assertCounterIncrement(InternalCounters.CONNECTOR_CAPABILITIES_FETCH_COUNT, 1); assertCounterIncrement(InternalCounters.CONNECTOR_INSTANCE_INITIALIZATION_COUNT, 1); assertCounterIncrement(InternalCounters.RESOURCE_SCHEMA_PARSE_COUNT, 1); // One increment for availablity status, the other for schema assertResourceVersionIncrement(resourceRepoAfter, 2); }
void function() throws Exception { final String TEST_NAME = STR; TestUtil.displayTestTile(TEST_NAME); Task task = createTask(TEST_NAME); OperationResult result = task.getResult(); rememberCounter(InternalCounters.RESOURCE_SCHEMA_FETCH_COUNT); rememberCounter(InternalCounters.CONNECTOR_SCHEMA_PARSE_COUNT); rememberCounter(InternalCounters.CONNECTOR_CAPABILITIES_FETCH_COUNT); rememberCounter(InternalCounters.CONNECTOR_INSTANCE_INITIALIZATION_COUNT); rememberCounter(InternalCounters.RESOURCE_SCHEMA_PARSE_COUNT); rememberResourceCacheStats(); PrismObject<ResourceType> resourceBefore = repositoryService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null, result); ResourceType resourceTypeBefore = resourceBefore.asObjectable(); rememberResourceVersion(resourceBefore.getVersion()); assertNotNull(STR, resourceTypeBefore.getConnectorRef()); assertNotNull(STR, resourceTypeBefore.getConnectorRef().getOid()); ConnectorType connector = repositoryService.getObject(ConnectorType.class, resourceTypeBefore.getConnectorRef().getOid(), null, result).asObjectable(); assertNotNull(connector); IntegrationTestTools.assertNoSchema(STR, resourceTypeBefore); OperationResult testResult = provisioningService.testResource(RESOURCE_DUMMY_OID, task); display(STR, testResult); OperationResult connectorResult = assertSingleConnectorTestResult(testResult); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_INITIALIZATION); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_CONFIGURATION); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_CONNECTION); assertTestResourceSuccess(connectorResult, ConnectorTestOperation.CONNECTOR_CAPABILITIES); assertSuccess(connectorResult); assertTestResourceSuccess(testResult, ConnectorTestOperation.RESOURCE_SCHEMA); assertSuccess(testResult); PrismObject<ResourceType> resourceRepoAfter = repositoryService.getObject(ResourceType.class, RESOURCE_DUMMY_OID, null, result); ResourceType resourceTypeRepoAfter = resourceRepoAfter.asObjectable(); display(STR, resourceTypeRepoAfter); XmlSchemaType xmlSchemaTypeAfter = resourceTypeRepoAfter.getSchema(); assertNotNull(STR, xmlSchemaTypeAfter); Element resourceXsdSchemaElementAfter = ResourceTypeUtil.getResourceXsdSchema(resourceTypeRepoAfter); assertNotNull(STR, resourceXsdSchemaElementAfter); IntegrationTestTools.displayXml(STR, resourceRepoAfter); CachingMetadataType cachingMetadata = xmlSchemaTypeAfter.getCachingMetadata(); assertNotNull(STR, cachingMetadata); assertNotNull(STR, cachingMetadata.getRetrievalTimestamp()); assertNotNull(STR, cachingMetadata.getSerialNumber()); Element xsdElement = ObjectTypeUtil.findXsdElement(xmlSchemaTypeAfter); ResourceSchema parsedSchema = ResourceSchemaImpl.parse(xsdElement, resourceTypeBefore.toString(), prismContext); assertNotNull(STR, parsedSchema); assertCounterIncrement(InternalCounters.RESOURCE_SCHEMA_FETCH_COUNT, 1); assertCounterIncrement(InternalCounters.CONNECTOR_SCHEMA_PARSE_COUNT, 0); assertCounterIncrement(InternalCounters.CONNECTOR_CAPABILITIES_FETCH_COUNT, 1); assertCounterIncrement(InternalCounters.CONNECTOR_INSTANCE_INITIALIZATION_COUNT, 1); assertCounterIncrement(InternalCounters.RESOURCE_SCHEMA_PARSE_COUNT, 1); assertResourceVersionIncrement(resourceRepoAfter, 2); }
/** * This should be the very first test that works with the resource. * * The original repository object does not have resource schema. The schema * should be generated from the resource on the first use. This is the test * that executes testResource and checks whether the schema was generated. */
This should be the very first test that works with the resource. The original repository object does not have resource schema. The schema should be generated from the resource on the first use. This is the test that executes testResource and checks whether the schema was generated
test020Connection
{ "repo_name": "Pardus-Engerek/engerek", "path": "provisioning/provisioning-impl/src/test/java/com/evolveum/midpoint/provisioning/impl/dummy/AbstractBasicDummyTest.java", "license": "apache-2.0", "size": 65722 }
[ "com.evolveum.midpoint.prism.PrismObject", "com.evolveum.midpoint.schema.constants.ConnectorTestOperation", "com.evolveum.midpoint.schema.internals.InternalCounters", "com.evolveum.midpoint.schema.processor.ResourceSchema", "com.evolveum.midpoint.schema.processor.ResourceSchemaImpl", "com.evolveum.midpoint.schema.result.OperationResult", "com.evolveum.midpoint.schema.util.ObjectTypeUtil", "com.evolveum.midpoint.schema.util.ResourceTypeUtil", "com.evolveum.midpoint.task.api.Task", "com.evolveum.midpoint.test.IntegrationTestTools", "com.evolveum.midpoint.test.util.TestUtil", "com.evolveum.midpoint.xml.ns._public.common.common_3.CachingMetadataType", "com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorType", "com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType", "com.evolveum.midpoint.xml.ns._public.common.common_3.XmlSchemaType", "org.testng.AssertJUnit", "org.w3c.dom.Element" ]
import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.schema.constants.ConnectorTestOperation; import com.evolveum.midpoint.schema.internals.InternalCounters; import com.evolveum.midpoint.schema.processor.ResourceSchema; import com.evolveum.midpoint.schema.processor.ResourceSchemaImpl; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.schema.util.ObjectTypeUtil; import com.evolveum.midpoint.schema.util.ResourceTypeUtil; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.test.IntegrationTestTools; import com.evolveum.midpoint.test.util.TestUtil; import com.evolveum.midpoint.xml.ns._public.common.common_3.CachingMetadataType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorType; import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.XmlSchemaType; import org.testng.AssertJUnit; import org.w3c.dom.Element;
import com.evolveum.midpoint.prism.*; import com.evolveum.midpoint.schema.constants.*; import com.evolveum.midpoint.schema.internals.*; import com.evolveum.midpoint.schema.processor.*; import com.evolveum.midpoint.schema.result.*; import com.evolveum.midpoint.schema.util.*; import com.evolveum.midpoint.task.api.*; import com.evolveum.midpoint.test.*; import com.evolveum.midpoint.test.util.*; import com.evolveum.midpoint.xml.ns._public.common.common_3.*; import org.testng.*; import org.w3c.dom.*;
[ "com.evolveum.midpoint", "org.testng", "org.w3c.dom" ]
com.evolveum.midpoint; org.testng; org.w3c.dom;
2,062,121
RegisteredServiceProvider<Economy> rsp = PlayerWarpGUI.p.getServer().getServicesManager().getRegistration(Economy.class); if (rsp == null) { return false; } econ = rsp.getProvider(); return econ != null; }
RegisteredServiceProvider<Economy> rsp = PlayerWarpGUI.p.getServer().getServicesManager().getRegistration(Economy.class); if (rsp == null) { return false; } econ = rsp.getProvider(); return econ != null; }
/** * Setup economy variable * * @return true/false */
Setup economy variable
setupEconomy
{ "repo_name": "theLastHero/special-octo-palm-tree", "path": "src/PlayerWarpGUI/Hooks/VaultHook.java", "license": "mit", "size": 1915 }
[ "net.milkbowl.vault.economy.Economy", "org.bukkit.plugin.RegisteredServiceProvider" ]
import net.milkbowl.vault.economy.Economy; import org.bukkit.plugin.RegisteredServiceProvider;
import net.milkbowl.vault.economy.*; import org.bukkit.plugin.*;
[ "net.milkbowl.vault", "org.bukkit.plugin" ]
net.milkbowl.vault; org.bukkit.plugin;
2,694,281
public static <T> Iterable<T> randomIterable(Collection<T> col) { List<T> list = new ArrayList<>(col); Collections.shuffle(list); return list; }
static <T> Iterable<T> function(Collection<T> col) { List<T> list = new ArrayList<>(col); Collections.shuffle(list); return list; }
/** * Takes given collection, shuffles it and returns iterable instance. * * @param <T> Type of elements to create iterator for. * @param col Collection to shuffle. * @return Iterable instance over randomly shuffled collection. */
Takes given collection, shuffles it and returns iterable instance
randomIterable
{ "repo_name": "agoncharuk/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java", "license": "apache-2.0", "size": 289549 }
[ "java.util.ArrayList", "java.util.Collection", "java.util.Collections", "java.util.List" ]
import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,222,221
private synchronized void initializeAdvisorChain() throws AopConfigException, BeansException { if (this.advisorChainInitialized) { return; } if (!ObjectUtils.isEmpty(this.interceptorNames)) { if (this.beanFactory == null) { throw new IllegalStateException("No BeanFactory available anymore (probably due to serialization) " + "- cannot resolve interceptor names " + Arrays.asList(this.interceptorNames)); } // Globals can't be last unless we specified a targetSource using the property... if (this.interceptorNames[this.interceptorNames.length - 1].endsWith(GLOBAL_SUFFIX) && this.targetName == null && this.targetSource == EMPTY_TARGET_SOURCE) { throw new AopConfigException("Target required after globals"); } //添加Advisor链的调用,通过interceptorNames属性进行配置的 // Materialize interceptor chain from bean names. for (String name : this.interceptorNames) { if (logger.isTraceEnabled()) { logger.trace("Configuring advisor or advice '" + name + "'"); } if (name.endsWith(GLOBAL_SUFFIX)) { if (!(this.beanFactory instanceof ListableBeanFactory)) { throw new AopConfigException( "Can only use global advisors or interceptors with a ListableBeanFactory"); } addGlobalAdvisor((ListableBeanFactory) this.beanFactory, name.substring(0, name.length() - GLOBAL_SUFFIX.length())); } else { // If we get here, we need to add a named interceptor. // We must check if it's a singleton or prototype. Object advice; if (this.singleton || this.beanFactory.isSingleton(name)) { // Add the real Advisor/Advice to the chain. advice = this.beanFactory.getBean(name); } else { // It's a prototype Advice or Advisor: replace with a prototype. // Avoid unnecessary creation of prototype bean just for advisor chain initialization. advice = new PrototypePlaceholderAdvisor(name); } addAdvisorOnChainCreation(advice, name); } } } this.advisorChainInitialized = true; }
synchronized void function() throws AopConfigException, BeansException { if (this.advisorChainInitialized) { return; } if (!ObjectUtils.isEmpty(this.interceptorNames)) { if (this.beanFactory == null) { throw new IllegalStateException(STR + STR + Arrays.asList(this.interceptorNames)); } if (this.interceptorNames[this.interceptorNames.length - 1].endsWith(GLOBAL_SUFFIX) && this.targetName == null && this.targetSource == EMPTY_TARGET_SOURCE) { throw new AopConfigException(STR); } for (String name : this.interceptorNames) { if (logger.isTraceEnabled()) { logger.trace(STR + name + "'"); } if (name.endsWith(GLOBAL_SUFFIX)) { if (!(this.beanFactory instanceof ListableBeanFactory)) { throw new AopConfigException( STR); } addGlobalAdvisor((ListableBeanFactory) this.beanFactory, name.substring(0, name.length() - GLOBAL_SUFFIX.length())); } else { Object advice; if (this.singleton this.beanFactory.isSingleton(name)) { advice = this.beanFactory.getBean(name); } else { advice = new PrototypePlaceholderAdvisor(name); } addAdvisorOnChainCreation(advice, name); } } } this.advisorChainInitialized = true; }
/** * Create the advisor (interceptor) chain. Aadvisors that are sourced * from a BeanFactory will be refreshed each time a new prototype instance * is added. Interceptors added programmatically through the factory API * are unaffected by such changes. */
Create the advisor (interceptor) chain. Aadvisors that are sourced from a BeanFactory will be refreshed each time a new prototype instance is added. Interceptors added programmatically through the factory API are unaffected by such changes
initializeAdvisorChain
{ "repo_name": "leogoing/spring_jeesite", "path": "spring-aop-4.0/org/springframework/aop/framework/ProxyFactoryBean.java", "license": "apache-2.0", "size": 25209 }
[ "java.util.Arrays", "org.springframework.beans.BeansException", "org.springframework.beans.factory.ListableBeanFactory", "org.springframework.util.ObjectUtils" ]
import java.util.Arrays; import org.springframework.beans.BeansException; import org.springframework.beans.factory.ListableBeanFactory; import org.springframework.util.ObjectUtils;
import java.util.*; import org.springframework.beans.*; import org.springframework.beans.factory.*; import org.springframework.util.*;
[ "java.util", "org.springframework.beans", "org.springframework.util" ]
java.util; org.springframework.beans; org.springframework.util;
1,677,756
public void skipGram(Word2VecParam param, int i, List<VocabWord> sentence, int b, double alpha, List<Triple<Integer, Integer, Integer>> changed) { final VocabWord word = sentence.get(i); int window = param.getWindow(); if (word != null && !sentence.isEmpty()) { int end = window * 2 + 1 - b; for (int a = b; a < end; a++) { if (a != window) { int c = i - window + a; if (c >= 0 && c < sentence.size()) { VocabWord lastWord = sentence.get(c); iterateSample(param, word, lastWord, alpha, changed); } } } } }
void function(Word2VecParam param, int i, List<VocabWord> sentence, int b, double alpha, List<Triple<Integer, Integer, Integer>> changed) { final VocabWord word = sentence.get(i); int window = param.getWindow(); if (word != null && !sentence.isEmpty()) { int end = window * 2 + 1 - b; for (int a = b; a < end; a++) { if (a != window) { int c = i - window + a; if (c >= 0 && c < sentence.size()) { VocabWord lastWord = sentence.get(c); iterateSample(param, word, lastWord, alpha, changed); } } } } }
/** * Train via skip gram * @param i the current word * @param sentence the sentence to train on * @param b * @param alpha the learning rate */
Train via skip gram
skipGram
{ "repo_name": "kinbod/deeplearning4j", "path": "deeplearning4j-scaleout/spark/dl4j-spark-nlp/src/main/java/org/deeplearning4j/spark/models/embeddings/word2vec/SentenceBatch.java", "license": "apache-2.0", "size": 7690 }
[ "java.util.List", "org.deeplearning4j.models.word2vec.VocabWord", "org.nd4j.linalg.primitives.Triple" ]
import java.util.List; import org.deeplearning4j.models.word2vec.VocabWord; import org.nd4j.linalg.primitives.Triple;
import java.util.*; import org.deeplearning4j.models.word2vec.*; import org.nd4j.linalg.primitives.*;
[ "java.util", "org.deeplearning4j.models", "org.nd4j.linalg" ]
java.util; org.deeplearning4j.models; org.nd4j.linalg;
22,405
@Deprecated boolean verifyId(boolean autoFix, boolean autoSave) throws RepositoryException; /** * Checks if this package is installed. * * Note: the default implementation only checks the {@link org.apache.jackrabbit.vault.packaging.JcrPackageDefinition#getLastUnpacked()}
boolean verifyId(boolean autoFix, boolean autoSave) throws RepositoryException; /** * Checks if this package is installed. * * Note: the default implementation only checks the {@link org.apache.jackrabbit.vault.packaging.JcrPackageDefinition#getLastUnpacked()}
/** * Checks if the package id is correct with respect to the installation path * and adjusts it accordingly. * * @param autoFix {@code true} to automatically fix the id * @param autoSave {@code true} to save changes immediately * @return {@code true} if id is correct. * @throws RepositoryException if an error occurs. * * @since 2.2.18 * * @deprecated As of 3.1.42, the storage location is an implementation detail. */
Checks if the package id is correct with respect to the installation path and adjusts it accordingly
verifyId
{ "repo_name": "tripodsan/jackrabbit-filevault", "path": "vault-core/src/main/java/org/apache/jackrabbit/vault/packaging/JcrPackage.java", "license": "apache-2.0", "size": 9332 }
[ "javax.jcr.RepositoryException" ]
import javax.jcr.RepositoryException;
import javax.jcr.*;
[ "javax.jcr" ]
javax.jcr;
2,409,661
public MultiCurrencyAmount currencyExposureWithZSpread( ResolvedCapitalIndexedBondTrade trade, RatesProvider ratesProvider, LegalEntityDiscountingProvider issuerDiscountFactorsProvider, ReferenceData refData, double zSpread, CompoundedRateType compoundedRateType, int periodsPerYear) { CurrencyAmount pv = presentValueWithZSpread( trade, ratesProvider, issuerDiscountFactorsProvider, refData, zSpread, compoundedRateType, periodsPerYear); return MultiCurrencyAmount.of(pv); }
MultiCurrencyAmount function( ResolvedCapitalIndexedBondTrade trade, RatesProvider ratesProvider, LegalEntityDiscountingProvider issuerDiscountFactorsProvider, ReferenceData refData, double zSpread, CompoundedRateType compoundedRateType, int periodsPerYear) { CurrencyAmount pv = presentValueWithZSpread( trade, ratesProvider, issuerDiscountFactorsProvider, refData, zSpread, compoundedRateType, periodsPerYear); return MultiCurrencyAmount.of(pv); }
/** * Calculates the currency exposure of the bond trade with z-spread. * * @param trade the trade * @param ratesProvider the rates provider, used to determine price index values * @param issuerDiscountFactorsProvider the discount factors provider * @param refData the reference data used to calculate the settlement date * @param zSpread the z-spread * @param compoundedRateType the compounded rate type * @param periodsPerYear the number of periods per year * @return the currency exposure of the trade */
Calculates the currency exposure of the bond trade with z-spread
currencyExposureWithZSpread
{ "repo_name": "jmptrader/Strata", "path": "modules/pricer/src/main/java/com/opengamma/strata/pricer/bond/DiscountingCapitalIndexedBondTradePricer.java", "license": "apache-2.0", "size": 32661 }
[ "com.opengamma.strata.basics.ReferenceData", "com.opengamma.strata.basics.currency.CurrencyAmount", "com.opengamma.strata.basics.currency.MultiCurrencyAmount", "com.opengamma.strata.pricer.CompoundedRateType", "com.opengamma.strata.pricer.rate.RatesProvider", "com.opengamma.strata.product.bond.ResolvedCapitalIndexedBondTrade" ]
import com.opengamma.strata.basics.ReferenceData; import com.opengamma.strata.basics.currency.CurrencyAmount; import com.opengamma.strata.basics.currency.MultiCurrencyAmount; import com.opengamma.strata.pricer.CompoundedRateType; import com.opengamma.strata.pricer.rate.RatesProvider; import com.opengamma.strata.product.bond.ResolvedCapitalIndexedBondTrade;
import com.opengamma.strata.basics.*; import com.opengamma.strata.basics.currency.*; import com.opengamma.strata.pricer.*; import com.opengamma.strata.pricer.rate.*; import com.opengamma.strata.product.bond.*;
[ "com.opengamma.strata" ]
com.opengamma.strata;
1,479,703
static void splitLog(Path rootDir, Path srcDir, FileSystem fs, Configuration conf) throws IOException { if (!fs.exists(srcDir)) { // Nothing to do return; } FileStatus logfiles[] = fs.listStatus(srcDir); if (logfiles == null || logfiles.length == 0) { // Nothing to do return; } LOG.info("splitting " + logfiles.length + " log(s) in " + srcDir.toString()); Map<Text, SequenceFile.Writer> logWriters = new HashMap<Text, SequenceFile.Writer>(); try { for (int i = 0; i < logfiles.length; i++) { if (LOG.isDebugEnabled()) { LOG.debug("Splitting " + i + " of " + logfiles.length + ": " + logfiles[i].getPath()); } // Check for empty file. if (logfiles[i].getLen() <= 0) { LOG.info("Skipping " + logfiles[i].toString() + " because zero length"); continue; } HLogKey key = new HLogKey(); HLogEdit val = new HLogEdit(); SequenceFile.Reader in = new SequenceFile.Reader(fs, logfiles[i].getPath(), conf); try { int count = 0; for (; in.next(key, val); count++) { Text tableName = key.getTablename(); Text regionName = key.getRegionName(); SequenceFile.Writer w = logWriters.get(regionName); if (w == null) { Path logfile = new Path( HRegion.getRegionDir( HTableDescriptor.getTableDir(rootDir, tableName), HRegionInfo.encodeRegionName(regionName) ), HREGION_OLDLOGFILE_NAME ); Path oldlogfile = null; SequenceFile.Reader old = null; if (fs.exists(logfile)) { LOG.warn("Old log file " + logfile + " already exists. Copying existing file to new file"); oldlogfile = new Path(logfile.toString() + ".old"); fs.rename(logfile, oldlogfile); old = new SequenceFile.Reader(fs, oldlogfile, conf); } w = SequenceFile.createWriter(fs, conf, logfile, HLogKey.class, HLogEdit.class, getCompressionType(conf)); // Use copy of regionName; regionName object is reused inside in // HStoreKey.getRegionName so its content changes as we iterate. logWriters.put(new Text(regionName), w); if (LOG.isDebugEnabled()) { LOG.debug("Creating new log file writer for path " + logfile + " and region " + regionName); } if (old != null) { // Copy from existing log file HLogKey oldkey = new HLogKey(); HLogEdit oldval = new HLogEdit(); for (; old.next(oldkey, oldval); count++) { if (LOG.isDebugEnabled() && count > 0 && count % 10000 == 0) { LOG.debug("Copied " + count + " edits"); } w.append(oldkey, oldval); } old.close(); fs.delete(oldlogfile); } } w.append(key, val); } if (LOG.isDebugEnabled()) { LOG.debug("Applied " + count + " total edits from " + logfiles[i].getPath().toString()); } } catch (IOException e) { e = RemoteExceptionHandler.checkIOException(e); if (!(e instanceof EOFException)) { LOG.warn("Exception processing " + logfiles[i].getPath() + " -- continuing. Possible DATA LOSS!", e); } } finally { try { in.close(); } catch (IOException e) { LOG.warn("Close in finally threw exception -- continuing", e); } // Delete the input file now so we do not replay edits. We could // have gotten here because of an exception. If so, probably // nothing we can do about it. Replaying it, it could work but we // could be stuck replaying for ever. Just continue though we // could have lost some edits. fs.delete(logfiles[i].getPath()); } } } finally { for (SequenceFile.Writer w : logWriters.values()) { w.close(); } } try { FileUtil.fullyDelete(fs, srcDir); } catch (IOException e) { e = RemoteExceptionHandler.checkIOException(e); IOException io = new IOException("Cannot delete: " + srcDir); io.initCause(e); throw io; } LOG.info("log file splitting completed for " + srcDir.toString()); }
static void splitLog(Path rootDir, Path srcDir, FileSystem fs, Configuration conf) throws IOException { if (!fs.exists(srcDir)) { return; } FileStatus logfiles[] = fs.listStatus(srcDir); if (logfiles == null logfiles.length == 0) { return; } LOG.info(STR + logfiles.length + STR + srcDir.toString()); Map<Text, SequenceFile.Writer> logWriters = new HashMap<Text, SequenceFile.Writer>(); try { for (int i = 0; i < logfiles.length; i++) { if (LOG.isDebugEnabled()) { LOG.debug(STR + i + STR + logfiles.length + STR + logfiles[i].getPath()); } if (logfiles[i].getLen() <= 0) { LOG.info(STR + logfiles[i].toString() + STR); continue; } HLogKey key = new HLogKey(); HLogEdit val = new HLogEdit(); SequenceFile.Reader in = new SequenceFile.Reader(fs, logfiles[i].getPath(), conf); try { int count = 0; for (; in.next(key, val); count++) { Text tableName = key.getTablename(); Text regionName = key.getRegionName(); SequenceFile.Writer w = logWriters.get(regionName); if (w == null) { Path logfile = new Path( HRegion.getRegionDir( HTableDescriptor.getTableDir(rootDir, tableName), HRegionInfo.encodeRegionName(regionName) ), HREGION_OLDLOGFILE_NAME ); Path oldlogfile = null; SequenceFile.Reader old = null; if (fs.exists(logfile)) { LOG.warn(STR + logfile + STR); oldlogfile = new Path(logfile.toString() + ".old"); fs.rename(logfile, oldlogfile); old = new SequenceFile.Reader(fs, oldlogfile, conf); } w = SequenceFile.createWriter(fs, conf, logfile, HLogKey.class, HLogEdit.class, getCompressionType(conf)); logWriters.put(new Text(regionName), w); if (LOG.isDebugEnabled()) { LOG.debug(STR + logfile + STR + regionName); } if (old != null) { HLogKey oldkey = new HLogKey(); HLogEdit oldval = new HLogEdit(); for (; old.next(oldkey, oldval); count++) { if (LOG.isDebugEnabled() && count > 0 && count % 10000 == 0) { LOG.debug(STR + count + STR); } w.append(oldkey, oldval); } old.close(); fs.delete(oldlogfile); } } w.append(key, val); } if (LOG.isDebugEnabled()) { LOG.debug(STR + count + STR + logfiles[i].getPath().toString()); } } catch (IOException e) { e = RemoteExceptionHandler.checkIOException(e); if (!(e instanceof EOFException)) { LOG.warn(STR + logfiles[i].getPath() + STR, e); } } finally { try { in.close(); } catch (IOException e) { LOG.warn(STR, e); } fs.delete(logfiles[i].getPath()); } } } finally { for (SequenceFile.Writer w : logWriters.values()) { w.close(); } } try { FileUtil.fullyDelete(fs, srcDir); } catch (IOException e) { e = RemoteExceptionHandler.checkIOException(e); IOException io = new IOException(STR + srcDir); io.initCause(e); throw io; } LOG.info(STR + srcDir.toString()); }
/** * Split up a bunch of log files that are no longer being written to into * new files, one per region. Delete the old log files when finished. * * @param rootDir qualified root directory of the HBase instance * @param srcDir Directory of log files to split: e.g. * <code>${ROOTDIR}/log_HOST_PORT</code> * @param fs FileSystem * @param conf HBaseConfiguration * @throws IOException */
Split up a bunch of log files that are no longer being written to into new files, one per region. Delete the old log files when finished
splitLog
{ "repo_name": "ALEXGUOQ/hbase", "path": "src/java/org/apache/hadoop/hbase/HLog.java", "license": "apache-2.0", "size": 23997 }
[ "java.io.EOFException", "java.io.IOException", "java.util.HashMap", "java.util.Map", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.fs.FileStatus", "org.apache.hadoop.fs.FileSystem", "org.apache.hadoop.fs.FileUtil", "org.apache.hadoop.fs.Path", "org.apache.hadoop.io.SequenceFile", "org.apache.hadoop.io.Text" ]
import java.io.EOFException; import java.io.IOException; import java.util.HashMap; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.SequenceFile; import org.apache.hadoop.io.Text;
import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.io.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
1,231,437
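The essence of splitLog is routing every edit to a per-region output keyed by the region name, creating the output lazily the first time a region is seen. A minimal, Hadoop-free sketch of that grouping step follows; the Edit type and class name are invented, and the real code writes per-region SequenceFiles rather than in-memory lists.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

// Hypothetical sketch: group log edits by region, mirroring the one-writer-per-region split.
public class LogSplitSketch {

    static class Edit {
        final String regionName;
        final String payload;
        Edit(String regionName, String payload) {
            this.regionName = regionName;
            this.payload = payload;
        }
    }

    static Map<String, List<Edit>> splitByRegion(List<Edit> edits) {
        Map<String, List<Edit>> byRegion = new HashMap<>();
        for (Edit e : edits) {
            // computeIfAbsent plays the role of "create a writer the first time a region is seen"
            byRegion.computeIfAbsent(e.regionName, r -> new ArrayList<>()).add(e);
        }
        return byRegion;
    }

    public static void main(String[] args) {
        List<Edit> edits = Arrays.asList(
                new Edit("regionA", "put k1"),
                new Edit("regionB", "put k2"),
                new Edit("regionA", "delete k3"));
        splitByRegion(edits).forEach((region, rs) ->
                System.out.println(region + " gets " + rs.size() + " edit(s)"));
    }
}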
Value<Boolean> arms();
Value<Boolean> arms();
/** * Returns whether this armor stand shows arms or not. * <p>Arms that do not show may also not show an item in hand.</p> * * @return Whether this armor stand shows its arms */
Returns whether this armor stand shows arms or not. Arms that do not show may also not show an item in hand
arms
{ "repo_name": "modwizcode/SpongeAPI", "path": "src/main/java/org/spongepowered/api/data/manipulator/mutable/entity/ArmorStandData.java", "license": "mit", "size": 2450 }
[ "org.spongepowered.api.data.value.mutable.Value" ]
import org.spongepowered.api.data.value.mutable.Value;
import org.spongepowered.api.data.value.mutable.*;
[ "org.spongepowered.api" ]
org.spongepowered.api;
2,792,137
public Realm getRealm() { Realm configured=super.getRealm(); // If no set realm has been called - default to JAAS // This can be overriden at engine, context and host level if( configured==null ) { configured=new JAASRealm(); this.setRealm( configured ); } return configured; }
Realm function() { Realm configured=super.getRealm(); if( configured==null ) { configured=new JAASRealm(); this.setRealm( configured ); } return configured; }
/** Provide a default in case no explicit configuration is set * * @return configured realm, or a JAAS realm by default */
Provide a default in case no explicit configuration is set
getRealm
{ "repo_name": "plumer/codana", "path": "tomcat_files/6.0.0/StandardEngine.java", "license": "mit", "size": 15950 }
[ "org.apache.catalina.Realm", "org.apache.catalina.realm.JAASRealm" ]
import org.apache.catalina.Realm; import org.apache.catalina.realm.JAASRealm;
import org.apache.catalina.*; import org.apache.catalina.realm.*;
[ "org.apache.catalina" ]
org.apache.catalina;
383,562
final List<HeapAllocSitesBean.SiteDescriptor> heapSitelist = heapBean.getSiteDetails(); final Map<Integer, HprofData.StackTrace> idToStackTrace = reader.getIdToStackTrace(); final Map<Integer, HTFHeapAllocStackTraceBean> heapAllocStackTraceMap = new TreeMap<Integer, HTFHeapAllocStackTraceBean>(); String[] profilingPackages = null; JobConfig jobConfig = (JobConfig)jobconfig; List<JobDefinition> jobList = jobConfig.getJobs(); if (!JobConfigUtil.isEnable(jobConfig.getIncludeClassJar())) { profilingPackages = getProfilingPackages(profilingPackages, jobList); } final int maxHeapSampleCount = Constants.PROFILING_MAX_HEAP_SAMPLE_COUNT;; boolean showTraceOfAllPackages = false; showTraceOfAllPackages = applyShowTraceOfPackages(profilingPackages, showTraceOfAllPackages); float lowestByteAllocation = 0; int lowestByteAllocStackTraceId = 0; for (HeapAllocSitesBean.SiteDescriptor heap : heapSitelist) { final float currentBytesAlloc = heap.getBytesAllocated(); if (lowestByteAllocation == 0) { lowestByteAllocation = currentBytesAlloc; lowestByteAllocStackTraceId = heap.getStackTraceId(); } List<String> stackTraceList = idToStackTrace.get(heap.getStackTraceId()).getStackTraceList(); if (showTraceOfAllPackages || isRelevantTrace(profilingPackages, stackTraceList)) { if (heapAllocStackTraceMap.size() <= maxHeapSampleCount) { populateHeapAllocMap(heap, stackTraceList, heapAllocStackTraceMap); if (currentBytesAlloc < lowestByteAllocation) { lowestByteAllocation = currentBytesAlloc; lowestByteAllocStackTraceId = heap.getStackTraceId(); } } else if (currentBytesAlloc > lowestByteAllocation) { heapAllocStackTraceMap.remove(lowestByteAllocStackTraceId); populateHeapAllocMap(heap, stackTraceList, heapAllocStackTraceMap); lowestByteAllocStackTraceId = getLowestHeapTraceId(heapAllocStackTraceMap); lowestByteAllocation = heapAllocStackTraceMap.get(lowestByteAllocStackTraceId).getHeapAllocSiteBean().getBytesAllocated(); } } } return sortByHeapAllocBytes(heapAllocStackTraceMap); }
final List<HeapAllocSitesBean.SiteDescriptor> heapSitelist = heapBean.getSiteDetails(); final Map<Integer, HprofData.StackTrace> idToStackTrace = reader.getIdToStackTrace(); final Map<Integer, HTFHeapAllocStackTraceBean> heapAllocStackTraceMap = new TreeMap<Integer, HTFHeapAllocStackTraceBean>(); String[] profilingPackages = null; JobConfig jobConfig = (JobConfig)jobconfig; List<JobDefinition> jobList = jobConfig.getJobs(); if (!JobConfigUtil.isEnable(jobConfig.getIncludeClassJar())) { profilingPackages = getProfilingPackages(profilingPackages, jobList); } final int maxHeapSampleCount = Constants.PROFILING_MAX_HEAP_SAMPLE_COUNT;; boolean showTraceOfAllPackages = false; showTraceOfAllPackages = applyShowTraceOfPackages(profilingPackages, showTraceOfAllPackages); float lowestByteAllocation = 0; int lowestByteAllocStackTraceId = 0; for (HeapAllocSitesBean.SiteDescriptor heap : heapSitelist) { final float currentBytesAlloc = heap.getBytesAllocated(); if (lowestByteAllocation == 0) { lowestByteAllocation = currentBytesAlloc; lowestByteAllocStackTraceId = heap.getStackTraceId(); } List<String> stackTraceList = idToStackTrace.get(heap.getStackTraceId()).getStackTraceList(); if (showTraceOfAllPackages isRelevantTrace(profilingPackages, stackTraceList)) { if (heapAllocStackTraceMap.size() <= maxHeapSampleCount) { populateHeapAllocMap(heap, stackTraceList, heapAllocStackTraceMap); if (currentBytesAlloc < lowestByteAllocation) { lowestByteAllocation = currentBytesAlloc; lowestByteAllocStackTraceId = heap.getStackTraceId(); } } else if (currentBytesAlloc > lowestByteAllocation) { heapAllocStackTraceMap.remove(lowestByteAllocStackTraceId); populateHeapAllocMap(heap, stackTraceList, heapAllocStackTraceMap); lowestByteAllocStackTraceId = getLowestHeapTraceId(heapAllocStackTraceMap); lowestByteAllocation = heapAllocStackTraceMap.get(lowestByteAllocStackTraceId).getHeapAllocSiteBean().getBytesAllocated(); } } } return sortByHeapAllocBytes(heapAllocStackTraceMap); }
/** * This method will iterate over HeapAllocSitesBean to get only the topN heapAllocation sites along with their stackTrace elements. It will check if * there is any specific package specified by the user in the JSON; if so, only those HeapAllocSitesBeans whose trace contains the * specified package would be selected. * * @param heapBean * @param reader * @return A Map sorted on BytesAllocated will be returned */
This method will iterate over HeapAllocSitesBean to get only the topN heapAllocation sites along with their stackTrace elements. It will check if there is any specific package specified by the user in the JSON; if so, only those HeapAllocSitesBeans whose trace contains the specified package would be selected
getTopNHeapMap
{ "repo_name": "impetus-opensource/jumbune", "path": "cluster-monitoring/src/main/java/org/jumbune/monitoring/utils/ProfilerUtil.java", "license": "lgpl-3.0", "size": 13021 }
[ "java.util.List", "java.util.Map", "java.util.TreeMap", "org.jumbune.common.beans.JobDefinition", "org.jumbune.common.job.JobConfig", "org.jumbune.common.utils.Constants", "org.jumbune.common.utils.JobConfigUtil", "org.jumbune.monitoring.hprof.HeapAllocSitesBean", "org.jumbune.monitoring.hprof.HprofData" ]
import java.util.List; import java.util.Map; import java.util.TreeMap; import org.jumbune.common.beans.JobDefinition; import org.jumbune.common.job.JobConfig; import org.jumbune.common.utils.Constants; import org.jumbune.common.utils.JobConfigUtil; import org.jumbune.monitoring.hprof.HeapAllocSitesBean; import org.jumbune.monitoring.hprof.HprofData;
import java.util.*; import org.jumbune.common.beans.*; import org.jumbune.common.job.*; import org.jumbune.common.utils.*; import org.jumbune.monitoring.hprof.*;
[ "java.util", "org.jumbune.common", "org.jumbune.monitoring" ]
java.util; org.jumbune.common; org.jumbune.monitoring;
953,862
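getTopNHeapMap keeps at most maxHeapSampleCount sites by repeatedly evicting the entry with the lowest byte allocation. The same top-N selection can be expressed with a bounded min-heap; the sketch below is a simplified stand-in (invented Site class, no package filtering, no sorting of the final result) rather than the project's actual implementation.

import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.PriorityQueue;

// Hypothetical sketch: keep only the N largest allocation sites using a min-heap.
public class TopNAllocationSketch {

    static class Site {
        final int traceId;
        final float bytesAllocated;
        Site(int traceId, float bytesAllocated) {
            this.traceId = traceId;
            this.bytesAllocated = bytesAllocated;
        }
    }

    static PriorityQueue<Site> topN(List<Site> sites, int n) {
        PriorityQueue<Site> heap =
                new PriorityQueue<>(Comparator.comparingDouble((Site s) -> s.bytesAllocated));
        for (Site s : sites) {
            heap.offer(s);
            if (heap.size() > n) {
                heap.poll();   // evict the site with the smallest allocation seen so far
            }
        }
        return heap;
    }

    public static void main(String[] args) {
        List<Site> sites = Arrays.asList(new Site(1, 100f), new Site(2, 500f), new Site(3, 250f));
        for (Site s : topN(sites, 2)) {
            System.out.println(s.traceId + ": " + s.bytesAllocated);
        }
    }
}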
public boolean servosAct() { boolean isDone = true; for (Map.Entry<ServoName, ServoControl> entry : servoMap.entrySet()) { ServoControl servoControl = entry.getValue(); if (!servoControl.act()) { isDone = false; } } return isDone; }
boolean function() { boolean isDone = true; for (Map.Entry<ServoName, ServoControl> entry : servoMap.entrySet()) { ServoControl servoControl = entry.getValue(); if (!servoControl.act()) { isDone = false; } } return isDone; }
/** * causes the servo positions to advance * * @return false if any servo is not done */
causes the servo positions to advance
servosAct
{ "repo_name": "FTC7393/EVLib", "path": "EVLib/src/main/java/ftc/evlib/hardware/servos/Servos.java", "license": "mit", "size": 2318 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
2,650,114
public static ChangeMessage extractMessageBody(Message message){ return extractMessageBody(message, ChangeMessage.class); }
static ChangeMessage function(Message message){ return extractMessageBody(message, ChangeMessage.class); }
/** * Extract a ChangeMessage from an Amazon Message * @param message * @return */
Extract a ChangeMessage from an Amazon Message
extractMessageBody
{ "repo_name": "xschildw/Synapse-Repository-Services", "path": "lib/lib-worker/src/main/java/org/sagebionetworks/asynchronous/workers/sqs/MessageUtils.java", "license": "apache-2.0", "size": 9544 }
[ "com.amazonaws.services.sqs.model.Message", "org.sagebionetworks.repo.model.message.ChangeMessage" ]
import com.amazonaws.services.sqs.model.Message; import org.sagebionetworks.repo.model.message.ChangeMessage;
import com.amazonaws.services.sqs.model.*; import org.sagebionetworks.repo.model.message.*;
[ "com.amazonaws.services", "org.sagebionetworks.repo" ]
com.amazonaws.services; org.sagebionetworks.repo;
1,667,298
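extractMessageBody delegates to a generic overload that deserializes the message body into the requested class. A hedged, self-contained sketch of that idea using Jackson is shown below; ChangePayload is an invented stand-in for ChangeMessage, and the real MessageUtils may additionally unwrap SNS/SQS envelopes before parsing.

import com.fasterxml.jackson.databind.ObjectMapper;

// Hypothetical sketch: parse a JSON message body into a typed object.
public class MessageBodySketch {

    public static class ChangePayload {          // invented stand-in for ChangeMessage
        public String objectId;
        public String changeType;
    }

    public static void main(String[] args) throws Exception {
        String body = "{\"objectId\":\"syn123\",\"changeType\":\"UPDATE\"}";
        ObjectMapper mapper = new ObjectMapper();
        ChangePayload payload = mapper.readValue(body, ChangePayload.class);
        System.out.println(payload.objectId + " " + payload.changeType);
    }
}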
public ResourceGroupInner withTags(Map<String, String> tags) { this.tags = tags; return this; }
ResourceGroupInner function(Map<String, String> tags) { this.tags = tags; return this; }
/** * Set the tags value. * * @param tags the tags value to set * @return the ResourceGroupInner object itself. */
Set the tags value
withTags
{ "repo_name": "pomortaz/azure-sdk-for-java", "path": "azure-mgmt-resources/src/main/java/com/microsoft/azure/management/resources/implementation/ResourceGroupInner.java", "license": "mit", "size": 3619 }
[ "java.util.Map" ]
import java.util.Map;
import java.util.*;
[ "java.util" ]
java.util;
1,284,082
return InternalUtils.toItemList(result.getItems()); }
return InternalUtils.toItemList(result.getItems()); }
/** * Returns a non-null list of the returned items; can be empty. */
Returns a non-null list of the returned items; can be empty
getItems
{ "repo_name": "aws/aws-sdk-java", "path": "aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/document/ScanOutcome.java", "license": "apache-2.0", "size": 1589 }
[ "com.amazonaws.services.dynamodbv2.document.internal.InternalUtils" ]
import com.amazonaws.services.dynamodbv2.document.internal.InternalUtils;
import com.amazonaws.services.dynamodbv2.document.internal.*;
[ "com.amazonaws.services" ]
com.amazonaws.services;
716,416
private void put(String znode, byte[] data, boolean update) throws YarnException { // Create the znode boolean created = false; try { created = zkManager.create(znode); } catch (Exception e) { String errMsg = "Cannot create znode " + znode + ": " + e.getMessage(); FederationStateStoreUtils.logAndThrowStoreException(LOG, errMsg); } if (!created) { LOG.debug("{} not created", znode); if (!update) { LOG.info("{} already existed and we are not updating", znode); return; } } // Write the data into the znode try { zkManager.setData(znode, data, -1); } catch (Exception e) { String errMsg = "Cannot write data into znode " + znode + ": " + e.getMessage(); FederationStateStoreUtils.logAndThrowStoreException(LOG, errMsg); } }
void function(String znode, byte[] data, boolean update) throws YarnException { boolean created = false; try { created = zkManager.create(znode); } catch (Exception e) { String errMsg = STR + znode + STR + e.getMessage(); FederationStateStoreUtils.logAndThrowStoreException(LOG, errMsg); } if (!created) { LOG.debug(STR, znode); if (!update) { LOG.info(STR, znode); return; } } try { zkManager.setData(znode, data, -1); } catch (Exception e) { String errMsg = STR + znode + STR + e.getMessage(); FederationStateStoreUtils.logAndThrowStoreException(LOG, errMsg); } }
/** * Put data into a znode in Zookeeper. * @param znode Path of the znode. * @param data Data to write. * @throws YarnException If it cannot contact ZooKeeper. */
Put data into a znode in Zookeeper
put
{ "repo_name": "apurtell/hadoop", "path": "hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/federation/store/impl/ZookeeperFederationStateStore.java", "license": "apache-2.0", "size": 24935 }
[ "org.apache.hadoop.yarn.exceptions.YarnException", "org.apache.hadoop.yarn.server.federation.store.utils.FederationStateStoreUtils" ]
import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.federation.store.utils.FederationStateStoreUtils;
import org.apache.hadoop.yarn.exceptions.*; import org.apache.hadoop.yarn.server.federation.store.utils.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
98,555
public void startupServer(int port) throws Exception { jettyServer = new Server(port); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); jettyServer.setHandler(context); context.addServlet(new ServletHolder(this),"/*"); jettyServer.start(); jettyPort = jettyServer.getConnectors()[0].getLocalPort(); jettyServer.join(); }
void function(int port) throws Exception { jettyServer = new Server(port); ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS); context.setContextPath("/"); jettyServer.setHandler(context); context.addServlet(new ServletHolder(this),"/*"); jettyServer.start(); jettyPort = jettyServer.getConnectors()[0].getLocalPort(); jettyServer.join(); }
/** * Starts a test jetty server. Blocks until the * server is terminated. * @param port the port to which the server will connect. * @throws Exception if the server couldn't be started. */
Starts a test jetty server. Blocks until the server is terminated
startupServer
{ "repo_name": "kbase/java_common", "path": "src/us/kbase/common/service/JsonServerServlet.java", "license": "mit", "size": 28093 }
[ "org.eclipse.jetty.server.Server", "org.eclipse.jetty.servlet.ServletContextHandler", "org.eclipse.jetty.servlet.ServletHolder" ]
import org.eclipse.jetty.server.Server; import org.eclipse.jetty.servlet.ServletContextHandler; import org.eclipse.jetty.servlet.ServletHolder;
import org.eclipse.jetty.server.*; import org.eclipse.jetty.servlet.*;
[ "org.eclipse.jetty" ]
org.eclipse.jetty;
543,677
public void testDiffExcel2003Files() throws Exception { CIFSContentComparator contentComparator = new CIFSContentComparator(); contentComparator.init(); ClassPathResource file0Resource = new ClassPathResource("filesys/ContentComparatorTestExcel2003-1.xls"); assertNotNull("unable to find test resource filesys/ContentComparatorTestExcel2003-1.xls", file0Resource); ClassPathResource file1Resource = new ClassPathResource("filesys/ContentComparatorTestExcel2003-2.xls"); assertNotNull("unable to find test resource filesys/ContentComparatorTestExcel2003-2.xls", file1Resource); ClassPathResource file3Resource = new ClassPathResource("filesys/ContentComparatorTestExcel2003-3.xls"); assertNotNull("unable to find test resource filesys/ContentComparatorTestExcel2003-3.xls", file3Resource); ClassPathResource file4Resource = new ClassPathResource("filesys/ContentComparatorTestExcel2003-4.xls"); assertNotNull("unable to find test resource filesys/ContentComparatorTestExcel2003-4.xls", file4Resource); ClassPathResource file5Resource = new ClassPathResource("filesys/ContentComparatorTestExcel2003-5.xls"); assertNotNull("unable to find test resource filesys/ContentComparatorTestExcel2003-5.xls", file5Resource); { File file0 = file0Resource.getFile(); File file1 = file1Resource.getFile(); ContentReader reader = new FileContentReader(file0); reader.setMimetype("application/vnd.ms-excel"); reader.setEncoding("UTF-8"); boolean result = contentComparator.isContentEqual(reader, file1); assertTrue("compare trivially different project file, should be equal", result); } { File file0 = file0Resource.getFile(); File file3 = file3Resource.getFile(); ContentReader reader = new FileContentReader(file0); reader.setMimetype("application/vnd.ms-excel"); reader.setEncoding("UTF-8"); boolean result = contentComparator.isContentEqual(reader, file3); assertTrue("different excel2003 file, failed to note difference", !result); } { File file4 = file4Resource.getFile(); File file5 = file5Resource.getFile(); ContentReader reader = new FileContentReader(file4); reader.setMimetype("application/vnd.ms-excel"); reader.setEncoding("UTF-8"); boolean result = contentComparator.isContentEqual(reader, file5); assertTrue("compare trivially different xls files, should be equal", result); } }
void function() throws Exception { CIFSContentComparator contentComparator = new CIFSContentComparator(); contentComparator.init(); ClassPathResource file0Resource = new ClassPathResource(STR); assertNotNull(STR, file0Resource); ClassPathResource file1Resource = new ClassPathResource(STR); assertNotNull(STR, file1Resource); ClassPathResource file3Resource = new ClassPathResource(STR); assertNotNull(STR, file3Resource); ClassPathResource file4Resource = new ClassPathResource(STR); assertNotNull(STR, file4Resource); ClassPathResource file5Resource = new ClassPathResource(STR); assertNotNull(STR, file5Resource); { File file0 = file0Resource.getFile(); File file1 = file1Resource.getFile(); ContentReader reader = new FileContentReader(file0); reader.setMimetype(STR); reader.setEncoding("UTF-8"); boolean result = contentComparator.isContentEqual(reader, file1); assertTrue(STR, result); } { File file0 = file0Resource.getFile(); File file3 = file3Resource.getFile(); ContentReader reader = new FileContentReader(file0); reader.setMimetype(STR); reader.setEncoding("UTF-8"); boolean result = contentComparator.isContentEqual(reader, file3); assertTrue(STR, !result); } { File file4 = file4Resource.getFile(); File file5 = file5Resource.getFile(); ContentReader reader = new FileContentReader(file4); reader.setMimetype(STR); reader.setEncoding("UTF-8"); boolean result = contentComparator.isContentEqual(reader, file5); assertTrue(STR, result); } }
/** * Opening and closing an Excel 2003 file changes certain header properties. * Test file 1 has been opened and closed in Excel 2003. * @throws Exception */
Opening and closing an Excel 2003 file changes certain header properties. Test file 1 has been opened and closed in Excel 2003
testDiffExcel2003Files
{ "repo_name": "nguyentienlong/community-edition", "path": "projects/repository/source/test-java/org/alfresco/filesys/repo/CIFSContentComparatorTest.java", "license": "lgpl-3.0", "size": 16334 }
[ "java.io.File", "org.alfresco.repo.content.filestore.FileContentReader", "org.alfresco.service.cmr.repository.ContentReader", "org.springframework.core.io.ClassPathResource" ]
import java.io.File; import org.alfresco.repo.content.filestore.FileContentReader; import org.alfresco.service.cmr.repository.ContentReader; import org.springframework.core.io.ClassPathResource;
import java.io.*; import org.alfresco.repo.content.filestore.*; import org.alfresco.service.cmr.repository.*; import org.springframework.core.io.*;
[ "java.io", "org.alfresco.repo", "org.alfresco.service", "org.springframework.core" ]
java.io; org.alfresco.repo; org.alfresco.service; org.springframework.core;
1,970,036
protected void fireSelectionChanged(ISelection selection) { // create an event final SelectionChangedEvent event = new SelectionChangedEvent(this, selection);
void function(ISelection selection) { final SelectionChangedEvent event = new SelectionChangedEvent(this, selection);
/** * Fires a selection changed event. * * @param selection * the new selection */
Fires a selection changed event
fireSelectionChanged
{ "repo_name": "ccw-ide/ccw", "path": "ccw.core/src/java/ccw/repl/NamespaceBrowser.java", "license": "epl-1.0", "size": 16431 }
[ "org.eclipse.jface.viewers.ISelection", "org.eclipse.jface.viewers.SelectionChangedEvent" ]
import org.eclipse.jface.viewers.ISelection; import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.*;
[ "org.eclipse.jface" ]
org.eclipse.jface;
115,485
public static SMIMECapabilities getInstance( Object o) { if (o == null || o instanceof SMIMECapabilities) { return (SMIMECapabilities)o; } if (o instanceof ASN1Sequence) { return new SMIMECapabilities((ASN1Sequence)o); } if (o instanceof Attribute) { return new SMIMECapabilities( (ASN1Sequence)(((Attribute)o).getAttrValues().getObjectAt(0))); } throw new IllegalArgumentException("unknown object in factory: " + o.getClass().getName()); } public SMIMECapabilities( ASN1Sequence seq) { capabilities = seq; }
static SMIMECapabilities function( Object o) { if (o == null o instanceof SMIMECapabilities) { return (SMIMECapabilities)o; } if (o instanceof ASN1Sequence) { return new SMIMECapabilities((ASN1Sequence)o); } if (o instanceof Attribute) { return new SMIMECapabilities( (ASN1Sequence)(((Attribute)o).getAttrValues().getObjectAt(0))); } throw new IllegalArgumentException(STR + o.getClass().getName()); } public SMIMECapabilities( ASN1Sequence seq) { capabilities = seq; }
/** * Return an SMIMECapabilities object from the given object. * * @param o the object we want converted. * @exception IllegalArgumentException if the object cannot be converted. */
Return an SMIMECapabilities object from the given object
getInstance
{ "repo_name": "sake/bouncycastle-java", "path": "src/org/bouncycastle/asn1/smime/SMIMECapabilities.java", "license": "mit", "size": 3380 }
[ "org.bouncycastle.asn1.ASN1Sequence", "org.bouncycastle.asn1.cms.Attribute" ]
import org.bouncycastle.asn1.ASN1Sequence; import org.bouncycastle.asn1.cms.Attribute;
import org.bouncycastle.asn1.*; import org.bouncycastle.asn1.cms.*;
[ "org.bouncycastle.asn1" ]
org.bouncycastle.asn1;
130,310
UserConvert usercon = new UserConvert(); User user = new User(11, "mouse", "mouseland"); User user2 = new User(1112, "fish", "fishland"); User user3 = new User(11234, "volf", "volfland"); HashMap<Integer, User> expectetion = new HashMap<>(); expectetion.put(11, user); expectetion.put(1112, user2); expectetion.put(11234, user3); List<User> list = new ArrayList<>(); list.add(user); list.add(user2); list.add(user3); HashMap<Integer, User> reality = usercon.process(list); assertThat(expectetion, is(reality)); }
UserConvert usercon = new UserConvert(); User user = new User(11, "mouse", STR); User user2 = new User(1112, "fish", STR); User user3 = new User(11234, "volf", STR); HashMap<Integer, User> expectetion = new HashMap<>(); expectetion.put(11, user); expectetion.put(1112, user2); expectetion.put(11234, user3); List<User> list = new ArrayList<>(); list.add(user); list.add(user2); list.add(user3); HashMap<Integer, User> reality = usercon.process(list); assertThat(expectetion, is(reality)); }
/** * Test converting a collection of users to a map keyed by user id. */
Test converting a collection of users to a map keyed by user id
whenCollectionHasNineElementsThenArrayIsTheSame
{ "repo_name": "AVBaranov/abaranov", "path": "chapter_005/src/test/java/ru/job4j/collectionsframework/UserConvertTest.java", "license": "apache-2.0", "size": 1066 }
[ "java.util.ArrayList", "java.util.HashMap", "java.util.List", "org.hamcrest.core.Is", "org.junit.Assert" ]
import java.util.ArrayList; import java.util.HashMap; import java.util.List; import org.hamcrest.core.Is; import org.junit.Assert;
import java.util.*; import org.hamcrest.core.*; import org.junit.*;
[ "java.util", "org.hamcrest.core", "org.junit" ]
java.util; org.hamcrest.core; org.junit;
2,580,954
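The test above builds the expected map by hand before comparing it with the converter's output. The same list-to-map conversion that UserConvert.process performs can be sketched with streams; the User class here is a minimal invented stand-in for the project's model.

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;

// Hypothetical sketch: index a list of users by id, as the conversion under test does.
public class UserConvertSketch {

    static class User {
        final int id;
        final String name;
        User(int id, String name) {
            this.id = id;
            this.name = name;
        }
    }

    public static void main(String[] args) {
        List<User> users = Arrays.asList(new User(11, "mouse"), new User(1112, "fish"));
        Map<Integer, User> byId = users.stream()
                .collect(Collectors.toMap(u -> u.id, Function.identity()));
        System.out.println(byId.get(11).name);   // mouse
    }
}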
private Constraint unlinkFirst(Node node) { final Constraint constraint = node.constraint; final Node next = node.next; node.constraint = null; node.next = null; // help GC first = next; if (next == null) { last = null; } else { next.prev = null; } size--; modCount++; if (null != listener) { listener.notifyRemoved(constraint, node); } return constraint; }
Constraint function(Node node) { final Constraint constraint = node.constraint; final Node next = node.next; node.constraint = null; node.next = null; first = next; if (next == null) { last = null; } else { next.prev = null; } size--; modCount++; if (null != listener) { listener.notifyRemoved(constraint, node); } return constraint; }
/** * Unlinks non-null first node <code>node</code>. * * @param node the node to unlink * @return constraint the removed constraint */
Unlinks non-null first node <code>node</code>
unlinkFirst
{ "repo_name": "SSEHUB/EASyProducer", "path": "Plugins/Reasoner/ReasonerCore/ReasonerCore/src/net/ssehub/easy/reasoning/core/reasoner/ConstraintList.java", "license": "apache-2.0", "size": 27229 }
[ "net.ssehub.easy.varModel.model.Constraint" ]
import net.ssehub.easy.varModel.model.Constraint;
import net.ssehub.easy.*;
[ "net.ssehub.easy" ]
net.ssehub.easy;
1,915,419
@Test public void testHashCode() { final PyFactory.PyLongBuilder builder = getPyFactory().getPyLongBuilder(); builder.value(1); final PyLong testSubject11 = builder.build(); final PyLong testSubject12 = builder.build(); builder.value(2); final PyLong testSubject21 = builder.build(); final PyLong testSubject22 = builder.build(); assertThat(testSubject11.hashCode(), equalTo(testSubject12.hashCode())); assertThat(testSubject21.hashCode(), equalTo(testSubject22.hashCode())); assertThat(testSubject11.hashCode(), not(equalTo(testSubject22.hashCode()))); }
void function() { final PyFactory.PyLongBuilder builder = getPyFactory().getPyLongBuilder(); builder.value(1); final PyLong testSubject11 = builder.build(); final PyLong testSubject12 = builder.build(); builder.value(2); final PyLong testSubject21 = builder.build(); final PyLong testSubject22 = builder.build(); assertThat(testSubject11.hashCode(), equalTo(testSubject12.hashCode())); assertThat(testSubject21.hashCode(), equalTo(testSubject22.hashCode())); assertThat(testSubject11.hashCode(), not(equalTo(testSubject22.hashCode()))); }
/** * Test method for {@link PyLong#hashCode()}. */
Test method for <code>PyLong#hashCode()</code>
testHashCode
{ "repo_name": "jevetools/unmarshal", "path": "com.jevetools.unmarshal.python.api.impl.tests/src/com/jevetools/unmarshal/python/api/impl/test/PyLongTest.java", "license": "bsd-3-clause", "size": 7106 }
[ "com.jevetools.unmarshal.python.api.PyFactory", "com.jevetools.unmarshal.python.api.PyLong", "org.hamcrest.CoreMatchers", "org.junit.Assert" ]
import com.jevetools.unmarshal.python.api.PyFactory; import com.jevetools.unmarshal.python.api.PyLong; import org.hamcrest.CoreMatchers; import org.junit.Assert;
import com.jevetools.unmarshal.python.api.*; import org.hamcrest.*; import org.junit.*;
[ "com.jevetools.unmarshal", "org.hamcrest", "org.junit" ]
com.jevetools.unmarshal; org.hamcrest; org.junit;
2,786,407
public EndpointInner withOrigins(List<DeepCreatedOrigin> origins) { this.origins = origins; return this; }
EndpointInner function(List<DeepCreatedOrigin> origins) { this.origins = origins; return this; }
/** * Set the origins value. * * @param origins the origins value to set * @return the EndpointInner object itself. */
Set the origins value
withOrigins
{ "repo_name": "jianghaolu/azure-sdk-for-java", "path": "azure-mgmt-cdn/src/main/java/com/microsoft/azure/management/cdn/implementation/EndpointInner.java", "license": "mit", "size": 10155 }
[ "com.microsoft.azure.management.cdn.DeepCreatedOrigin", "java.util.List" ]
import com.microsoft.azure.management.cdn.DeepCreatedOrigin; import java.util.List;
import com.microsoft.azure.management.cdn.*; import java.util.*;
[ "com.microsoft.azure", "java.util" ]
com.microsoft.azure; java.util;
1,064,772
private MediaPlayer createMediaplayer(final String pPath) { MediaPlayer mediaPlayer = new MediaPlayer(); try { if (pPath.startsWith("/")) { final FileInputStream fis = new FileInputStream(pPath); mediaPlayer.setDataSource(fis.getFD()); fis.close(); } else { final AssetFileDescriptor assetFileDescritor = this.mContext.getAssets().openFd(pPath); mediaPlayer.setDataSource(assetFileDescritor.getFileDescriptor(), assetFileDescritor.getStartOffset(), assetFileDescritor.getLength()); } mediaPlayer.prepare(); mediaPlayer.setVolume(this.mLeftVolume, this.mRightVolume); } catch (final Exception e) { mediaPlayer = null; Log.e(Cocos2dxMusic.TAG, "error: " + e.getMessage(), e); } return mediaPlayer; } // =========================================================== // Inner and Anonymous Classes // ===========================================================
MediaPlayer function(final String pPath) { MediaPlayer mediaPlayer = new MediaPlayer(); try { if (pPath.startsWith("/")) { final FileInputStream fis = new FileInputStream(pPath); mediaPlayer.setDataSource(fis.getFD()); fis.close(); } else { final AssetFileDescriptor assetFileDescritor = this.mContext.getAssets().openFd(pPath); mediaPlayer.setDataSource(assetFileDescritor.getFileDescriptor(), assetFileDescritor.getStartOffset(), assetFileDescritor.getLength()); } mediaPlayer.prepare(); mediaPlayer.setVolume(this.mLeftVolume, this.mRightVolume); } catch (final Exception e) { mediaPlayer = null; Log.e(Cocos2dxMusic.TAG, STR + e.getMessage(), e); } return mediaPlayer; }
/** * Create a MediaPlayer for music. * * @param pPath * the path relative to assets * @return the prepared MediaPlayer, or null if creation failed */
Create a MediaPlayer for music
createMediaplayer
{ "repo_name": "likunpeng/cocos", "path": "QuickGame/frameworks/cocos2d-x/cocos/platform/android/java/src/org/cocos2dx/lib/Cocos2dxMusic.java", "license": "apache-2.0", "size": 9580 }
[ "android.content.res.AssetFileDescriptor", "android.media.MediaPlayer", "android.util.Log", "java.io.FileInputStream" ]
import android.content.res.AssetFileDescriptor; import android.media.MediaPlayer; import android.util.Log; import java.io.FileInputStream;
import android.content.res.*; import android.media.*; import android.util.*; import java.io.*;
[ "android.content", "android.media", "android.util", "java.io" ]
android.content; android.media; android.util; java.io;
2,369,850
protected final void deferMessage(Message msg) { mSmHandler.deferMessage(msg); }
final void function(Message msg) { mSmHandler.deferMessage(msg); }
/** * Defer this message until next state transition. * Upon transitioning all deferred messages will be * placed on the queue and reprocessed in the original * order. (i.e. The next state the oldest messages will * be processed first) * * @param msg is deferred until the next transition. */
Defer this message until next state transition. Upon transitioning all deferred messages will be placed on the queue and reprocessed in the original order. (i.e. The next state the oldest messages will be processed first)
deferMessage
{ "repo_name": "syslover33/ctank", "path": "java/android-sdk-linux_r24.4.1_src/sources/android-23/com/android/internal/util/StateMachine.java", "license": "gpl-3.0", "size": 68450 }
[ "android.os.Message" ]
import android.os.Message;
import android.os.*;
[ "android.os" ]
android.os;
2,526,357
private boolean isPasswordValid(String password) { return !TextUtils.isEmpty(password) && password.length() >= 6; }
boolean function(String password) { return !TextUtils.isEmpty(password) && password.length() >= 6; }
/** * Check whether the password is valid. The password must not be empty and its length should be at least 6. * * @param password password that the user entered. * @return true if the password is valid. */
Check whether the password is valid. The password must not be empty and its length should be at least 6
isPasswordValid
{ "repo_name": "StupidL/Cooker", "path": "app/src/main/java/me/stupidme/cooker/view/login/LoginFragment.java", "license": "apache-2.0", "size": 8660 }
[ "android.text.TextUtils" ]
import android.text.TextUtils;
import android.text.*;
[ "android.text" ]
android.text;
1,375,318
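A plain-Java version of the same rule (non-empty and at least six characters), without the Android TextUtils dependency; it assumes the null-or-empty check is exactly what TextUtils.isEmpty provides.

// Hypothetical sketch: the same password rule expressed without Android classes.
public class PasswordRuleSketch {

    static boolean isPasswordValid(String password) {
        // TextUtils.isEmpty(s) is equivalent to s == null || s.length() == 0
        return password != null && !password.isEmpty() && password.length() >= 6;
    }

    public static void main(String[] args) {
        System.out.println(isPasswordValid("abc"));      // false: too short
        System.out.println(isPasswordValid("abcdef"));   // true
    }
}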
public static Type collectionElementType(Type context, Class<?> contextRawType) { Type collectionType = getSupertype(context, contextRawType, Collection.class); if (collectionType instanceof WildcardType) { collectionType = ((WildcardType) collectionType).getUpperBounds()[0]; } if (collectionType instanceof ParameterizedType) { return ((ParameterizedType) collectionType).getActualTypeArguments()[0]; } return Object.class; }
static Type function(Type context, Class<?> contextRawType) { Type collectionType = getSupertype(context, contextRawType, Collection.class); if (collectionType instanceof WildcardType) { collectionType = ((WildcardType) collectionType).getUpperBounds()[0]; } if (collectionType instanceof ParameterizedType) { return ((ParameterizedType) collectionType).getActualTypeArguments()[0]; } return Object.class; }
/** * Returns the element type of this collection type. * @throws IllegalArgumentException if this type is not a collection. */
Returns the element type of this collection type
collectionElementType
{ "repo_name": "serj-lotutovici/moshi", "path": "moshi/src/main/java/com/squareup/moshi/Types.java", "license": "apache-2.0", "size": 24324 }
[ "java.lang.reflect.ParameterizedType", "java.lang.reflect.Type", "java.lang.reflect.WildcardType", "java.util.Collection" ]
import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; import java.lang.reflect.WildcardType; import java.util.Collection;
import java.lang.reflect.*; import java.util.*;
[ "java.lang", "java.util" ]
java.lang; java.util;
282,618
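collectionElementType walks up to the Collection supertype and reads its first type argument. The sketch below shows only the core reflection step on a declared field; it skips the supertype resolution and wildcard handling that the real method performs, and the names are illustrative.

import java.lang.reflect.Field;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.util.List;

// Hypothetical sketch: read a collection field's element type via reflection.
public class ElementTypeSketch {

    static List<String> names;   // the field whose element type we want to discover

    public static void main(String[] args) throws Exception {
        Field field = ElementTypeSketch.class.getDeclaredField("names");
        Type generic = field.getGenericType();                     // List<String>
        if (generic instanceof ParameterizedType) {
            Type element = ((ParameterizedType) generic).getActualTypeArguments()[0];
            System.out.println(element);                           // class java.lang.String
        }
    }
}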
public MavenRepositoryURL getLocalRepository() { if (!contains(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY)) { // look for a local repository property String spec = m_propertyResolver.get(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY); // if not set get local repository from maven settings if (spec == null && m_settings != null) { spec = m_settings.getLocalRepository(); } if (spec != null) { if (!spec.toLowerCase().contains("@snapshots")) { spec += "@snapshots"; } // check if we have a valid url try { return set(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY, new MavenRepositoryURL(spec)); } catch (MalformedURLException e) { // maybe is just a file? try { return set(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY, new MavenRepositoryURL(new File(spec).toURI().toASCIIString()) ); } catch (MalformedURLException ignore) { LOGGER.warn("Local repository [" + spec + "] cannot be used and will be skipped"); return set(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY, null); } } } } return get(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY); } /** * {@inheritDoc}
MavenRepositoryURL function() { if (!contains(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY)) { String spec = m_propertyResolver.get(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY); if (spec == null && m_settings != null) { spec = m_settings.getLocalRepository(); } if (spec != null) { if (!spec.toLowerCase().contains(STR)) { spec += STR; } try { return set(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY, new MavenRepositoryURL(spec)); } catch (MalformedURLException e) { try { return set(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY, new MavenRepositoryURL(new File(spec).toURI().toASCIIString()) ); } catch (MalformedURLException ignore) { LOGGER.warn(STR + spec + STR); return set(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY, null); } } } } return get(m_pid + MavenConstants.PROPERTY_LOCAL_REPOSITORY); } /** * {@inheritDoc}
/** * Resolves local repository directory by using the following resolution:<br/> * 1. looks for a configuration property named localRepository; * 2. looks for a framework property/system setting localRepository;<br/> * 3. looks in settings.xml (see settings.xml resolution);<br/> * 4. falls back to ${user.home}/.m2/repository. * * @see MavenConfiguration#getLocalRepository() */
Resolves local repository directory by using the following resolution: 1. looks for a configuration property named localRepository; 2. looks for a framework property/system setting localRepository; 3. looks in settings.xml (see settings.xml resolution); 4. falls back to ${user.home}/.m2/repository
getLocalRepository
{ "repo_name": "hekonsek/fabric8", "path": "sandbox/fabric/fabric-agent/src/main/java/io/fabric8/agent/mvn/MavenConfigurationImpl.java", "license": "apache-2.0", "size": 14611 }
[ "java.io.File", "java.net.MalformedURLException" ]
import java.io.File; import java.net.MalformedURLException;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
767,605
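The resolution above tries the configured value as a repository URL first and, when that fails with MalformedURLException, retries it as a plain directory path. That URL-or-file fallback is isolated in the sketch below; it leaves out the @snapshots suffix handling and the settings.xml lookup, and the class and method names are invented.

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;

// Hypothetical sketch: accept either a URL or a local path for the repository location.
public class RepoSpecSketch {

    static String toRepositoryUrl(String spec) {
        try {
            return new URL(spec).toExternalForm();            // already a well-formed URL
        } catch (MalformedURLException e) {
            return new File(spec).toURI().toASCIIString();    // fall back to treating it as a path
        }
    }

    public static void main(String[] args) {
        System.out.println(toRepositoryUrl("https://repo1.maven.org/maven2"));
        System.out.println(toRepositoryUrl("/home/user/.m2/repository"));
    }
}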
public void setJsonErrorConsumer( Consumer<? super JsonError> jsonErrorConsumer) { this.jsonErrorConsumer = jsonErrorConsumer; } /** * Read the {@link GltfAsset} from the given URI * * @param uri The URI * @return The {@link GltfAsset}
void function( Consumer<? super JsonError> jsonErrorConsumer) { this.jsonErrorConsumer = jsonErrorConsumer; } /** * Read the {@link GltfAsset} from the given URI * * @param uri The URI * @return The {@link GltfAsset}
/** * Set the given consumer to receive {@link JsonError}s that may * occur when a glTF is read * * @param jsonErrorConsumer The {@link JsonError} consumer */
Set the given consumer to receive <code>JsonError</code>s that may occur when a glTF is read
setJsonErrorConsumer
{ "repo_name": "javagl/JglTF", "path": "jgltf-model/src/main/java/de/javagl/jgltf/model/io/GltfAssetReader.java", "license": "mit", "size": 6832 }
[ "java.util.function.Consumer" ]
import java.util.function.Consumer;
import java.util.function.*;
[ "java.util" ]
java.util;
651,306
Publisher<Void> commit();
Publisher<Void> commit();
/** * Commits all changes made on this transaction. */
Commits all changes made on this transaction
commit
{ "repo_name": "jackygurui/redisson", "path": "redisson/src/main/java/org/redisson/api/RTransactionReactive.java", "license": "apache-2.0", "size": 4952 }
[ "org.reactivestreams.Publisher" ]
import org.reactivestreams.Publisher;
import org.reactivestreams.*;
[ "org.reactivestreams" ]
org.reactivestreams;
24,344
public void setUI(MenuBarUI ui) { super.setUI(ui); }
void function(MenuBarUI ui) { super.setUI(ui); }
/** * Set the "UI" property of the menu bar, which is a look and feel class * responsible for handling menuBar's input events and painting it. * * @param ui The new "UI" property */
Set the "UI" property of the menu bar, which is a look and feel class responsible for handling menuBar's input events and painting it
setUI
{ "repo_name": "taciano-perez/JamVM-PH", "path": "src/classpath/javax/swing/JMenuBar.java", "license": "gpl-2.0", "size": 19985 }
[ "javax.swing.plaf.MenuBarUI" ]
import javax.swing.plaf.MenuBarUI;
import javax.swing.plaf.*;
[ "javax.swing" ]
javax.swing;
1,095,332
public void readFrom(File keyFile) throws IOException, GeneralSecurityException { final String password = isPemEncrypted(keyFile) ? askForPasswd(keyFile.getCanonicalPath()) : null ; privateKeys.add(loadKey(keyFile, password)); }
void function(File keyFile) throws IOException, GeneralSecurityException { final String password = isPemEncrypted(keyFile) ? askForPasswd(keyFile.getCanonicalPath()) : null ; privateKeys.add(loadKey(keyFile, password)); }
/** * Read key from keyFile. */
Read key from keyFile
readFrom
{ "repo_name": "viqueen/jenkins", "path": "cli/src/main/java/hudson/cli/PrivateKeyProvider.java", "license": "mit", "size": 5008 }
[ "java.io.File", "java.io.IOException", "java.security.GeneralSecurityException" ]
import java.io.File; import java.io.IOException; import java.security.GeneralSecurityException;
import java.io.*; import java.security.*;
[ "java.io", "java.security" ]
java.io; java.security;
596,009
public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String environmentName, String accessPolicyName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (environmentName == null) { throw new IllegalArgumentException("Parameter environmentName is required and cannot be null."); } if (accessPolicyName == null) { throw new IllegalArgumentException("Parameter accessPolicyName is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); }
Observable<ServiceResponse<Void>> function(String resourceGroupName, String environmentName, String accessPolicyName) { if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (environmentName == null) { throw new IllegalArgumentException(STR); } if (accessPolicyName == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); }
/** * Deletes the access policy with the specified name in the specified subscription, resource group, and environment. * * @param resourceGroupName Name of an Azure Resource group. * @param environmentName The name of the Time Series Insights environment associated with the specified resource group. * @param accessPolicyName The name of the Time Series Insights access policy associated with the specified environment. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceResponse} object if successful. */
Deletes the access policy with the specified name in the specified subscription, resource group, and environment
deleteWithServiceResponseAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/timeseriesinsights/mgmt-v2017_11_15/src/main/java/com/microsoft/azure/management/timeseriesinsights/v2017_11_15/implementation/AccessPoliciesInner.java", "license": "mit", "size": 37008 }
[ "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
1,805,380
private Result pXmlToken(final int yyStart) throws IOException { int yyC; Result yyResult; int yyOption1; Node yyOpValue1; Node yyValue; ParseError yyError = ParseError.DUMMY; // Alternative 1. yyResult = pWs(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$1 = yyResult.semanticValue(); yyResult = pLexerScala$XmlElement(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$2 = yyResult.semanticValue(); yyValue = GNode.create("XmlToken", v$g$1, v$g$2); return yyResult.createValue(yyValue, yyError); } } // Alternative 2. yyC = character(yyStart); if ('(' == yyC) { yyResult = pSeparator(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$3 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$1 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$1; } { // Start scope for v$g$4. Node v$g$4 = yyOpValue1; yyResult = pLexerScala$XmlElement(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$5 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$2 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$2; } { // Start scope for v$g$6. Node v$g$6 = yyOpValue1; yyC = character(yyOption1); if (')' == yyC) { yyResult = pSeparator(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$7 = yyResult.semanticValue(); yyValue = GNode.create("XmlToken", v$g$3, v$g$4, v$g$5, v$g$6, v$g$7); return yyResult.createValue(yyValue, yyError); } } } // End scope for v$g$6. } } // End scope for v$g$4. } } // Alternative 3. yyC = character(yyStart); if ('{' == yyC) { yyResult = pSeparator(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$8 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$3 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$3; } { // Start scope for v$g$9. Node v$g$9 = yyOpValue1; yyResult = pLexerScala$XmlElement(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$10 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$4 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$4; } { // Start scope for v$g$11. Node v$g$11 = yyOpValue1; yyC = character(yyOption1); if ('}' == yyC) { yyResult = pSeparator(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$12 = yyResult.semanticValue(); yyValue = GNode.create("XmlToken", v$g$8, v$g$9, v$g$10, v$g$11, v$g$12); return yyResult.createValue(yyValue, yyError); } } } // End scope for v$g$11. } } // End scope for v$g$9. } } // Done. yyError = yyError.select("xml token expected", yyStart); return yyError; } // =========================================================================
Result function(final int yyStart) throws IOException { int yyC; Result yyResult; int yyOption1; Node yyOpValue1; Node yyValue; ParseError yyError = ParseError.DUMMY; yyResult = pWs(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$1 = yyResult.semanticValue(); yyResult = pLexerScala$XmlElement(yyResult.index); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$2 = yyResult.semanticValue(); yyValue = GNode.create(STR, v$g$1, v$g$2); return yyResult.createValue(yyValue, yyError); } } yyC = character(yyStart); if ('(' == yyC) { yyResult = pSeparator(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$3 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$1 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$1; } { Node v$g$4 = yyOpValue1; yyResult = pLexerScala$XmlElement(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$5 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$2 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$2; } { Node v$g$6 = yyOpValue1; yyC = character(yyOption1); if (')' == yyC) { yyResult = pSeparator(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$7 = yyResult.semanticValue(); yyValue = GNode.create(STR, v$g$3, v$g$4, v$g$5, v$g$6, v$g$7); return yyResult.createValue(yyValue, yyError); } } } } } } } yyC = character(yyStart); if ('{' == yyC) { yyResult = pSeparator(yyStart); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$8 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$3 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$3; } { Node v$g$9 = yyOpValue1; yyResult = pLexerScala$XmlElement(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$10 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = null; yyResult = pWs(yyOption1); yyError = yyResult.select(yyError, yyOption1); if (yyResult.hasValue()) { Node v$el$4 = yyResult.semanticValue(); yyOption1 = yyResult.index; yyOpValue1 = v$el$4; } { Node v$g$11 = yyOpValue1; yyC = character(yyOption1); if ('}' == yyC) { yyResult = pSeparator(yyOption1); yyError = yyResult.select(yyError); if (yyResult.hasValue()) { Node v$g$12 = yyResult.semanticValue(); yyValue = GNode.create(STR, v$g$8, v$g$9, v$g$10, v$g$11, v$g$12); return yyResult.createValue(yyValue, yyError); } } } } } } } yyError = yyError.select(STR, yyStart); return yyError; }
/** * Parse nonterminal * org.netbeans.modules.scala.core.rats.LexerScala.XmlToken. * * @param yyStart The index. * @return The result. * @throws IOException Signals an I/O error. */
Parse nonterminal org.netbeans.modules.scala.core.rats.LexerScala.XmlToken
pXmlToken
{ "repo_name": "vnkmr7620/kojo", "path": "ScalaEditorLite/src/org/netbeans/modules/scala/core/rats/LexerScala.java", "license": "gpl-3.0", "size": 391546 }
[ "java.io.IOException", "xtc.parser.ParseError", "xtc.parser.Result", "xtc.tree.GNode", "xtc.tree.Node" ]
import java.io.IOException; import xtc.parser.ParseError; import xtc.parser.Result; import xtc.tree.GNode; import xtc.tree.Node;
import java.io.*; import xtc.parser.*; import xtc.tree.*;
[ "java.io", "xtc.parser", "xtc.tree" ]
java.io; xtc.parser; xtc.tree;
1,635,077
public void write(File f) throws IOException { RobustFileWriter out = new RobustFileWriter(f, "UTF-8"); write(out); out.close(); }
void function(File f) throws IOException { RobustFileWriter out = new RobustFileWriter(f, "UTF-8"); write(out); out.close(); }
/** Write XML WBS data to the given file. */
Write XML WBS data to the given file
write
{ "repo_name": "superzadeh/processdash", "path": "teamdash/src/teamdash/wbs/WBSDataWriter.java", "license": "gpl-3.0", "size": 33045 }
[ "java.io.File", "java.io.IOException", "net.sourceforge.processdash.util.RobustFileWriter" ]
import java.io.File; import java.io.IOException; import net.sourceforge.processdash.util.RobustFileWriter;
import java.io.*; import net.sourceforge.processdash.util.*;
[ "java.io", "net.sourceforge.processdash" ]
java.io; net.sourceforge.processdash;
2,060,465
int updateByPrimaryKeySelective(Customer record);
int updateByPrimaryKeySelective(Customer record);
/** * This method was generated by MyBatis Generator. * This method corresponds to the database table m_crm_customer * * @mbggenerated Thu Jul 16 10:50:10 ICT 2015 */
This method was generated by MyBatis Generator. This method corresponds to the database table m_crm_customer
updateByPrimaryKeySelective
{ "repo_name": "uniteddiversity/mycollab", "path": "mycollab-services/src/main/java/com/esofthead/mycollab/module/crm/dao/CustomerMapper.java", "license": "agpl-3.0", "size": 4711 }
[ "com.esofthead.mycollab.module.crm.domain.Customer" ]
import com.esofthead.mycollab.module.crm.domain.Customer;
import com.esofthead.mycollab.module.crm.domain.*;
[ "com.esofthead.mycollab" ]
com.esofthead.mycollab;
2,523,411
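A hedged usage sketch for the generated mapper above. CustomerMapper and updateByPrimaryKeySelective come from the record; the SqlSessionFactory wiring and the Customer setter names are assumptions made only for illustration.

import org.apache.ibatis.session.SqlSession;
import org.apache.ibatis.session.SqlSessionFactory;

import com.esofthead.mycollab.module.crm.dao.CustomerMapper;
import com.esofthead.mycollab.module.crm.domain.Customer;

public class CustomerUpdateSketch {
    // sessionFactory is assumed to be built elsewhere from a MyBatis configuration
    static int updateName(SqlSessionFactory sessionFactory, int customerId, String newName) {
        try (SqlSession session = sessionFactory.openSession()) {
            CustomerMapper mapper = session.getMapper(CustomerMapper.class);
            Customer customer = new Customer();
            customer.setId(customerId);        // setter names are assumptions about the generated domain class
            customer.setAccountname(newName);  // only non-null fields are written by the *Selective update
            int rows = mapper.updateByPrimaryKeySelective(customer);
            session.commit();
            return rows;
        }
    }
}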
public boolean evaluate(int comparisonResult) { if (getIndex() == -1) { throw new BuildException("Comparison value not set."); } int[] i = comparisonResult < 0 ? LESS_INDEX : comparisonResult > 0 ? GREATER_INDEX : EQUAL_INDEX; return Arrays.binarySearch(i, getIndex()) >= 0; }
boolean function(int comparisonResult) { if (getIndex() == -1) { throw new BuildException(STR); } int[] i = comparisonResult < 0 ? LESS_INDEX : comparisonResult > 0 ? GREATER_INDEX : EQUAL_INDEX; return Arrays.binarySearch(i, getIndex()) >= 0; }
/** * Evaluate a comparison result as from Comparator.compare() or Comparable.compareTo(). * @param comparisonResult the result to evaluate. * @return true if the comparison result fell within the parameters of this Comparison. */
Evaluate a comparison result as from Comparator.compare() or Comparable.compareTo()
evaluate
{ "repo_name": "Mayo-WE01051879/mayosapp", "path": "Build/src/main/org/apache/tools/ant/types/Comparison.java", "license": "mit", "size": 3233 }
[ "java.util.Arrays", "org.apache.tools.ant.BuildException" ]
import java.util.Arrays; import org.apache.tools.ant.BuildException;
import java.util.*; import org.apache.tools.ant.*;
[ "java.util", "org.apache.tools" ]
java.util; org.apache.tools;
81,997
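A small standalone sketch of the index-table idea used by evaluate above: the sign of a comparison result is mapped onto per-operator index arrays and membership is tested with a binary search. The array contents here are illustrative, not Ant's actual tables.

import java.util.Arrays;

public class ComparisonSketch {
    // index 0 = "less", 1 = "equal", 2 = "greater"; each operator lists the indices it accepts
    private static final int[] LESS_EQUAL = {0, 1};   // behaves like "<="
    private static final int[] GREATER    = {2};      // behaves like ">"

    static boolean accepts(int[] allowedIndices, int comparisonResult) {
        int index = comparisonResult < 0 ? 0 : comparisonResult > 0 ? 2 : 1;
        return Arrays.binarySearch(allowedIndices, index) >= 0; // arrays must stay sorted
    }

    public static void main(String[] args) {
        System.out.println(accepts(LESS_EQUAL, "apple".compareTo("banana"))); // true, apple < banana
        System.out.println(accepts(GREATER, Integer.compare(3, 7)));          // false, 3 < 7
    }
}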
protected void setResponseStream(InputStream response) { this.response = response; }
void function(InputStream response) { this.response = response; }
/** * Sets the stream of the response from a delivery attempt * @param response The stream provided from the http connection * from a deliver attempt */
Sets the stream of the response from a delivery attempt
setResponseStream
{ "repo_name": "wpride/javarosa", "path": "j2me/communication/src/org/javarosa/service/transport/securehttp/AuthenticatedHttpTransportMessage.java", "license": "apache-2.0", "size": 13910 }
[ "java.io.InputStream" ]
import java.io.InputStream;
import java.io.*;
[ "java.io" ]
java.io;
2,127,778
static String formatQueryResult(Object result) { if (result == null) { return "null"; } else if (result == QueryService.UNDEFINED) { return "UNDEFINED"; } if (result instanceof SelectResults) { Collection<?> collection = ((SelectResults<?>)result).asList(); StringBuffer sb = new StringBuffer(); for (Object e: collection) { sb.append(e + "\n\t"); } return sb.toString(); } else { return result.toString(); } } class Runner implements Runnable { private Thread thread; private volatile boolean running = true;
static String formatQueryResult(Object result) { if (result == null) { return "null"; } else if (result == QueryService.UNDEFINED) { return STR; } if (result instanceof SelectResults) { Collection<?> collection = ((SelectResults<?>)result).asList(); StringBuffer sb = new StringBuffer(); for (Object e: collection) { sb.append(e + "\n\t"); } return sb.toString(); } else { return result.toString(); } } class Runner implements Runnable { private Thread thread; private volatile boolean running = true;
/** * Returns formatted query results for printing. */
Returns formatted query results for printing
formatQueryResult
{ "repo_name": "gemxd/gemfirexd-oss", "path": "gemfire-examples/src/osgi/java/cacheWorker/src/main/java/cacheworker/internal/CacheWorker.java", "license": "apache-2.0", "size": 5666 }
[ "com.gemstone.gemfire.cache.query.QueryService", "com.gemstone.gemfire.cache.query.SelectResults", "java.util.Collection" ]
import com.gemstone.gemfire.cache.query.QueryService; import com.gemstone.gemfire.cache.query.SelectResults; import java.util.Collection;
import com.gemstone.gemfire.cache.query.*; import java.util.*;
[ "com.gemstone.gemfire", "java.util" ]
com.gemstone.gemfire; java.util;
2,312,626
public List<String> groupIds() { return this.groupIds; }
List<String> function() { return this.groupIds; }
/** * Get the groupIds property: The groupIds property. * * @return the groupIds value. */
Get the groupIds property: The groupIds property
groupIds
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/models/DirectoryRoleTemplatesCheckMemberGroupsRequestBody.java", "license": "mit", "size": 2831 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
203,542
private EdmEntitySet loadEdmEntitySetFromEntityName(String entityName) { // Let's get the metadata for the resource EntityMetadata entityMetadata = metadata.getEntityMetadata(entityName); if (entityMetadata == null) throw new NotFoundException("Fail to find/load Entity Metadata for [" + entityName + "]"); // Lets build the EdmEntitySet form EntityMetadata Map<String, EdmComplexType.Builder> complexTypes = new HashMap<String, EdmComplexType.Builder>(); EdmEntityType.Builder entityType = getEdmTypeBuilder(entityMetadata, complexTypes, false); if (entityType != null) { EdmEntitySet.Builder bEntitySetBuilder = EdmEntitySet.newBuilder().setName(getEdmEntitySetName(entityName)).setEntityType(entityType); EdmEntitySet edmEntitySet = bEntitySetBuilder.build(); for(Map.Entry<String, EdmComplexType.Builder> entry: complexTypes.entrySet()) { this.nonSrvDocEdmComplexTypeMap.put(entry.getKey(), entry.getValue().build()); } // Append to the map nonSrvDocEdmEntitySetMap.put(getEdmEntitySetName(entityName), edmEntitySet); return edmEntitySet; } return null; }
EdmEntitySet function(String entityName) { EntityMetadata entityMetadata = metadata.getEntityMetadata(entityName); if (entityMetadata == null) throw new NotFoundException(STR + entityName + "]"); Map<String, EdmComplexType.Builder> complexTypes = new HashMap<String, EdmComplexType.Builder>(); EdmEntityType.Builder entityType = getEdmTypeBuilder(entityMetadata, complexTypes, false); if (entityType != null) { EdmEntitySet.Builder bEntitySetBuilder = EdmEntitySet.newBuilder().setName(getEdmEntitySetName(entityName)).setEntityType(entityType); EdmEntitySet edmEntitySet = bEntitySetBuilder.build(); for(Map.Entry<String, EdmComplexType.Builder> entry: complexTypes.entrySet()) { this.nonSrvDocEdmComplexTypeMap.put(entry.getKey(), entry.getValue().build()); } nonSrvDocEdmEntitySetMap.put(getEdmEntitySetName(entityName), edmEntitySet); return edmEntitySet; } return null; }
/** * Method to load EdmEntitySet if not loaded as yet * @param entityName * @return */
Method to load the EdmEntitySet if it has not been loaded yet
loadEdmEntitySetFromEntityName
{ "repo_name": "iris-scrum-1/IRIS", "path": "interaction-odata4j-ext/src/main/java/com/temenos/interaction/odataext/entity/MetadataOData4j.java", "license": "agpl-3.0", "size": 37302 }
[ "com.temenos.interaction.core.entity.EntityMetadata", "java.util.HashMap", "java.util.Map", "org.odata4j.edm.EdmComplexType", "org.odata4j.edm.EdmDataServices", "org.odata4j.edm.EdmEntitySet", "org.odata4j.edm.EdmEntityType", "org.odata4j.exceptions.NotFoundException" ]
import com.temenos.interaction.core.entity.EntityMetadata; import java.util.HashMap; import java.util.Map; import org.odata4j.edm.EdmComplexType; import org.odata4j.edm.EdmDataServices; import org.odata4j.edm.EdmEntitySet; import org.odata4j.edm.EdmEntityType; import org.odata4j.exceptions.NotFoundException;
import com.temenos.interaction.core.entity.*; import java.util.*; import org.odata4j.edm.*; import org.odata4j.exceptions.*;
[ "com.temenos.interaction", "java.util", "org.odata4j.edm", "org.odata4j.exceptions" ]
com.temenos.interaction; java.util; org.odata4j.edm; org.odata4j.exceptions;
2,442,794
@Override public void onBackPressed() { DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); if (drawer.isDrawerOpen(GravityCompat.START)) { drawer.closeDrawer(GravityCompat.START); } else { finish(); Intent menu = new Intent(SettingsActivity.this, StartActivity .class); startActivity(menu); } }
void function() { DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); if (drawer.isDrawerOpen(GravityCompat.START)) { drawer.closeDrawer(GravityCompat.START); } else { finish(); Intent menu = new Intent(SettingsActivity.this, StartActivity .class); startActivity(menu); } }
/** * return to startview */
return to startview
onBackPressed
{ "repo_name": "RobTain/Busfahrplan-Passau", "path": "Busfahrplan-Passau/app/src/main/java/com/robtain/busfahrplan_passau/SettingsActivity.java", "license": "mit", "size": 4118 }
[ "android.content.Intent", "android.support.v4.view.GravityCompat", "android.support.v4.widget.DrawerLayout" ]
import android.content.Intent; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout;
import android.content.*; import android.support.v4.view.*; import android.support.v4.widget.*;
[ "android.content", "android.support" ]
android.content; android.support;
2,916,020
public ResultSet getResultSet(String sql) {//throws SQLException { try { if(this.conn == null){ // Check if connection object already exists createDbConnection(); // If does not exist, create new connection } Statement statement = this.conn.createStatement(); return statement.executeQuery(sql); // Return ResultSet } catch (Exception e) { e.printStackTrace(); // debug } return null; }
ResultSet function(String sql) { try { if(this.conn == null){ createDbConnection(); } Statement statement = this.conn.createStatement(); return statement.executeQuery(sql); } catch (Exception e) { e.printStackTrace(); } return null; }
/** * Get SQL result set (data set) based on an SQL query * @param sql - SQL SELECT query * @return - ResultSet - java.sql.ResultSet object, contains results from SQL query argument * @throws SQLException */
Get SQL result set (data set) based on an SQL query
getResultSet
{ "repo_name": "mcalamosca/mdc80_SpaceInvadersDesktop", "path": "src/edu/pitt/is1017/spaceinvaders/DbUtilities.java", "license": "mit", "size": 4463 }
[ "java.sql.ResultSet", "java.sql.Statement" ]
import java.sql.ResultSet; import java.sql.Statement;
import java.sql.*;
[ "java.sql" ]
java.sql;
858,771
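A hedged sketch of how a caller might consume a result set like the one returned above, using plain JDBC with explicit resource management; the table and column names are made up.

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class ResultSetUsageSketch {
    // Equivalent query flow, but try-with-resources closes the Statement and ResultSet,
    // which the helper above does not do.
    static void printScores(Connection conn) throws SQLException {
        String sql = "SELECT name, score FROM players"; // hypothetical table and columns
        try (Statement statement = conn.createStatement();
             ResultSet rs = statement.executeQuery(sql)) {
            while (rs.next()) {
                System.out.println(rs.getString("name") + " -> " + rs.getInt("score"));
            }
        }
    }
}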
public void show() { if (debugMode) { MessageHandler.printToLogFile("Check Dialog: Goto next Error"); } dialog.setEnabled(false); dialog.setEnabled(true); if (dialogX < 0 || dialogY < 0) { Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); Dimension frameSize = dialog.getSize(); dialogX = screenSize.width / 2 - frameSize.width / 2; dialogY = screenSize.height / 2 - frameSize.height / 2; } dialog.setLocation(dialogX, dialogY); // documents.setLtDialog(this); isRunning = true; setInitialButtonState(); dialog.setAutoRequestFocus(true); dialog.setVisible(true); dialog.toFront(); initCursor(); runCheckForNextError(false); }
void function() { if (debugMode) { MessageHandler.printToLogFile(STR); } dialog.setEnabled(false); dialog.setEnabled(true); if (dialogX < 0 dialogY < 0) { Dimension screenSize = Toolkit.getDefaultToolkit().getScreenSize(); Dimension frameSize = dialog.getSize(); dialogX = screenSize.width / 2 - frameSize.width / 2; dialogY = screenSize.height / 2 - frameSize.height / 2; } dialog.setLocation(dialogX, dialogY); isRunning = true; setInitialButtonState(); dialog.setAutoRequestFocus(true); dialog.setVisible(true); dialog.toFront(); initCursor(); runCheckForNextError(false); }
/** * show the dialog */
show the dialog
show
{ "repo_name": "jimregan/languagetool", "path": "languagetool-office-extension/src/main/java/org/languagetool/openoffice/SpellAndGrammarCheckDialog.java", "license": "lgpl-2.1", "size": 96346 }
[ "java.awt.Dimension", "java.awt.Toolkit" ]
import java.awt.Dimension; import java.awt.Toolkit;
import java.awt.*;
[ "java.awt" ]
java.awt;
761,916
@Test public void testGetInvalidateWorkPctPerIteration_NegativeValue() { Configuration conf = new Configuration(); float blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); assertTrue(blocksInvalidateWorkPct > 0); conf.set(DFSConfigKeys. DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "-0.5f"); exception.expect(IllegalArgumentException.class); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); }
void function() { Configuration conf = new Configuration(); float blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); assertTrue(blocksInvalidateWorkPct > 0); conf.set(DFSConfigKeys. DFS_NAMENODE_INVALIDATE_WORK_PCT_PER_ITERATION, "-0.5f"); exception.expect(IllegalArgumentException.class); blocksInvalidateWorkPct = DFSUtil.getInvalidateWorkPctPerIteration(conf); }
/** * This testcase tests whether an IllegalArgumentException * will be thrown when a negative value is retrieved by * DFSUtil#getInvalidateWorkPctPerIteration */
This testcase tests whether an IllegalArgumentException will be thrown when a negative value is retrieved by DFSUtil#getInvalidateWorkPctPerIteration
testGetInvalidateWorkPctPerIteration_NegativeValue
{ "repo_name": "robzor92/hops", "path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestReplicationPolicy.java", "license": "apache-2.0", "size": 43280 }
[ "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hdfs.DFSConfigKeys", "org.apache.hadoop.hdfs.DFSUtil", "org.junit.Assert" ]
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.junit.Assert;
import org.apache.hadoop.conf.*; import org.apache.hadoop.hdfs.*; import org.junit.*;
[ "org.apache.hadoop", "org.junit" ]
org.apache.hadoop; org.junit;
1,005,291
private void extend() throws IOException { while (true) { long nowMsSinceEpoch = now(); List<String> assumeExpired = new ArrayList<>(); List<String> toBeExtended = new ArrayList<>(); List<String> toBeExpired = new ArrayList<>(); // Messages will be in increasing deadline order. for (Map.Entry<String, InFlightState> entry : inFlight.entrySet()) { if (entry.getValue().ackDeadlineMsSinceEpoch - (ackTimeoutMs * ACK_SAFETY_PCT) / 100 > nowMsSinceEpoch) { // All remaining messages don't need their ACKs to be extended. break; } if (entry.getValue().ackDeadlineMsSinceEpoch - ACK_TOO_LATE.getMillis() < nowMsSinceEpoch) { // Pubsub may have already considered this message to have expired. // If so it will (eventually) be made available on a future pull request. // If this message ends up being committed then it will be considered a duplicate // when re-pulled. assumeExpired.add(entry.getKey()); continue; } if (entry.getValue().requestTimeMsSinceEpoch + PROCESSING_TIMEOUT.getMillis() < nowMsSinceEpoch) { // This message has been in-flight for too long. // Give up on it, otherwise we risk extending its ACK indefinitely. toBeExpired.add(entry.getKey()); continue; } // Extend the ACK for this message. toBeExtended.add(entry.getKey()); if (toBeExtended.size() >= ACK_BATCH_SIZE) { // Enough for one batch. break; } } if (assumeExpired.isEmpty() && toBeExtended.isEmpty() && toBeExpired.isEmpty()) { // Nothing to be done. return; } if (!assumeExpired.isEmpty()) { // If we didn't make the ACK deadline assume expired and no longer in flight. numLateDeadlines.add(nowMsSinceEpoch, assumeExpired.size()); for (String ackId : assumeExpired) { inFlight.remove(ackId); } } if (!toBeExpired.isEmpty()) { // Expired messages are no longer considered in flight. numExpired.add(nowMsSinceEpoch, toBeExpired.size()); for (String ackId : toBeExpired) { inFlight.remove(ackId); } } if (!toBeExtended.isEmpty()) { // Pubsub extends acks from it's notion of current time. // We'll try to track that on our side, but note the deadlines won't necessarily agree. long newDeadlineMsSinceEpoch = nowMsSinceEpoch + (ackTimeoutMs * ACK_EXTENSION_PCT) / 100; for (String ackId : toBeExtended) { // Maintain increasing ack deadline order. InFlightState state = inFlight.remove(ackId); inFlight.put(ackId, new InFlightState(state.requestTimeMsSinceEpoch, newDeadlineMsSinceEpoch)); } // BLOCKs until extended. extendBatch(nowMsSinceEpoch, toBeExtended); } } }
void function() throws IOException { while (true) { long nowMsSinceEpoch = now(); List<String> assumeExpired = new ArrayList<>(); List<String> toBeExtended = new ArrayList<>(); List<String> toBeExpired = new ArrayList<>(); for (Map.Entry<String, InFlightState> entry : inFlight.entrySet()) { if (entry.getValue().ackDeadlineMsSinceEpoch - (ackTimeoutMs * ACK_SAFETY_PCT) / 100 > nowMsSinceEpoch) { break; } if (entry.getValue().ackDeadlineMsSinceEpoch - ACK_TOO_LATE.getMillis() < nowMsSinceEpoch) { assumeExpired.add(entry.getKey()); continue; } if (entry.getValue().requestTimeMsSinceEpoch + PROCESSING_TIMEOUT.getMillis() < nowMsSinceEpoch) { toBeExpired.add(entry.getKey()); continue; } toBeExtended.add(entry.getKey()); if (toBeExtended.size() >= ACK_BATCH_SIZE) { break; } } if (assumeExpired.isEmpty() && toBeExtended.isEmpty() && toBeExpired.isEmpty()) { return; } if (!assumeExpired.isEmpty()) { numLateDeadlines.add(nowMsSinceEpoch, assumeExpired.size()); for (String ackId : assumeExpired) { inFlight.remove(ackId); } } if (!toBeExpired.isEmpty()) { numExpired.add(nowMsSinceEpoch, toBeExpired.size()); for (String ackId : toBeExpired) { inFlight.remove(ackId); } } if (!toBeExtended.isEmpty()) { long newDeadlineMsSinceEpoch = nowMsSinceEpoch + (ackTimeoutMs * ACK_EXTENSION_PCT) / 100; for (String ackId : toBeExtended) { InFlightState state = inFlight.remove(ackId); inFlight.put(ackId, new InFlightState(state.requestTimeMsSinceEpoch, newDeadlineMsSinceEpoch)); } extendBatch(nowMsSinceEpoch, toBeExtended); } } }
/** * BLOCKING * Extend deadline for all messages which need it. * CAUTION: If extensions can't keep up with wallclock then we'll never return. */
BLOCKING Extend deadline for all messages which need it
extend
{ "repo_name": "jasonkuster/beam", "path": "sdks/java/core/src/main/java/org/apache/beam/sdk/io/PubsubUnboundedSource.java", "license": "apache-2.0", "size": 52609 }
[ "java.io.IOException", "java.util.ArrayList", "java.util.List", "java.util.Map" ]
import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map;
import java.io.*; import java.util.*;
[ "java.io", "java.util" ]
java.io; java.util;
238,038
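A stripped-down sketch of the bookkeeping pattern used by extend above: in-flight ACK ids are kept in increasing-deadline order, and each pass classifies the head of the map as expired or due for extension. Thresholds, types, and the missing batching/timeout cases are simplifications; this is not Beam's actual implementation.

import java.util.ArrayList;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class AckExtensionSketch {
    // ackId -> absolute deadline in ms; LinkedHashMap keeps increasing-deadline order
    private final Map<String, Long> inFlight = new LinkedHashMap<>();
    private static final long EXTEND_AHEAD_MS = 10_000;  // extend anything due within the next 10s
    private static final long NEW_DEADLINE_MS = 60_000;  // pushed out by 60s when extended

    List<String> collectDueForExtension(long nowMs) {
        List<String> toExtend = new ArrayList<>();
        Iterator<Map.Entry<String, Long>> it = inFlight.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry<String, Long> e = it.next();
            if (e.getValue() - EXTEND_AHEAD_MS > nowMs) {
                break;                       // later entries have even later deadlines
            }
            if (e.getValue() < nowMs) {
                it.remove();                 // already past the deadline: treat as expired
                continue;
            }
            toExtend.add(e.getKey());        // close to the deadline: needs an extension
        }
        for (String ackId : toExtend) {
            // re-insert so the map stays ordered by the new, later deadline
            inFlight.remove(ackId);
            inFlight.put(ackId, nowMs + NEW_DEADLINE_MS);
        }
        return toExtend;
    }
}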
@Override public boolean isNull( Object[] dataRow, int index ) throws KettleValueException { if ( dataRow == null ) { // I guess so... return true; } return getValueMeta( index ).isNull( dataRow[index] ); }
boolean function( Object[] dataRow, int index ) throws KettleValueException { if ( dataRow == null ) { return true; } return getValueMeta( index ).isNull( dataRow[index] ); }
/** * Determines whether a value in a row is null. A value is null when the object is null or when it's an empty String * * @param dataRow * The row of data * @param index * the index to reference * @return true if the value on the index is null. * @throws KettleValueException * in case there is a conversion error (only thrown in case of lazy conversion) */
Determines whether a value in a row is null. A value is null when the object is null or when it's an empty String
isNull
{ "repo_name": "ma459006574/pentaho-kettle", "path": "core/src/org/pentaho/di/core/row/RowMeta.java", "license": "apache-2.0", "size": 34343 }
[ "org.pentaho.di.core.exception.KettleValueException" ]
import org.pentaho.di.core.exception.KettleValueException;
import org.pentaho.di.core.exception.*;
[ "org.pentaho.di" ]
org.pentaho.di;
2,600,714
public static void cacheNativeFlags(ChromeApplication application) { cacheHerbFlavor(); InstantAppsHandler.getInstance(application).cacheInstantAppsEnabled( application.getApplicationContext()); }
static void function(ChromeApplication application) { cacheHerbFlavor(); InstantAppsHandler.getInstance(application).cacheInstantAppsEnabled( application.getApplicationContext()); }
/** * Caches flags that must take effect on startup but are set via native code. */
Caches flags that must take effect on startup but are set via native code
cacheNativeFlags
{ "repo_name": "danakj/chromium", "path": "chrome/android/java/src/org/chromium/chrome/browser/util/FeatureUtilities.java", "license": "bsd-3-clause", "size": 11610 }
[ "org.chromium.chrome.browser.ChromeApplication", "org.chromium.chrome.browser.InstantAppsHandler" ]
import org.chromium.chrome.browser.ChromeApplication; import org.chromium.chrome.browser.InstantAppsHandler;
import org.chromium.chrome.browser.*;
[ "org.chromium.chrome" ]
org.chromium.chrome;
2,623,485
@Override public HtmlData getInquiryUrl(BusinessObject bo, String propertyName) { HtmlData hRef = (new AccountBalanceByConsolidationInquirableImpl()).getInquiryUrl(bo, propertyName); return hRef; }
HtmlData function(BusinessObject bo, String propertyName) { HtmlData hRef = (new AccountBalanceByConsolidationInquirableImpl()).getInquiryUrl(bo, propertyName); return hRef; }
/** * Returns the inquiry url for a result field. * * @param bo the business object instance to build the urls for * @param propertyName the property which links to an inquirable * @return String url to inquiry */
Returns the inquiry url for a result field
getInquiryUrl
{ "repo_name": "bhutchinson/kfs", "path": "kfs-core/src/main/java/org/kuali/kfs/gl/businessobject/lookup/AccountBalanceByConsolidationLookupableHelperServiceImpl.java", "license": "agpl-3.0", "size": 5945 }
[ "org.kuali.kfs.gl.businessobject.inquiry.AccountBalanceByConsolidationInquirableImpl", "org.kuali.rice.kns.lookup.HtmlData", "org.kuali.rice.krad.bo.BusinessObject" ]
import org.kuali.kfs.gl.businessobject.inquiry.AccountBalanceByConsolidationInquirableImpl; import org.kuali.rice.kns.lookup.HtmlData; import org.kuali.rice.krad.bo.BusinessObject;
import org.kuali.kfs.gl.businessobject.inquiry.*; import org.kuali.rice.kns.lookup.*; import org.kuali.rice.krad.bo.*;
[ "org.kuali.kfs", "org.kuali.rice" ]
org.kuali.kfs; org.kuali.rice;
2,041,776
public static SmileDeltaParameters of( double expiry, double atmVolatility, DoubleArray delta, DoubleArray riskReversal, DoubleArray strangle, List<ParameterMetadata> parameterMetadata) { ArgChecker.notNull(delta, "delta"); ArgChecker.notNull(riskReversal, "riskReversal"); ArgChecker.notNull(strangle, "strangle"); int nbDelta = delta.size(); ArgChecker.isTrue(nbDelta == riskReversal.size(), "Length of delta {} should be equal to length of riskReversal {}", delta.size(), riskReversal.size()); ArgChecker.isTrue(nbDelta == strangle.size(), "Length of delta {} should be equal to length of strangle {} ", delta.size(), strangle.size()); double[] volatility = new double[2 * nbDelta + 1]; volatility[nbDelta] = atmVolatility; for (int i = 0; i < nbDelta; i++) { volatility[i] = strangle.get(i) + atmVolatility - riskReversal.get(i) / 2.0; // Put volatility[2 * nbDelta - i] = strangle.get(i) + atmVolatility + riskReversal.get(i) / 2.0; // Call } return of(expiry, delta, DoubleArray.ofUnsafe(volatility), parameterMetadata); }
static SmileDeltaParameters function( double expiry, double atmVolatility, DoubleArray delta, DoubleArray riskReversal, DoubleArray strangle, List<ParameterMetadata> parameterMetadata) { ArgChecker.notNull(delta, "delta"); ArgChecker.notNull(riskReversal, STR); ArgChecker.notNull(strangle, STR); int nbDelta = delta.size(); ArgChecker.isTrue(nbDelta == riskReversal.size(), STR, delta.size(), riskReversal.size()); ArgChecker.isTrue(nbDelta == strangle.size(), STR, delta.size(), strangle.size()); double[] volatility = new double[2 * nbDelta + 1]; volatility[nbDelta] = atmVolatility; for (int i = 0; i < nbDelta; i++) { volatility[i] = strangle.get(i) + atmVolatility - riskReversal.get(i) / 2.0; volatility[2 * nbDelta - i] = strangle.get(i) + atmVolatility + riskReversal.get(i) / 2.0; } return of(expiry, delta, DoubleArray.ofUnsafe(volatility), parameterMetadata); }
/** * Obtains an instance from market data at-the-money, delta, risk-reversal and strangle. * * @param expiry the time to expiry associated to the data * @param atmVolatility the at-the-money volatility * @param delta the delta of the different data points, must be positive and sorted in ascending order, * the put will have as delta the opposite of the numbers * @param riskReversal the risk reversal volatility figures, in the same order as the delta * @param strangle the strangle volatility figures, in the same order as the delta * @param parameterMetadata the parameter metadata * @return the smile definition */
Obtains an instance from market data at-the-money, delta, risk-reversal and strangle
of
{ "repo_name": "OpenGamma/Strata", "path": "modules/pricer/src/main/java/com/opengamma/strata/pricer/fxopt/SmileDeltaParameters.java", "license": "apache-2.0", "size": 27328 }
[ "com.opengamma.strata.collect.ArgChecker", "com.opengamma.strata.collect.array.DoubleArray", "com.opengamma.strata.market.param.ParameterMetadata", "java.util.List" ]
import com.opengamma.strata.collect.ArgChecker; import com.opengamma.strata.collect.array.DoubleArray; import com.opengamma.strata.market.param.ParameterMetadata; import java.util.List;
import com.opengamma.strata.collect.*; import com.opengamma.strata.collect.array.*; import com.opengamma.strata.market.param.*; import java.util.*;
[ "com.opengamma.strata", "java.util" ]
com.opengamma.strata; java.util;
270,946
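A worked numeric check of the put/call formulas in the factory above (putVol = strangle + atm - riskReversal/2, callVol = strangle + atm + riskReversal/2), using made-up market numbers.

public class SmileFormulaCheck {
    public static void main(String[] args) {
        double atm = 0.10;            // 10% at-the-money volatility (illustrative)
        double riskReversal = 0.02;
        double strangle = 0.005;

        double putVol  = strangle + atm - riskReversal / 2.0;  // 0.005 + 0.10 - 0.01 = 0.095
        double callVol = strangle + atm + riskReversal / 2.0;  // 0.005 + 0.10 + 0.01 = 0.115

        // The resulting smile array is ordered [puts..., atm, ...calls], e.g. {0.095, 0.10, 0.115}
        System.out.printf("put=%.3f atm=%.3f call=%.3f%n", putVol, atm, callVol);
    }
}

Note that the pair recovers the quoted figures: callVol - putVol equals the risk reversal (0.02), and the average of the two minus atm equals the strangle (0.005).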
public static void blockUntilAllComplete(boolean throwTaskException, Future<?>... waitFor) throws Throwable { LinkedList<Future<?>> running = new LinkedList<Future<?>>(); for (Future<?> f : waitFor) running.add(f); blockUntilAllComplete(throwTaskException, running); }
static void function(boolean throwTaskException, Future<?>... waitFor) throws Throwable { LinkedList<Future<?>> running = new LinkedList<Future<?>>(); for (Future<?> f : waitFor) running.add(f); blockUntilAllComplete(throwTaskException, running); }
/** * Blocks until the runnables for the given Futures complete. * * Optionally rethrows for issues encountered by the tasks. * * @param waitFor * @return * @throws Throwable */
Blocks until the runnables for the given Futures complete. Optionally rethrows for issues encountered by the tasks
blockUntilAllComplete
{ "repo_name": "powerofpi/FlowMiner", "path": "edu.iastate.flowminer.io/src/edu/iastate/flowminer/io/common/ThreadPool.java", "license": "mit", "size": 3757 }
[ "java.util.LinkedList", "java.util.concurrent.Future" ]
import java.util.LinkedList; import java.util.concurrent.Future;
import java.util.*; import java.util.concurrent.*;
[ "java.util" ]
java.util;
2,692,076
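A hedged usage sketch: submitting a few tasks to an executor and handing the resulting Futures to a helper like the one above. The waiting and rethrow logic is simplified to a plain get() loop, so the real helper's semantics may differ.

import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class BlockUntilCompleteSketch {
    public static void main(String[] args) throws Throwable {
        ExecutorService pool = Executors.newFixedThreadPool(2);
        try {
            Future<?> a = pool.submit(() -> System.out.println("task A"));
            Future<?> b = pool.submit(() -> System.out.println("task B"));
            waitForAll(true, a, b); // analogous to blockUntilAllComplete(true, a, b)
        } finally {
            pool.shutdown();
        }
    }

    // Simplified stand-in: block on each future, optionally rethrowing the task's own exception
    static void waitForAll(boolean throwTaskException, Future<?>... futures) throws Throwable {
        for (Future<?> f : futures) {
            try {
                f.get();
            } catch (ExecutionException e) {
                if (throwTaskException) {
                    throw e.getCause();
                }
            }
        }
    }
}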
protected static BufferedImage scaleImageIncrementally(BufferedImage src, final int targetWidth, final int targetHeight, final Method scalingMethod, final Object interpolationHintValue) { boolean hasReassignedSrc = false; int incrementCount = 0; int currentWidth = src.getWidth(); int currentHeight = src.getHeight(); final int fraction = (scalingMethod == Method.ULTRA_QUALITY ? 7 : 2); do { final int prevCurrentWidth = currentWidth; final int prevCurrentHeight = currentHeight; if (currentWidth > targetWidth) { currentWidth -= (currentWidth / fraction); if (currentWidth < targetWidth) { currentWidth = targetWidth; } } if (currentHeight > targetHeight) { currentHeight -= (currentHeight / fraction); if (currentHeight < targetHeight) { currentHeight = targetHeight; } } if (prevCurrentWidth == currentWidth && prevCurrentHeight == currentHeight) { break; } if (DEBUG) { log(2, "Scaling from [%d x %d] to [%d x %d]", prevCurrentWidth, prevCurrentHeight, currentWidth, currentHeight); } // Render the incremental scaled image. final BufferedImage incrementalImage = scaleImage(src, currentWidth, currentHeight, interpolationHintValue); if (hasReassignedSrc) { src.flush(); } src = incrementalImage; hasReassignedSrc = true; // Track how many times we go through this cycle to scale the image. incrementCount++; } while (currentWidth != targetWidth || currentHeight != targetHeight); if (DEBUG) { log(2, "Incrementally Scaled Image in %d steps.", incrementCount); } return src; }
static BufferedImage function(BufferedImage src, final int targetWidth, final int targetHeight, final Method scalingMethod, final Object interpolationHintValue) { boolean hasReassignedSrc = false; int incrementCount = 0; int currentWidth = src.getWidth(); int currentHeight = src.getHeight(); final int fraction = (scalingMethod == Method.ULTRA_QUALITY ? 7 : 2); do { final int prevCurrentWidth = currentWidth; final int prevCurrentHeight = currentHeight; if (currentWidth > targetWidth) { currentWidth -= (currentWidth / fraction); if (currentWidth < targetWidth) { currentWidth = targetWidth; } } if (currentHeight > targetHeight) { currentHeight -= (currentHeight / fraction); if (currentHeight < targetHeight) { currentHeight = targetHeight; } } if (prevCurrentWidth == currentWidth && prevCurrentHeight == currentHeight) { break; } if (DEBUG) { log(2, STR, prevCurrentWidth, prevCurrentHeight, currentWidth, currentHeight); } final BufferedImage incrementalImage = scaleImage(src, currentWidth, currentHeight, interpolationHintValue); if (hasReassignedSrc) { src.flush(); } src = incrementalImage; hasReassignedSrc = true; incrementCount++; } while (currentWidth != targetWidth currentHeight != targetHeight); if (DEBUG) { log(2, STR, incrementCount); } return src; }
/** * Used to implement Chris Campbell's incremental-scaling algorithm: <a * href="http://today.java.net/pub/a/today/2007/04/03/perils * -of-image-getscaledinstance * .html">http://today.java.net/pub/a/today/2007/04/03/perils * -of-image-getscaledinstance.html</a>. * <p/> * Modifications to the original algorithm are variable names and comments * added for clarity and the hard-coding of using BICUBIC interpolation as * well as the explicit "flush()" operation on the interim BufferedImage * instances to avoid resource leaking. * * @param src * The image that will be scaled. * @param targetWidth * The target width for the scaled image. * @param targetHeight * The target height for the scaled image. * @param scalingMethod * The scaling method specified by the user (or calculated by * imgscalr) to use for this incremental scaling operation. * @param interpolationHintValue * The {@link RenderingHints} interpolation value used to * indicate the method that {@link Graphics2D} should use when * scaling the image. * * @return an image scaled to the given dimensions using the given rendering * hint. */
Used to implement Chris Campbell's incremental-scaling algorithm: http://today.java.net/pub/a/today/2007/04/03/perils-of-image-getscaledinstance.html. Modifications to the original algorithm are variable names and comments added for clarity, the hard-coding of BICUBIC interpolation, and the explicit "flush()" operation on the interim BufferedImage instances to avoid resource leaking
scaleImageIncrementally
{ "repo_name": "foto-andreas/slideshow", "path": "src/main/java/de/schrell/slides/Scalr.java", "license": "agpl-3.0", "size": 101975 }
[ "java.awt.image.BufferedImage" ]
import java.awt.image.BufferedImage;
import java.awt.image.*;
[ "java.awt" ]
java.awt;
1,869,314
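A compact sketch of the single-step helper the incremental loop above relies on: draw the source into a smaller BufferedImage with a chosen interpolation hint. This is the generic Graphics2D pattern, not imgscalr's exact scaleImage implementation.

import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.image.BufferedImage;

public class ScaleStepSketch {
    static BufferedImage scaleOnce(BufferedImage src, int width, int height) {
        BufferedImage result = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = result.createGraphics();
        try {
            g.setRenderingHint(RenderingHints.KEY_INTERPOLATION,
                    RenderingHints.VALUE_INTERPOLATION_BICUBIC);
            g.drawImage(src, 0, 0, width, height, null); // scale the source into the target bounds
        } finally {
            g.dispose(); // always release the native graphics resources
        }
        return result;
    }
}

Calling this repeatedly with gradually shrinking dimensions, as the loop above does, generally produces a cleaner downscale than a single large jump.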
@Override public boolean deleteOlderThan(int days) { try { // Days must be negative int dayCount = days > 0 ? days * -1 : days; // Create a calendar object with today date. Calendar calendar = Calendar.getInstance(); // Move calendar backwards according to the given day count calendar.add(Calendar.DATE, dayCount); LOGGER.info("Delete documents older than \"{}\" days from \"{}\" collection.", calendar.getTime(), Constants.TABLE_HISTORICAL_STATE); MongoDatabase db = mongoClient.getDatabase(Constants.DB_NAME); MongoCollection table = db.getCollection(Constants.TABLE_HISTORICAL_STATE); Document document = new Document(); document.put("$lt", calendar.getTime()); Bson query = new Document(Constants.COLUMN_CREATED_DATE, document); DeleteResult deleteResult = table.deleteMany(query); LOGGER.info("Deleted {} documents from \"{}\" collection.", deleteResult.getDeletedCount(), Constants.TABLE_HISTORICAL_STATE); } catch (Exception ex) { LOGGER.error(ex.getMessage(), ex); return false; } return true; }
boolean function(int days) { try { int dayCount = days > 0 ? days * -1 : days; Calendar calendar = Calendar.getInstance(); calendar.add(Calendar.DATE, dayCount); LOGGER.info(STR{}\STR{}\STR, calendar.getTime(), Constants.TABLE_HISTORICAL_STATE); MongoDatabase db = mongoClient.getDatabase(Constants.DB_NAME); MongoCollection table = db.getCollection(Constants.TABLE_HISTORICAL_STATE); Document document = new Document(); document.put("$lt", calendar.getTime()); Bson query = new Document(Constants.COLUMN_CREATED_DATE, document); DeleteResult deleteResult = table.deleteMany(query); LOGGER.info(STR{}\STR, deleteResult.getDeletedCount(), Constants.TABLE_HISTORICAL_STATE); } catch (Exception ex) { LOGGER.error(ex.getMessage(), ex); return false; } return true; }
/** * Deletes all the entries older than the given days from the historical * state collection. * * @param days number of days * @return true if and only if the entries were deleted successfully, * otherwise false */
Deletes all the entries older than the given days from the historical state collection
deleteOlderThan
{ "repo_name": "petkivim/xrde2e", "path": "src/client/src/main/java/com/pkrete/xrde2e/client/mongodb/MongoDbManager.java", "license": "mit", "size": 10742 }
[ "com.mongodb.client.MongoCollection", "com.mongodb.client.MongoDatabase", "com.mongodb.client.result.DeleteResult", "com.pkrete.xrde2e.common.util.Constants", "java.util.Calendar", "org.bson.Document", "org.bson.conversions.Bson" ]
import com.mongodb.client.MongoCollection; import com.mongodb.client.MongoDatabase; import com.mongodb.client.result.DeleteResult; import com.pkrete.xrde2e.common.util.Constants; import java.util.Calendar; import org.bson.Document; import org.bson.conversions.Bson;
import com.mongodb.client.*; import com.mongodb.client.result.*; import com.pkrete.xrde2e.common.util.*; import java.util.*; import org.bson.*; import org.bson.conversions.*;
[ "com.mongodb.client", "com.pkrete.xrde2e", "java.util", "org.bson", "org.bson.conversions" ]
com.mongodb.client; com.pkrete.xrde2e; java.util; org.bson; org.bson.conversions;
2,112,502
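A focused sketch of the cutoff-date arithmetic and the $lt filter built above. The "createdDate" field name stands in for Constants.COLUMN_CREATED_DATE and is an assumption; the Document-based filter mirrors the one in the record.

import java.util.Calendar;
import java.util.Date;

import org.bson.Document;
import org.bson.conversions.Bson;

public class OlderThanFilterSketch {
    // Builds the same kind of "createdDate < (today - days)" filter the method above uses.
    static Bson olderThan(int days, String dateColumn) {
        int dayCount = days > 0 ? -days : days;      // Calendar.add needs a negative offset
        Calendar calendar = Calendar.getInstance();
        calendar.add(Calendar.DATE, dayCount);
        Date cutoff = calendar.getTime();
        return new Document(dateColumn, new Document("$lt", cutoff));
    }

    public static void main(String[] args) {
        // e.g. collection.deleteMany(olderThan(30, "createdDate")) on a MongoCollection<Document>
        System.out.println(olderThan(30, "createdDate"));
    }
}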
public void setExternalTestDir( LocationManager locationManager ) { setDependency( locationManager, "externalDir", EXT_TEST_DIR, String.class ); }
void function( LocationManager locationManager ) { setDependency( locationManager, STR, EXT_TEST_DIR, String.class ); }
/** * Injects the externalDir property of LocationManager to * user.home/dhis2_test_dir. LocationManager dependency must be retrieved * from the context up front. * * @param locationManager The LocationManager to be injected with the * external directory. */
Injects the externalDir property of LocationManager to user.home/dhis2_test_dir. LocationManager dependency must be retrieved from the context up front
setExternalTestDir
{ "repo_name": "EyeSeeTea/dhis2", "path": "dhis-2/dhis-support/dhis-support-test/src/main/java/org/hisp/dhis/DhisConvenienceTest.java", "license": "gpl-3.0", "size": 66730 }
[ "org.hisp.dhis.external.location.LocationManager" ]
import org.hisp.dhis.external.location.LocationManager;
import org.hisp.dhis.external.location.*;
[ "org.hisp.dhis" ]
org.hisp.dhis;
1,184,165
public Map[] agencyBannerStatistics(Integer id, Date startDate) throws XmlRpcException, IOException { return vectorToArrayMaps(execute(AGENCY_BANNER_STATISTICS_METHOD, id, startDate)); }
Map[] function(Integer id, Date startDate) throws XmlRpcException, IOException { return vectorToArrayMaps(execute(AGENCY_BANNER_STATISTICS_METHOD, id, startDate)); }
/** * Agency banner statistics. * * @param id the id * @param startDate the start date * * @return the Map[] * * @throws XmlRpcException, IOException the xml rpc exception */
Agency banner statistics
agencyBannerStatistics
{ "repo_name": "xvip87/a45435345345", "path": "lib/xmlrpc/java/ApacheLib2/org/openads/proxy/AgencyService.java", "license": "gpl-2.0", "size": 11776 }
[ "java.io.IOException", "java.util.Date", "java.util.Map", "org.apache.xmlrpc.XmlRpcException" ]
import java.io.IOException; import java.util.Date; import java.util.Map; import org.apache.xmlrpc.XmlRpcException;
import java.io.*; import java.util.*; import org.apache.xmlrpc.*;
[ "java.io", "java.util", "org.apache.xmlrpc" ]
java.io; java.util; org.apache.xmlrpc;
2,493,324
public String getAuthorizationUri() { return this.authorizationUri; } /** * Returns the {@link AuthorizationGrantType grant type}. * * @return the {@link AuthorizationGrantType}
String function() { return this.authorizationUri; } /** * Returns the {@link AuthorizationGrantType grant type}. * * @return the {@link AuthorizationGrantType}
/** * Returns the uri for the authorization endpoint. * * @return the uri for the authorization endpoint */
Returns the uri for the authorization endpoint
getAuthorizationUri
{ "repo_name": "eddumelendez/spring-security", "path": "oauth2/oauth2-core/src/main/java/org/springframework/security/oauth2/core/endpoint/OAuth2AuthorizationRequest.java", "license": "apache-2.0", "size": 12751 }
[ "org.springframework.security.oauth2.core.AuthorizationGrantType" ]
import org.springframework.security.oauth2.core.AuthorizationGrantType;
import org.springframework.security.oauth2.core.*;
[ "org.springframework.security" ]
org.springframework.security;
1,106,181
@Test void testAddFormatNull() { assertThrows(() -> AudioFactory.addFormat(null), "Unexpected null argument !"); }
void testAddFormatNull() { assertThrows(() -> AudioFactory.addFormat(null), STR); }
/** * Test add format <code>null</code>. */
Test add format <code>null</code>
testAddFormatNull
{ "repo_name": "b3dgs/lionengine", "path": "lionengine-core/src/test/java/com/b3dgs/lionengine/audio/AudioFactoryTest.java", "license": "gpl-3.0", "size": 5969 }
[ "com.b3dgs.lionengine.UtilAssert" ]
import com.b3dgs.lionengine.UtilAssert;
import com.b3dgs.lionengine.*;
[ "com.b3dgs.lionengine" ]
com.b3dgs.lionengine;
140,650
@Test public void testReadKillCursors() throws IOException, InterruptedException, TimeoutException { final ByteArrayOutputStream out = new ByteArrayOutputStream(); final BsonOutputStream bout = new BsonOutputStream(out); final KillCursors killMsg = new KillCursors(new long[] { 1234 }, ReadPreference.PRIMARY); killMsg.write(0, bout); ourServer.setReplies(Arrays.asList(out.toByteArray())); connect(); assertTrue("Should have connected to the server.", ourServer.waitForClient(TimeUnit.SECONDS.toMillis(10))); final FutureReplyCallback future = new FutureReplyCallback(); final GetLastError error = new GetLastError("fo", false, false, 0, 0); myTestConnection.send(error, future); // Wake up the server. assertTrue("Should receive the request after flush.", ourServer.waitForRequest(1, TimeUnit.SECONDS.toMillis(10))); try { future.get(1, TimeUnit.SECONDS); fail("Should have timedout waiting for a reply."); } catch (final ExecutionException te) { // Good. assertThat(te.getCause(), instanceOf(MongoDbException.class)); assertThat(te.getCause().getCause(), instanceOf(StreamCorruptedException.class)); } }
void function() throws IOException, InterruptedException, TimeoutException { final ByteArrayOutputStream out = new ByteArrayOutputStream(); final BsonOutputStream bout = new BsonOutputStream(out); final KillCursors killMsg = new KillCursors(new long[] { 1234 }, ReadPreference.PRIMARY); killMsg.write(0, bout); ourServer.setReplies(Arrays.asList(out.toByteArray())); connect(); assertTrue(STR, ourServer.waitForClient(TimeUnit.SECONDS.toMillis(10))); final FutureReplyCallback future = new FutureReplyCallback(); final GetLastError error = new GetLastError("fo", false, false, 0, 0); myTestConnection.send(error, future); assertTrue(STR, ourServer.waitForRequest(1, TimeUnit.SECONDS.toMillis(10))); try { future.get(1, TimeUnit.SECONDS); fail(STR); } catch (final ExecutionException te) { assertThat(te.getCause(), instanceOf(MongoDbException.class)); assertThat(te.getCause().getCause(), instanceOf(StreamCorruptedException.class)); } }
/** * Test method for {@link TransportConnection}. * * @throws IOException * On a failure connecting to the Mock MongoDB server. * @throws TimeoutException * On a failure waiting for a reply. * @throws InterruptedException * On a failure waiting for a reply. */
Test method for <code>TransportConnection</code>
testReadKillCursors
{ "repo_name": "allanbank/mongodb-async-driver", "path": "src/test/java/com/allanbank/mongodb/client/connection/socket/AbstractTransportConnectionTestCases.java", "license": "apache-2.0", "size": 125000 }
[ "com.allanbank.mongodb.MongoDbException", "com.allanbank.mongodb.ReadPreference", "com.allanbank.mongodb.bson.io.BsonOutputStream", "com.allanbank.mongodb.client.callback.FutureReplyCallback", "com.allanbank.mongodb.client.message.GetLastError", "com.allanbank.mongodb.client.message.KillCursors", "java.io.ByteArrayOutputStream", "java.io.IOException", "java.io.StreamCorruptedException", "java.util.Arrays", "java.util.concurrent.ExecutionException", "java.util.concurrent.TimeUnit", "java.util.concurrent.TimeoutException", "org.hamcrest.Matchers", "org.junit.Assert" ]
import com.allanbank.mongodb.MongoDbException; import com.allanbank.mongodb.ReadPreference; import com.allanbank.mongodb.bson.io.BsonOutputStream; import com.allanbank.mongodb.client.callback.FutureReplyCallback; import com.allanbank.mongodb.client.message.GetLastError; import com.allanbank.mongodb.client.message.KillCursors; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.StreamCorruptedException; import java.util.Arrays; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import org.hamcrest.Matchers; import org.junit.Assert;
import com.allanbank.mongodb.*; import com.allanbank.mongodb.bson.io.*; import com.allanbank.mongodb.client.callback.*; import com.allanbank.mongodb.client.message.*; import java.io.*; import java.util.*; import java.util.concurrent.*; import org.hamcrest.*; import org.junit.*;
[ "com.allanbank.mongodb", "java.io", "java.util", "org.hamcrest", "org.junit" ]
com.allanbank.mongodb; java.io; java.util; org.hamcrest; org.junit;
2,402,710
public synchronized void down() { int ya=point.getBrettY()+1; int xa=point.getBrettX(); //System.out.println("DOWN: x:"+xa+" y:"+ya+" | Playerx:"+x+" Playery:"+y); if(mapn.get(ya).get(xa).getStein() == 0) point.setY(point.getY()); else point.setY(point.getY()+Variables.BLOCKHEIGHT); try { super.setImage(new Image(Variables.res+"p.png").getSubImage(0,0,95,116)); } catch (SlickException e) { e.printStackTrace(); } }
synchronized void function() { int ya=point.getBrettY()+1; int xa=point.getBrettX(); if(mapn.get(ya).get(xa).getStein() == 0) point.setY(point.getY()); else point.setY(point.getY()+Variables.BLOCKHEIGHT); try { super.setImage(new Image(Variables.res+"p.png").getSubImage(0,0,95,116)); } catch (SlickException e) { e.printStackTrace(); } }
/*** * Description: Button Down = move down */
Description: Button Down = move down
down
{ "repo_name": "MichaelJ2/big-buum-man", "path": "source/big-buum-man-server/src/main/java/at/big_buum_man/server/gui/objects/Player.java", "license": "gpl-3.0", "size": 5925 }
[ "at.big_buum_man.server.gui.helper.Variables", "org.newdawn.slick.Image", "org.newdawn.slick.SlickException" ]
import at.big_buum_man.server.gui.helper.Variables; import org.newdawn.slick.Image; import org.newdawn.slick.SlickException;
import at.big_buum_man.server.gui.helper.*; import org.newdawn.slick.*;
[ "at.big_buum_man.server", "org.newdawn.slick" ]
at.big_buum_man.server; org.newdawn.slick;
2,506,131
public static boolean isODKTablesInstanceDataDirectory(File directory) { String dirPath = directory.getAbsolutePath(); if (dirPath.startsWith(Collect.ODK_ROOT)) { dirPath = dirPath.substring(Collect.ODK_ROOT.length()); String[] parts = dirPath.split(File.separator); // [appName, instances, tableId, instanceId ] if (parts.length == 4 && parts[1].equals("instances")) { return true; } } return false; }
static boolean function(File directory) { String dirPath = directory.getAbsolutePath(); if (dirPath.startsWith(Collect.ODK_ROOT)) { dirPath = dirPath.substring(Collect.ODK_ROOT.length()); String[] parts = dirPath.split(File.separator); if (parts.length == 4 && parts[1].equals(STR)) { return true; } } return false; }
/** * Predicate that tests whether a directory path might refer to an * ODK Tables instance data directory (e.g., for media attachments). */
Predicate that tests whether a directory path might refer to an ODK Tables instance data directory (e.g., for media attachments)
isODKTablesInstanceDataDirectory
{ "repo_name": "mapkon/collect", "path": "collect_app/src/main/java/org/odk/collect/android/application/Collect.java", "license": "apache-2.0", "size": 8863 }
[ "java.io.File" ]
import java.io.File;
import java.io.*;
[ "java.io" ]
java.io;
2,275,835
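A self-contained sketch of the path test above, with a hard-coded root standing in for Collect.ODK_ROOT (assumed to end with a separator, matching the record's comment about the four expected path segments).

import java.io.File;

public class InstanceDirCheckSketch {
    // Stand-in for Collect.ODK_ROOT; the real value is defined by the app.
    private static final String ROOT = "/sdcard/odk/";

    static boolean looksLikeInstanceDataDir(File directory) {
        String dirPath = directory.getAbsolutePath();
        if (!dirPath.startsWith(ROOT)) {
            return false;
        }
        // Expect <root><appName>/instances/<tableId>/<instanceId>
        String[] parts = dirPath.substring(ROOT.length()).split(File.separator);
        return parts.length == 4 && "instances".equals(parts[1]);
    }

    public static void main(String[] args) {
        System.out.println(looksLikeInstanceDataDir(
                new File("/sdcard/odk/myapp/instances/tableA/uuid123"))); // true on a '/' filesystem
    }
}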
@Test public void testT1RV9D2_T1LV4D7() { test_id = getTestId("T1RV9D2", "T1LV4D7", "246"); String src = selectTRVD("T1RV9D2"); String dest = selectTLVD("T1LV4D7"); String result = "."; try { result = TRVD_TLVD_Action(src, dest); } catch (RecognitionException e) { e.printStackTrace(); } catch (TokenStreamException e) { e.printStackTrace(); } assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result)); GraphicalEditor editor = getActiveEditor(); if (editor != null) { validateOrGenerateResults(editor, generateResults); } }
void function() { test_id = getTestId(STR, STR, "246"); String src = selectTRVD(STR); String dest = selectTLVD(STR); String result = "."; try { result = TRVD_TLVD_Action(src, dest); } catch (RecognitionException e) { e.printStackTrace(); } catch (TokenStreamException e) { e.printStackTrace(); } assertTrue(ParamFailure2, checkResult_ParamFailure2(src, dest, result)); GraphicalEditor editor = getActiveEditor(); if (editor != null) { validateOrGenerateResults(editor, generateResults); } }
/** * Perform the test for the given matrix column (T1RV9D2) and row (T1LV4D7). * */
Perform the test for the given matrix column (T1RV9D2) and row (T1LV4D7)
testT1RV9D2_T1LV4D7
{ "repo_name": "jason-rhodes/bridgepoint", "path": "src/org.xtuml.bp.als.oal.test/src/org/xtuml/bp/als/oal/test/SingleDimensionFixedArrayAssigmentTest_16_Generics.java", "license": "apache-2.0", "size": 186177 }
[ "org.xtuml.bp.ui.graphics.editor.GraphicalEditor" ]
import org.xtuml.bp.ui.graphics.editor.GraphicalEditor;
import org.xtuml.bp.ui.graphics.editor.*;
[ "org.xtuml.bp" ]
org.xtuml.bp;
573,048
protected TextView createDefaultTabView(Context context) { TextView textView = new TextView(context); textView.setGravity(Gravity.CENTER); textView.setSingleLine(); textView.setEllipsize(TextUtils.TruncateAt.END); textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP); textView.setTypeface(Typeface.DEFAULT_BOLD); textView.setLayoutParams(new LinearLayout.LayoutParams( ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); textView.setTextColor(Color.WHITE); int hpadding = (int) (TAB_VIEW_HORIZONTAL_PADDING_DIPS * getResources().getDisplayMetrics().density); int vpadding = (int) (TAB_VIEW_VERTICAL_PADDING_DIPS * getResources().getDisplayMetrics().density); textView.setPadding(hpadding, vpadding, hpadding, vpadding); return textView; }
TextView function(Context context) { TextView textView = new TextView(context); textView.setGravity(Gravity.CENTER); textView.setSingleLine(); textView.setEllipsize(TextUtils.TruncateAt.END); textView.setTextSize(TypedValue.COMPLEX_UNIT_SP, TAB_VIEW_TEXT_SIZE_SP); textView.setTypeface(Typeface.DEFAULT_BOLD); textView.setLayoutParams(new LinearLayout.LayoutParams( ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT)); textView.setTextColor(Color.WHITE); int hpadding = (int) (TAB_VIEW_HORIZONTAL_PADDING_DIPS * getResources().getDisplayMetrics().density); int vpadding = (int) (TAB_VIEW_VERTICAL_PADDING_DIPS * getResources().getDisplayMetrics().density); textView.setPadding(hpadding, vpadding, hpadding, vpadding); return textView; }
/** * Create a default view to be used for tabs. This is called if a custom tab view is not set via * {@link #setCustomTabView(int, int)}. */
Create a default view to be used for tabs. This is called if a custom tab view is not set via <code>#setCustomTabView(int, int)</code>
createDefaultTabView
{ "repo_name": "dant3/vlc-android", "path": "vlc-android/src/com/android/widget/SlidingTabLayout.java", "license": "gpl-2.0", "size": 13435 }
[ "android.content.Context", "android.graphics.Color", "android.graphics.Typeface", "android.text.TextUtils", "android.util.TypedValue", "android.view.Gravity", "android.view.ViewGroup", "android.widget.LinearLayout", "android.widget.TextView" ]
import android.content.Context; import android.graphics.Color; import android.graphics.Typeface; import android.text.TextUtils; import android.util.TypedValue; import android.view.Gravity; import android.view.ViewGroup; import android.widget.LinearLayout; import android.widget.TextView;
import android.content.*; import android.graphics.*; import android.text.*; import android.util.*; import android.view.*; import android.widget.*;
[ "android.content", "android.graphics", "android.text", "android.util", "android.view", "android.widget" ]
android.content; android.graphics; android.text; android.util; android.view; android.widget;
1,480,570
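The padding computations above follow the standard Android conversion px = dp * density; a tiny helper makes that arithmetic explicit. DisplayMetrics is the framework class, the helper itself is illustrative.

import android.util.DisplayMetrics;

public final class DipConverter {
    private DipConverter() {
    }

    // Same arithmetic as the padding computation above: px = dp * density (truncated to int)
    public static int dipsToPixels(int dips, DisplayMetrics metrics) {
        return (int) (dips * metrics.density);
    }
}

With a density of 2.0 (an xhdpi screen), 16 dp becomes 32 px; TypedValue.applyDimension with COMPLEX_UNIT_DIP performs the same conversion in the framework.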
public List<CathDomain> filterByNodeName(String query);
List<CathDomain> function(String query);
/** Return list of CATH domains whose node name (e.g. Orthogonal Bundle) starts with the query. * * @param query * @return CATH domains */
Return list of CATH domains whose node name (e.g. Orthogonal Bundle) starts with the query
filterByNodeName
{ "repo_name": "sbliven/biojava", "path": "biojava3-structure/src/main/java/org/biojava/bio/structure/cath/CathDatabase.java", "license": "lgpl-2.1", "size": 3173 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,877,275
private void findScrollView(ViewGroup viewGroup) { scrollChild = viewGroup; if (viewGroup.getChildCount() > 0) { int count = viewGroup.getChildCount(); View child; for (int i = 0; i < count; i++) { child = viewGroup.getChildAt(i); if (child instanceof AbsListView || child instanceof ScrollView || child instanceof ViewPager || child instanceof WebView) { scrollChild = child; return; } } } }
void function(ViewGroup viewGroup) { scrollChild = viewGroup; if (viewGroup.getChildCount() > 0) { int count = viewGroup.getChildCount(); View child; for (int i = 0; i < count; i++) { child = viewGroup.getChildAt(i); if (child instanceof AbsListView child instanceof ScrollView child instanceof ViewPager child instanceof WebView) { scrollChild = child; return; } } } }
/** * Find out the scrollable child view from a ViewGroup. * * @param viewGroup */
Find out the scrollable child view from a ViewGroup
findScrollView
{ "repo_name": "oreo/core", "path": "core/src/main/java/com/android/core/widget/SwipeBackLayout.java", "license": "apache-2.0", "size": 14137 }
[ "android.support.v4.view.ViewPager", "android.view.View", "android.view.ViewGroup", "android.webkit.WebView", "android.widget.AbsListView", "android.widget.ScrollView" ]
import android.support.v4.view.ViewPager; import android.view.View; import android.view.ViewGroup; import android.webkit.WebView; import android.widget.AbsListView; import android.widget.ScrollView;
import android.support.v4.view.*; import android.view.*; import android.webkit.*; import android.widget.*;
[ "android.support", "android.view", "android.webkit", "android.widget" ]
android.support; android.view; android.webkit; android.widget;
652,018
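The helper above only inspects direct children; a hedged sketch of a depth-first variant shows how the same instanceof checks could be applied recursively. This is illustrative, not the library's behaviour, and the type list is shortened.

import android.view.View;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ScrollView;

public final class ScrollChildFinder {
    private ScrollChildFinder() {
    }

    // Depth-first search for the first scrollable descendant; returns null if none is found.
    public static View findScrollableDescendant(ViewGroup root) {
        for (int i = 0; i < root.getChildCount(); i++) {
            View child = root.getChildAt(i);
            if (child instanceof AbsListView || child instanceof ScrollView) {
                return child;
            }
            if (child instanceof ViewGroup) {
                View nested = findScrollableDescendant((ViewGroup) child);
                if (nested != null) {
                    return nested;
                }
            }
        }
        return null;
    }
}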
Validator<?> validator = get(exchange); if (validator != null) { try { ValidationResult result = applyValidator(exchange, validator); if (!result.isValid()) { String msg = RuntimeMessages.MESSAGES.validatorFailed( validator.getClass().getName(), validator.getType().getName(), validator.getName().toString(), result.getDetail()); throw new ValidationFailureException(validator, result, msg); } } catch (SwitchYardException e) { // Validators which throw SwitchYardException should be reported as a ValidationFailureException String msg = RuntimeMessages.MESSAGES.validatorFailed( validator.getClass().getName(), validator.getType().getName(), validator.getName().toString(), e); throw new ValidationFailureException(validator, e.getCause() != null ? e.getCause() : e, msg); } } }
Validator<?> validator = get(exchange); if (validator != null) { try { ValidationResult result = applyValidator(exchange, validator); if (!result.isValid()) { String msg = RuntimeMessages.MESSAGES.validatorFailed( validator.getClass().getName(), validator.getType().getName(), validator.getName().toString(), result.getDetail()); throw new ValidationFailureException(validator, result, msg); } } catch (SwitchYardException e) { String msg = RuntimeMessages.MESSAGES.validatorFailed( validator.getClass().getName(), validator.getType().getName(), validator.getName().toString(), e); throw new ValidationFailureException(validator, e.getCause() != null ? e.getCause() : e, msg); } } }
/** * Validate the current message on the exchange. * @param exchange exchange * @throws ValidationFailureException validation failure exception */
Validate the current message on the exchange
handleMessage
{ "repo_name": "tadayosi/switchyard", "path": "core/runtime/src/main/java/org/switchyard/handlers/ValidateHandler.java", "license": "apache-2.0", "size": 5808 }
[ "org.switchyard.SwitchYardException", "org.switchyard.runtime.RuntimeMessages", "org.switchyard.validate.ValidationFailureException", "org.switchyard.validate.ValidationResult", "org.switchyard.validate.Validator" ]
import org.switchyard.SwitchYardException; import org.switchyard.runtime.RuntimeMessages; import org.switchyard.validate.ValidationFailureException; import org.switchyard.validate.ValidationResult; import org.switchyard.validate.Validator;
import org.switchyard.*; import org.switchyard.runtime.*; import org.switchyard.validate.*;
[ "org.switchyard", "org.switchyard.runtime", "org.switchyard.validate" ]
org.switchyard; org.switchyard.runtime; org.switchyard.validate;
579,801
public Exchange build() { Exchange exchange = new DefaultExchange(context); Message message = exchange.getIn(); message.setBody(body); if (headers.size() > 0) { message.setHeaders(headers); } // setup the properties on the exchange for (Map.Entry<String, Object> entry : properties.entrySet()) { exchange.setProperty(entry.getKey(), entry.getValue()); } if (pattern != null) { exchange.setPattern(pattern); } return exchange; }
Exchange function() { Exchange exchange = new DefaultExchange(context); Message message = exchange.getIn(); message.setBody(body); if (headers.size() > 0) { message.setHeaders(headers); } for (Map.Entry<String, Object> entry : properties.entrySet()) { exchange.setProperty(entry.getKey(), entry.getValue()); } if (pattern != null) { exchange.setPattern(pattern); } return exchange; }
/** * Build up the exchange from the exchange builder * * @return exchange */
Build up the exchange from the exchange builder
build
{ "repo_name": "DariusX/camel", "path": "core/camel-core-engine/src/main/java/org/apache/camel/builder/ExchangeBuilder.java", "license": "apache-2.0", "size": 3721 }
[ "java.util.Map", "org.apache.camel.Exchange", "org.apache.camel.Message", "org.apache.camel.support.DefaultExchange" ]
import java.util.Map; import org.apache.camel.Exchange; import org.apache.camel.Message; import org.apache.camel.support.DefaultExchange;
import java.util.*; import org.apache.camel.*; import org.apache.camel.support.*;
[ "java.util", "org.apache.camel" ]
java.util; org.apache.camel;
2,202,301
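A minimal usage sketch of the ExchangeBuilder documented above, assuming a locally created DefaultCamelContext; the body, header, and property values are placeholders, not taken from the record:

    import org.apache.camel.CamelContext;
    import org.apache.camel.Exchange;
    import org.apache.camel.ExchangePattern;
    import org.apache.camel.builder.ExchangeBuilder;
    import org.apache.camel.impl.DefaultCamelContext;

    public class ExchangeBuilderSketch {
        public static void main(String[] args) throws Exception {
            // Hypothetical context; in a real route the CamelContext is usually injected
            CamelContext context = new DefaultCamelContext();
            Exchange exchange = ExchangeBuilder.anExchange(context)
                    .withBody("hello")                   // becomes message.setBody(body)
                    .withHeader("orderId", 42)           // copied into the message headers
                    .withProperty("traceId", "abc-123")  // set as an exchange property
                    .withPattern(ExchangePattern.InOut)  // optional exchange pattern
                    .build();
            System.out.println(exchange.getIn().getBody());
        }
    }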
public Observable<ServiceResponse<EventhubInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String namespaceName, String eventHubName, EventhubInner parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (namespaceName == null) { throw new IllegalArgumentException("Parameter namespaceName is required and cannot be null."); } if (eventHubName == null) { throw new IllegalArgumentException("Parameter eventHubName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (parameters == null) { throw new IllegalArgumentException("Parameter parameters is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); }
Observable<ServiceResponse<EventhubInner>> function(String resourceGroupName, String namespaceName, String eventHubName, EventhubInner parameters) { if (resourceGroupName == null) { throw new IllegalArgumentException(STR); } if (namespaceName == null) { throw new IllegalArgumentException(STR); } if (eventHubName == null) { throw new IllegalArgumentException(STR); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException(STR); } if (parameters == null) { throw new IllegalArgumentException(STR); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException(STR); }
/** * Creates or updates a new Event Hub as a nested resource within a Namespace. * * @param resourceGroupName Name of the resource group within the azure subscription. * @param namespaceName The Namespace name * @param eventHubName The Event Hub name * @param parameters Parameters supplied to create an Event Hub resource. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the observable to the EventhubInner object */
Creates or updates a new Event Hub as a nested resource within a Namespace
createOrUpdateWithServiceResponseAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/eventhubs/mgmt-v2017_04_01/src/main/java/com/microsoft/azure/management/eventhubs/v2017_04_01/implementation/EventHubsInner.java", "license": "mit", "size": 95104 }
[ "com.microsoft.rest.ServiceResponse" ]
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
1,767,954
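A hedged caller-side sketch for the operation documented above; the "eventHubs" client variable, the resource names, and the plain createOrUpdateAsync variant are assumptions based on the usual generated Azure SDK client pattern, not code from the record:

    // Hypothetical caller; "eventHubs" is assumed to be an initialized EventHubsInner client
    EventhubInner parameters = new EventhubInner();  // placeholder configuration
    eventHubs.createOrUpdateAsync("myResourceGroup", "myNamespace", "myEventHub", parameters)
            .subscribe(
                    created -> System.out.println("Event hub created or updated"),
                    error -> error.printStackTrace());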
@Override protected void internalReceiveCommand(String itemName, Command command) { for (HomematicBindingProvider provider : providers) { Item item = provider.getItem(itemName); HomematicBindingConfig config = provider.getBindingFor(itemName); communicator.receiveCommand(item, command, config); } }
void function(String itemName, Command command) { for (HomematicBindingProvider provider : providers) { Item item = provider.getItem(itemName); HomematicBindingConfig config = provider.getBindingFor(itemName); communicator.receiveCommand(item, command, config); } }
/** * Receives a command and sends it to the Homematic communicator. */
Receives a command and sends it to the Homematic communicator
internalReceiveCommand
{ "repo_name": "joek/openhab1-addons", "path": "bundles/binding/org.openhab.binding.homematic/src/main/java/org/openhab/binding/homematic/internal/bus/HomematicBinding.java", "license": "epl-1.0", "size": 7796 }
[ "org.openhab.binding.homematic.HomematicBindingProvider", "org.openhab.binding.homematic.internal.config.binding.HomematicBindingConfig", "org.openhab.core.items.Item", "org.openhab.core.types.Command" ]
import org.openhab.binding.homematic.HomematicBindingProvider; import org.openhab.binding.homematic.internal.config.binding.HomematicBindingConfig; import org.openhab.core.items.Item; import org.openhab.core.types.Command;
import org.openhab.binding.homematic.*; import org.openhab.binding.homematic.internal.config.binding.*; import org.openhab.core.items.*; import org.openhab.core.types.*;
[ "org.openhab.binding", "org.openhab.core" ]
org.openhab.binding; org.openhab.core;
1,107,288
if (ruleSetNames == null || ruleSetNames.isEmpty()) { throw new IngestModule.IngestModuleException(Bundle.YaraIngestModule_no_ruleSets()); } // Find javac File exeFile = InstalledFileLocator.getDefault().locate( Paths.get(YARA_DIR, YARA_C_EXE).toString(), YaraIngestModule.class.getPackage().getName(), false); if (exeFile == null) { throw new IngestModuleException(Bundle.YaraIngestModule_yarac_not_found()); } for (RuleSet set : getRuleSetsForNames(ruleSetNames)) { compileRuleSet(set, outputDir, exeFile); } }
if (ruleSetNames == null || ruleSetNames.isEmpty()) { throw new IngestModule.IngestModuleException(Bundle.YaraIngestModule_no_ruleSets()); } File exeFile = InstalledFileLocator.getDefault().locate( Paths.get(YARA_DIR, YARA_C_EXE).toString(), YaraIngestModule.class.getPackage().getName(), false); if (exeFile == null) { throw new IngestModuleException(Bundle.YaraIngestModule_yarac_not_found()); } for (RuleSet set : getRuleSetsForNames(ruleSetNames)) { compileRuleSet(set, outputDir, exeFile); } }
/** * Uses the yarac tool to compile the rules in the given rule sets. * * @param ruleSetNames List of names of the selected rule sets. * @param outputDir Path of the directory to put the compiled rule files. * * @throws org.sleuthkit.autopsy.ingest.IngestModule.IngestModuleException */
Uses the yarac tool to compile the rules in the given rule sets
compileRules
{ "repo_name": "eugene7646/autopsy", "path": "Core/src/org/sleuthkit/autopsy/modules/yara/YaraIngestHelper.java", "license": "apache-2.0", "size": 10534 }
[ "java.io.File", "java.nio.file.Paths", "org.openide.modules.InstalledFileLocator", "org.sleuthkit.autopsy.ingest.IngestModule", "org.sleuthkit.autopsy.modules.yara.rules.RuleSet" ]
import java.io.File; import java.nio.file.Paths; import org.openide.modules.InstalledFileLocator; import org.sleuthkit.autopsy.ingest.IngestModule; import org.sleuthkit.autopsy.modules.yara.rules.RuleSet;
import java.io.*; import java.nio.file.*; import org.openide.modules.*; import org.sleuthkit.autopsy.ingest.*; import org.sleuthkit.autopsy.modules.yara.rules.*;
[ "java.io", "java.nio", "org.openide.modules", "org.sleuthkit.autopsy" ]
java.io; java.nio; org.openide.modules; org.sleuthkit.autopsy;
2,072,205
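A hedged sketch of how the located yarac binary might be invoked as an external process, which is what compiling a rule set ultimately requires; the file paths and the two-argument yarac invocation (source rules, compiled output) are assumptions, not code from the record:

    import java.io.File;
    import java.io.IOException;

    public class YaracSketch {
        public static void main(String[] args) throws IOException, InterruptedException {
            File exeFile = new File("yara/yarac64.exe");       // placeholder path to the located yarac
            File ruleFile = new File("rules/my_rules.yara");   // placeholder source rule file
            File compiled = new File("output/my_rules.yarac"); // placeholder compiled output
            Process process = new ProcessBuilder(
                    exeFile.getAbsolutePath(), ruleFile.getAbsolutePath(), compiled.getAbsolutePath())
                    .inheritIO()
                    .start();
            if (process.waitFor() != 0) {
                throw new IOException("yarac exited with a non-zero status");
            }
        }
    }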
@Override public ParcelFileDescriptor openFile(Uri uri, String mode) throws FileNotFoundException { if (Constants.LOGVV) { logVerboseOpenFileInfo(uri, mode); } Cursor cursor = query(uri, new String[] {"_data"}, null, null, null); String path; try { int count = (cursor != null) ? cursor.getCount() : 0; if (count != 1) { // If there is not exactly one result, throw an appropriate exception. if (count == 0) { throw new FileNotFoundException("No entry for " + uri); } throw new FileNotFoundException("Multiple items at " + uri); } cursor.moveToFirst(); path = cursor.getString(0); } finally { if (cursor != null) { cursor.close(); } } if (path == null) { throw new FileNotFoundException("No filename found."); } if (!Helpers.isFilenameValid(path)) { throw new FileNotFoundException("Invalid filename."); } if (!"r".equals(mode)) { throw new FileNotFoundException("Bad mode for " + uri + ": " + mode); } ParcelFileDescriptor ret = ParcelFileDescriptor.open(new File(path), ParcelFileDescriptor.MODE_READ_ONLY); if (ret == null) { if (Constants.LOGV) { Log.v(Constants.TAG, "couldn't open file"); } throw new FileNotFoundException("couldn't open file"); } return ret; }
ParcelFileDescriptor function(Uri uri, String mode) throws FileNotFoundException { if (Constants.LOGVV) { logVerboseOpenFileInfo(uri, mode); } Cursor cursor = query(uri, new String[] {"_data"}, null, null, null); String path; try { int count = (cursor != null) ? cursor.getCount() : 0; if (count != 1) { if (count == 0) { throw new FileNotFoundException(STR + uri); } throw new FileNotFoundException(STR + uri); } cursor.moveToFirst(); path = cursor.getString(0); } finally { if (cursor != null) { cursor.close(); } } if (path == null) { throw new FileNotFoundException(STR); } if (!Helpers.isFilenameValid(path)) { throw new FileNotFoundException(STR); } if (!"r".equals(mode)) { throw new FileNotFoundException(STR + uri + STR + mode); } ParcelFileDescriptor ret = ParcelFileDescriptor.open(new File(path), ParcelFileDescriptor.MODE_READ_ONLY); if (ret == null) { if (Constants.LOGV) { Log.v(Constants.TAG, STR); } throw new FileNotFoundException(STR); } return ret; }
/** * Remotely opens a file */
Remotely opens a file
openFile
{ "repo_name": "msafin/wmc", "path": "src/com/sharegogo/wireless/download/DownloadProvider.java", "license": "gpl-2.0", "size": 48644 }
[ "android.database.Cursor", "android.net.Uri", "android.os.ParcelFileDescriptor", "android.util.Log", "java.io.File", "java.io.FileNotFoundException" ]
import android.database.Cursor; import android.net.Uri; import android.os.ParcelFileDescriptor; import android.util.Log; import java.io.File; import java.io.FileNotFoundException;
import android.database.*; import android.net.*; import android.os.*; import android.util.*; import java.io.*;
[ "android.database", "android.net", "android.os", "android.util", "java.io" ]
android.database; android.net; android.os; android.util; java.io;
2,061,465
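A hedged caller-side sketch for the provider method above: Android clients normally reach openFile through ContentResolver rather than calling it directly. The fragment assumes it runs inside an Activity, and the content URI authority is a placeholder, not the provider's real one:

    // Inside an Activity; the URI is hypothetical
    Uri downloadUri = Uri.parse("content://com.example.downloads/my_downloads/42");
    try (ParcelFileDescriptor pfd =
                 getContentResolver().openFileDescriptor(downloadUri, "r");
         FileInputStream in = new FileInputStream(pfd.getFileDescriptor())) {
        // Only "r" mode is accepted by the provider above
        byte[] buffer = new byte[8192];
        int read = in.read(buffer);
    }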
private void openServerConnection(String url) throws IOException { _serviceUrl = url; JMXServiceURL serviceUrl = new JMXServiceURL(_serviceUrl); _serverConnector = JMXConnectorFactory.connect(serviceUrl); _serviceConnection = _serverConnector.getMBeanServerConnection(); }
void function(String url) throws IOException { _serviceUrl = url; JMXServiceURL serviceUrl = new JMXServiceURL(_serviceUrl); _serverConnector = JMXConnectorFactory.connect(serviceUrl); _serviceConnection = _serverConnector.getMBeanServerConnection(); }
/** * Open a connection to remote JMX server * * @param url * @throws IOException */
Open a connection to remote JMX server
openServerConnection
{ "repo_name": "xmpace/jetty-read", "path": "jetty-monitor/src/main/java/org/eclipse/jetty/monitor/jmx/ServiceConnection.java", "license": "apache-2.0", "size": 5104 }
[ "java.io.IOException", "javax.management.remote.JMXConnectorFactory", "javax.management.remote.JMXServiceURL" ]
import java.io.IOException; import javax.management.remote.JMXConnectorFactory; import javax.management.remote.JMXServiceURL;
import java.io.*; import javax.management.remote.*;
[ "java.io", "javax.management" ]
java.io; javax.management;
2,744,238
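A minimal, self-contained sketch of the same JMX connection pattern used above; the service URL points at a locally exposed RMI connector and is only an assumption:

    import javax.management.MBeanServerConnection;
    import javax.management.remote.JMXConnector;
    import javax.management.remote.JMXConnectorFactory;
    import javax.management.remote.JMXServiceURL;

    public class JmxConnectSketch {
        public static void main(String[] args) throws Exception {
            // Placeholder URL; a real deployment exposes its own host and port
            JMXServiceURL url =
                    new JMXServiceURL("service:jmx:rmi:///jndi/rmi://localhost:1099/jmxrmi");
            try (JMXConnector connector = JMXConnectorFactory.connect(url)) {
                MBeanServerConnection connection = connector.getMBeanServerConnection();
                System.out.println("MBean count: " + connection.getMBeanCount());
            }
        }
    }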
public JoinColumn<OneToMany<T>> getOrCreateJoinColumn() { List<Node> nodeList = childNode.get("join-column"); if (nodeList != null && nodeList.size() > 0) { return new JoinColumnImpl<OneToMany<T>>(this, "join-column", childNode, nodeList.get(0)); } return createJoinColumn(); }
JoinColumn<OneToMany<T>> function() { List<Node> nodeList = childNode.get(STR); if (nodeList != null && nodeList.size() > 0) { return new JoinColumnImpl<OneToMany<T>>(this, STR, childNode, nodeList.get(0)); } return createJoinColumn(); }
/** * If not already created, a new <code>join-column</code> element will be created and returned. * Otherwise, the first existing <code>join-column</code> element will be returned. * @return the instance defined for the element <code>join-column</code> */
If not already created, a new <code>join-column</code> element will be created and returned. Otherwise, the first existing <code>join-column</code> element will be returned
getOrCreateJoinColumn
{ "repo_name": "forge/javaee-descriptors", "path": "impl/src/main/java/org/jboss/shrinkwrap/descriptor/impl/orm21/OneToManyImpl.java", "license": "epl-1.0", "size": 34625 }
[ "java.util.List", "org.jboss.shrinkwrap.descriptor.api.orm21.JoinColumn", "org.jboss.shrinkwrap.descriptor.api.orm21.OneToMany", "org.jboss.shrinkwrap.descriptor.spi.node.Node" ]
import java.util.List; import org.jboss.shrinkwrap.descriptor.api.orm21.JoinColumn; import org.jboss.shrinkwrap.descriptor.api.orm21.OneToMany; import org.jboss.shrinkwrap.descriptor.spi.node.Node;
import java.util.*; import org.jboss.shrinkwrap.descriptor.api.orm21.*; import org.jboss.shrinkwrap.descriptor.spi.node.*;
[ "java.util", "org.jboss.shrinkwrap" ]
java.util; org.jboss.shrinkwrap;
43,821
public static NodeList distinct(NodeList nl) { return ExsltSets.distinct(nl); }
static NodeList function(NodeList nl) { return ExsltSets.distinct(nl); }
/** * Returns node-set containing distinct string values. * * @param nl NodeList for node-set * @return a NodeList with nodes from nl containing distinct string values. * In other words, if more than one node in nl contains the same string value, * only include the first such node found. * * Note: The usage of this extension function in the xalan namespace * is deprecated. Please use the same function in the EXSLT sets extension * (http://exslt.org/sets). */
Returns node-set containing distinct string values
distinct
{ "repo_name": "openjdk/jdk7u", "path": "jaxp/src/com/sun/org/apache/xalan/internal/lib/Extensions.java", "license": "gpl-2.0", "size": 9427 }
[ "org.w3c.dom.NodeList" ]
import org.w3c.dom.NodeList;
import org.w3c.dom.*;
[ "org.w3c.dom" ]
org.w3c.dom;
2,668,175
public ServiceFuture<SubnetInner> getAsync(String resourceGroupName, String virtualNetworkName, String subnetName, final ServiceCallback<SubnetInner> serviceCallback) { return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, virtualNetworkName, subnetName), serviceCallback); }
ServiceFuture<SubnetInner> function(String resourceGroupName, String virtualNetworkName, String subnetName, final ServiceCallback<SubnetInner> serviceCallback) { return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, virtualNetworkName, subnetName), serviceCallback); }
/** * Gets the specified subnet by virtual network and resource group. * * @param resourceGroupName The name of the resource group. * @param virtualNetworkName The name of the virtual network. * @param subnetName The name of the subnet. * @param serviceCallback the async ServiceCallback to handle successful and failed responses. * @throws IllegalArgumentException thrown if parameters fail the validation * @return the {@link ServiceFuture} object */
Gets the specified subnet by virtual network and resource group
getAsync
{ "repo_name": "selvasingh/azure-sdk-for-java", "path": "sdk/network/mgmt-v2018_06_01/src/main/java/com/microsoft/azure/management/network/v2018_06_01/implementation/SubnetsInner.java", "license": "mit", "size": 48775 }
[ "com.microsoft.rest.ServiceCallback", "com.microsoft.rest.ServiceFuture" ]
import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.*;
[ "com.microsoft.rest" ]
com.microsoft.rest;
1,439,699
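A hedged sketch of calling the async variant documented above with a ServiceCallback; the "subnets" client variable, the resource names, and the addressPrefix() accessor are assumptions, not taken from the record:

    // "subnets" is assumed to be an initialized SubnetsInner client
    subnets.getAsync("myResourceGroup", "myVnet", "mySubnet",
            new ServiceCallback<SubnetInner>() {
                @Override
                public void success(SubnetInner result) {
                    System.out.println("Subnet prefix: " + result.addressPrefix());
                }

                @Override
                public void failure(Throwable t) {
                    t.printStackTrace();
                }
            });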
@Message(id=16816, value = "Unexpected exception while creating an instance of Result type '%s'.") SwitchYardException unsupportedExceptionCreatingResult(String type, @Cause Exception e);
@Message(id=16816, value = STR) SwitchYardException unsupportedExceptionCreatingResult(String type, @Cause Exception e);
/** * unsupportedExceptionCreatingResult method definition. * @param type type * @param e e * @return SwitchYardException */
unsupportedExceptionCreatingResult method definition
unsupportedExceptionCreatingResult
{ "repo_name": "cunningt/switchyard", "path": "core/transform/src/main/java/org/switchyard/transform/internal/TransformMessages.java", "license": "apache-2.0", "size": 22568 }
[ "org.jboss.logging.annotations.Cause", "org.jboss.logging.annotations.Message", "org.switchyard.SwitchYardException" ]
import org.jboss.logging.annotations.Cause; import org.jboss.logging.annotations.Message; import org.switchyard.SwitchYardException;
import org.jboss.logging.annotations.*; import org.switchyard.*;
[ "org.jboss.logging", "org.switchyard" ]
org.jboss.logging; org.switchyard;
2,377,650
public void createControls(Composite parent) { parent.setLayout(new RowLayout(SWT.HORIZONTAL)); Label lblName = new Label(parent, SWT.NONE); lblName.setText("Name:"); lblDataName = new Label(parent, SWT.NONE); lblDataName.setLayoutData(new RowData(426, 14)); lblDataName.setText("No track selected"); Label lblTotalDistance = new Label(parent, SWT.NONE); lblTotalDistance.setText("Total distance:"); lblDataTotalDistance = new Label(parent, SWT.NONE); lblDataTotalDistance.setText("-"); lblDataTotalDistance.setLayoutData(new RowData(82, SWT.DEFAULT)); Label lblTotalElevation = new Label(parent, SWT.NONE); lblTotalElevation.setText("Total elevation:"); lblDataTotalElevation = new Label(parent, SWT.NONE); lblDataTotalElevation.setText("-"); lblDataTotalElevation.setLayoutData(new RowData(88, SWT.DEFAULT)); }
void function(Composite parent) { parent.setLayout(new RowLayout(SWT.HORIZONTAL)); Label lblName = new Label(parent, SWT.NONE); lblName.setText("Name:"); lblDataName = new Label(parent, SWT.NONE); lblDataName.setLayoutData(new RowData(426, 14)); lblDataName.setText(STR); Label lblTotalDistance = new Label(parent, SWT.NONE); lblTotalDistance.setText(STR); lblDataTotalDistance = new Label(parent, SWT.NONE); lblDataTotalDistance.setText("-"); lblDataTotalDistance.setLayoutData(new RowData(82, SWT.DEFAULT)); Label lblTotalElevation = new Label(parent, SWT.NONE); lblTotalElevation.setText(STR); lblDataTotalElevation = new Label(parent, SWT.NONE); lblDataTotalElevation.setText("-"); lblDataTotalElevation.setLayoutData(new RowData(88, SWT.DEFAULT)); }
/** * Create controls for displaying the name, total distance * and total elevation of a track. */
Create controls for displaying the name, total distance and total elevation of a track
createControls
{ "repo_name": "HenrichN/trackexplorer", "path": "org.trackexplorer/src/org/trackexplorer/parts/TrackInfoPart.java", "license": "mit", "size": 2590 }
[ "org.eclipse.swt.layout.RowData", "org.eclipse.swt.layout.RowLayout", "org.eclipse.swt.widgets.Composite", "org.eclipse.swt.widgets.Label" ]
import org.eclipse.swt.layout.RowData; import org.eclipse.swt.layout.RowLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.layout.*; import org.eclipse.swt.widgets.*;
[ "org.eclipse.swt" ]
org.eclipse.swt;
142,853
public boolean ready() throws IOException { synchronized (lock) { ensureOpen(); if (skipLF) { if (nextChar >= nChars && in.ready()) { fill(); } if (nextChar < nChars) { if (cb[nextChar] == '\n') nextChar++; skipLF = false; } } return (nextChar < nChars) || in.ready(); } }
boolean function() throws IOException { synchronized (lock) { ensureOpen(); if (skipLF) { if (nextChar >= nChars && in.ready()) { fill(); } if (nextChar < nChars) { if (cb[nextChar] == '\n') nextChar++; skipLF = false; } } return (nextChar < nChars) || in.ready(); } }
/** * Tells whether this stream is ready to be read. A buffered character * stream is ready if the buffer is not empty, or if the underlying * character stream is ready. * * @exception IOException If an I/O error occurs */
Tells whether this stream is ready to be read. A buffered character stream is ready if the buffer is not empty, or if the underlying character stream is ready
ready
{ "repo_name": "shahankhatch/ScalableGraphSummaries", "path": "SGBGraphChi/src/main/java/edu/toronto/cs/sgb/util/CustomBufferedReader.java", "license": "apache-2.0", "size": 20689 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
2,143,406
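The contract above matches java.io.BufferedReader#ready, so a standard-library sketch illustrates the intended non-blocking check; the input file name is a placeholder:

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    public class ReadySketch {
        public static void main(String[] args) throws IOException {
            try (BufferedReader reader = new BufferedReader(new FileReader("input.txt"))) {
                // ready() is true while buffered characters remain or the underlying
                // stream can supply more without blocking, so this loop never blocks
                while (reader.ready()) {
                    System.out.println(reader.readLine());
                }
            }
        }
    }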
ArrayList<VmStatus> sorted = new ArrayList<VmStatus>(); // Remove VMs with less CPU load than the CPU load by which the source // host is stressed. double cpuExcess = source.getSandboxStatus().getResourcesInUse().getCpu() - source.getHostDescription().getResourceCapacity().getCpu() * this.upperThreshold; for (VmStatus vm : sourceVms) if (vm.getResourcesInUse().getCpu() >= cpuExcess) sorted.add(vm); if (!sorted.isEmpty()) // Sort VMs in increasing order by CPU load. Collections.sort(sorted, VmStatusComparator.getComparator(VmStatusComparator.CPU_IN_USE)); else { // Add original list of VMs and sort them in decreasing order by // CPU load, so as to avoid trying to migrate the smallest VMs // first (which would not help resolve the stress situation). sorted.addAll(sourceVms); Collections.sort(sorted, VmStatusComparator.getComparator(VmStatusComparator.CPU_IN_USE)); Collections.reverse(sorted); } return sorted; }
ArrayList<VmStatus> sorted = new ArrayList<VmStatus>(); double cpuExcess = source.getSandboxStatus().getResourcesInUse().getCpu() - source.getHostDescription().getResourceCapacity().getCpu() * this.upperThreshold; for (VmStatus vm : sourceVms) if (vm.getResourcesInUse().getCpu() >= cpuExcess) sorted.add(vm); if (!sorted.isEmpty()) Collections.sort(sorted, VmStatusComparator.getComparator(VmStatusComparator.CPU_IN_USE)); else { sorted.addAll(sourceVms); Collections.sort(sorted, VmStatusComparator.getComparator(VmStatusComparator.CPU_IN_USE)); Collections.reverse(sorted); } return sorted; }
/** * Sorts the relocation candidates in increasing order by <CPU load>, * previously removing from consideration those VMs with less CPU load * than the CPU load by which the host is stressed. */
Sorts the relocation candidates in increasing order by CPU load, previously removing from consideration those VMs with less CPU load than the CPU load by which the host is stressed
orderSourceVms
{ "repo_name": "digs-uwo/dcsim-projects", "path": "src/edu/uwo/csd/dcsim/projects/im2013/policies/VmRelocationPolicyFFIMDHybrid.java", "license": "gpl-3.0", "size": 4709 }
[ "edu.uwo.csd.dcsim.management.VmStatus", "edu.uwo.csd.dcsim.management.VmStatusComparator", "java.util.ArrayList", "java.util.Collections" ]
import edu.uwo.csd.dcsim.management.VmStatus; import edu.uwo.csd.dcsim.management.VmStatusComparator; import java.util.ArrayList; import java.util.Collections;
import edu.uwo.csd.dcsim.management.*; import java.util.*;
[ "edu.uwo.csd", "java.util" ]
edu.uwo.csd; java.util;
817,536
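A hedged sketch of the same filter-then-sort idea using the streams API and the VmStatus accessors shown in the method; it assumes java.util and java.util.stream imports, an existing sourceVms list, and a precomputed cpuExcess value:

    // Keep only VMs at least as large as the excess load, smallest CPU consumers first;
    // this mirrors the primary branch of orderSourceVms above
    double cpuExcess = 0.0;  // placeholder; derived from host capacity in the real method
    List<VmStatus> candidates = sourceVms.stream()
            .filter(vm -> vm.getResourcesInUse().getCpu() >= cpuExcess)
            .sorted(Comparator.comparingDouble((VmStatus vm) -> vm.getResourcesInUse().getCpu()))
            .collect(Collectors.toList());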
void onFinish(@NonNull List<BaseMedia> medias);
void onFinish(@NonNull List<BaseMedia> medias);
/** * call when the view should be finished or the process is finished * * @param medias the selection of medias. */
call when the view should be finished or the process is finished
onFinish
{ "repo_name": "storix/boxing", "path": "boxing/src/main/java/com/bilibili/boxing/presenter/PickerContract.java", "license": "apache-2.0", "size": 4099 }
[ "android.support.annotation.NonNull", "com.bilibili.boxing.model.entity.BaseMedia", "java.util.List" ]
import android.support.annotation.NonNull; import com.bilibili.boxing.model.entity.BaseMedia; import java.util.List;
import android.support.annotation.*; import com.bilibili.boxing.model.entity.*; import java.util.*;
[ "android.support", "com.bilibili.boxing", "java.util" ]
android.support; com.bilibili.boxing; java.util;
2,327,886
private void shutdownDerbyEmbedded() { final boolean trace = TraceComponent.isAnyTracingEnabled(); if (trace && tc.isEntryEnabled()) Tr.entry(this, tc, "shutdownDerbyEmbedded", classloader, embDerbyRefCount); // Shut down Derby embedded if the reference count drops to 0 if (embDerbyRefCount.remove(classloader) && !embDerbyRefCount.contains(classloader)) try { Class<?> EmbDS = AdapterUtil.forNameWithPriv("org.apache.derby.jdbc.EmbeddedDataSource40", true, classloader); DataSource ds = (DataSource) EmbDS.newInstance(); EmbDS.getMethod("setShutdownDatabase", String.class).invoke(ds, "shutdown"); ds.getConnection().close(); if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, "shutdownDerbyEmbedded"); } catch (SQLException x) { // expected for shutdown if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, "shutdownDerbyEmbedded", x.getSQLState() + ' ' + x.getErrorCode() + ':' + x.getMessage()); } catch (Throwable x) { // Work around Derby issue when the JVM is shutting down while Derby shutdown is requested. if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, "shutdownDerbyEmbedded", x); } else if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, "shutdownDerbyEmbedded", false); }
void function() { final boolean trace = TraceComponent.isAnyTracingEnabled(); if (trace && tc.isEntryEnabled()) Tr.entry(this, tc, STR, classloader, embDerbyRefCount); if (embDerbyRefCount.remove(classloader) && !embDerbyRefCount.contains(classloader)) try { Class<?> EmbDS = AdapterUtil.forNameWithPriv(STR, true, classloader); DataSource ds = (DataSource) EmbDS.newInstance(); EmbDS.getMethod(STR, String.class).invoke(ds, STR); ds.getConnection().close(); if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, STR); } catch (SQLException x) { if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, STR, x.getSQLState() + ' ' + x.getErrorCode() + ':' + x.getMessage()); } catch (Throwable x) { if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, STR, x); } else if (trace && tc.isEntryEnabled()) Tr.exit(this, tc, STR, false); }
/** * Shut down the Derby system if the reference count for the class loader drops to 0. */
Shut down the Derby system if the reference count for the class loader drops to 0
shutdownDerbyEmbedded
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.jdbc/src/com/ibm/ws/jdbc/internal/JDBCDriverService.java", "license": "epl-1.0", "size": 53710 }
[ "com.ibm.websphere.ras.Tr", "com.ibm.websphere.ras.TraceComponent", "com.ibm.ws.rsadapter.AdapterUtil", "java.sql.SQLException", "javax.sql.DataSource" ]
import com.ibm.websphere.ras.Tr; import com.ibm.websphere.ras.TraceComponent; import com.ibm.ws.rsadapter.AdapterUtil; import java.sql.SQLException; import javax.sql.DataSource;
import com.ibm.websphere.ras.*; import com.ibm.ws.rsadapter.*; import java.sql.*; import javax.sql.*;
[ "com.ibm.websphere", "com.ibm.ws", "java.sql", "javax.sql" ]
com.ibm.websphere; com.ibm.ws; java.sql; javax.sql;
2,564,540
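The same embedded-Derby shutdown can be requested through a plain JDBC URL; this hedged sketch relies on Derby's documented behavior of reporting a successful system shutdown as an SQLException (SQL state XJ015), the same "expected" exception the method above tolerates:

    import java.sql.DriverManager;
    import java.sql.SQLException;

    public class DerbyShutdownSketch {
        public static void main(String[] args) throws SQLException {
            try {
                // Ask the embedded engine to shut down the whole Derby system
                DriverManager.getConnection("jdbc:derby:;shutdown=true");
            } catch (SQLException e) {
                // Derby signals a clean system shutdown with SQL state XJ015
                if (!"XJ015".equals(e.getSQLState())) {
                    throw e;
                }
            }
        }
    }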