method: string (lengths 13 to 441k)
clean_method: string (lengths 7 to 313k)
doc: string (lengths 17 to 17.3k)
comment: string (lengths 3 to 1.42k)
method_name: string (lengths 1 to 273)
extra: dict
imports: sequence
imports_info: string (lengths 19 to 34.8k)
cluster_imports_info: string (lengths 15 to 3.66k)
libraries: sequence
libraries_info: string (lengths 6 to 661)
id: int64 (values 0 to 2.92M)
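Each record pairs a raw Java method (method) with an anonymized variant (clean_method, in which string literals are collapsed to STR and the method name is replaced by function), its Javadoc (doc), a plain-text summary (comment), the method name, repository metadata (extra), and import information at three granularities: fully qualified imports, wildcard package-level imports (cluster_imports_info), and top-level library prefixes (libraries). The sketch below shows one way to inspect a record with the Hugging Face datasets library; the dataset identifier is a hypothetical placeholder, not the real one.

```python
# Minimal sketch, assuming the dump is available as a Hugging Face dataset.
# "user/java-method-doc" is a hypothetical identifier; substitute the actual path.
from datasets import load_dataset

ds = load_dataset("user/java-method-doc", split="train")
row = ds[0]

# Raw source vs. the anonymized form.
print(row["method_name"])           # e.g. "assertReadingCompressedFileMatchesExpected"
print(row["comment"])               # natural-language summary derived from the Javadoc
print(row["clean_method"][:120])    # literals replaced with STR, name with "function"

# Import metadata at three granularities.
print(row["imports"])               # list of fully qualified imports
print(row["cluster_imports_info"])  # wildcard (package-level) import statements
print(row["libraries"])             # top-level library prefixes
```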
private static void assertReadingCompressedFileMatchesExpected( File file, Compression compression, List<String> expected, Pipeline p) { TextIO.Read read = TextIO.read().from(file.getPath()).withCompression(compression); PAssert.that(p.apply("Read_" + file + "_" + compression.toString(), read)) .containsInAnyOrder(expected); PAssert.that( p.apply( "Read_" + file + "_" + compression.toString() + "_many", read.withHintMatchesManyFiles())) .containsInAnyOrder(expected); PAssert.that( p.apply("Create_Paths_ReadFiles_" + file, Create.of(file.getPath())) .apply("Match_" + file, FileIO.matchAll()) .apply("ReadMatches_" + file, FileIO.readMatches().withCompression(compression)) .apply("ReadFiles_" + compression.toString(), TextIO.readFiles())) .containsInAnyOrder(expected); PAssert.that( p.apply("Create_Paths_ReadAll_" + file, Create.of(file.getPath())) .apply( "ReadAll_" + compression.toString(), TextIO.readAll().withCompression(compression))) .containsInAnyOrder(expected); }
static void function( File file, Compression compression, List<String> expected, Pipeline p) { TextIO.Read read = TextIO.read().from(file.getPath()).withCompression(compression); PAssert.that(p.apply("Read_" + file + "_" + compression.toString(), read)) .containsInAnyOrder(expected); PAssert.that( p.apply( "Read_" + file + "_" + compression.toString() + "_many", read.withHintMatchesManyFiles())) .containsInAnyOrder(expected); PAssert.that( p.apply(STR + file, Create.of(file.getPath())) .apply(STR + file, FileIO.matchAll()) .apply(STR + file, FileIO.readMatches().withCompression(compression)) .apply(STR + compression.toString(), TextIO.readFiles())) .containsInAnyOrder(expected); PAssert.that( p.apply(STR + file, Create.of(file.getPath())) .apply( STR + compression.toString(), TextIO.readAll().withCompression(compression))) .containsInAnyOrder(expected); }
/** * Helper method that runs a variety of ways to read a single file using TextIO and checks that * they all match the given expected output. * * <p>The transforms being verified are: * * <ul> * <li>TextIO.read().from(filename).withCompression(compressionType) * <li>TextIO.read().from(filename).withCompression(compressionType) .withHintMatchesManyFiles() * <li>TextIO.readFiles().withCompression(compressionType) * <li>TextIO.readAll().withCompression(compressionType) * </ul> */
Helper method that runs a variety of ways to read a single file using TextIO and checks that they all match the given expected output. The transforms being verified are: TextIO.read().from(filename).withCompression(compressionType) TextIO.read().from(filename).withCompression(compressionType) .withHintMatchesManyFiles() TextIO.readFiles().withCompression(compressionType) TextIO.readAll().withCompression(compressionType)
assertReadingCompressedFileMatchesExpected
{ "repo_name": "RyanSkraba/beam", "path": "sdks/java/core/src/test/java/org/apache/beam/sdk/io/TextIOReadTest.java", "license": "apache-2.0", "size": 34887 }
[ "java.io.File", "java.util.List", "org.apache.beam.sdk.Pipeline", "org.apache.beam.sdk.testing.PAssert", "org.apache.beam.sdk.transforms.Create", "org.hamcrest.Matchers" ]
import java.io.File; import java.util.List; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.testing.PAssert; import org.apache.beam.sdk.transforms.Create; import org.hamcrest.Matchers;
import java.io.*; import java.util.*; import org.apache.beam.sdk.*; import org.apache.beam.sdk.testing.*; import org.apache.beam.sdk.transforms.*; import org.hamcrest.*;
[ "java.io", "java.util", "org.apache.beam", "org.hamcrest" ]
java.io; java.util; org.apache.beam; org.hamcrest;
182,695
return MenuType.CONTEXTUAL == menu.getType(); }
return MenuType.CONTEXTUAL == menu.getType(); }
/** * Check if the menu descriptor is for contextual menu * * @param menu * @return boolean */
Check if the menu descriptor is for contextual menu
isContextualMenu
{ "repo_name": "debabratahazra/DS", "path": "designstudio/components/page/ui/com.odcgroup.page.ui/src/main/java/com/odcgroup/page/ui/PageUIContextMenuProvider.java", "license": "epl-1.0", "size": 8437 }
[ "com.odcgroup.page.uimodel.MenuType" ]
import com.odcgroup.page.uimodel.MenuType;
import com.odcgroup.page.uimodel.*;
[ "com.odcgroup.page" ]
com.odcgroup.page;
1,048,250
public List<String> categoryTags() { return this.categoryTags; }
List<String> function() { return this.categoryTags; }
/** * Get the list of category tags that this recommendation belongs to. * * @return the categoryTags value */
Get the list of category tags that this recommendation belongs to
categoryTags
{ "repo_name": "hovsepm/azure-sdk-for-java", "path": "appservice/resource-manager/v2018_02_01/src/main/java/com/microsoft/azure/management/appservice/v2018_02_01/implementation/RecommendationInner.java", "license": "mit", "size": 17998 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,363,104
public Application getApplication() { return application; }
Application function() { return application; }
/** * Get application * @return application **/
Get application
getApplication
{ "repo_name": "plaid/plaid-java", "path": "src/main/java/com/plaid/client/model/ApplicationGetResponse.java", "license": "mit", "size": 3648 }
[ "com.plaid.client.model.Application" ]
import com.plaid.client.model.Application;
import com.plaid.client.model.*;
[ "com.plaid.client" ]
com.plaid.client;
2,759,400
public double getLengthOffeneAbschn(final String owner, final Integer wdm) { double length = 0; for (final SbObj tmp : objList) { if (tmp.getGuName().equals(owner) && (tmp.getWidmung() == wdm)) { if (tmp.getArt().equals("p")) { length += tmp.getLength(); } } } return length; }
double function(final String owner, final Integer wdm) { double length = 0; for (final SbObj tmp : objList) { if (tmp.getGuName().equals(owner) && (tmp.getWidmung() == wdm)) { if (tmp.getArt().equals("p")) { length += tmp.getLength(); } } } return length; }
/** * DOCUMENT ME! * * @param owner DOCUMENT ME! * @param wdm gemNr DOCUMENT ME! * * @return DOCUMENT ME! */
DOCUMENT ME
getLengthOffeneAbschn
{ "repo_name": "cismet/watergis-client", "path": "src/main/java/de/cismet/watergis/reports/SbHelper.java", "license": "lgpl-3.0", "size": 18698 }
[ "de.cismet.watergis.reports.types.SbObj" ]
import de.cismet.watergis.reports.types.SbObj;
import de.cismet.watergis.reports.types.*;
[ "de.cismet.watergis" ]
de.cismet.watergis;
384,071
private FSDataOutputStream create(Path f, FsPermission permission, boolean overwrite, boolean createParent, int bufferSize, short replication, long blockSize, Progressable progress, SelfRenewingLease parentFolderLease) throws FileAlreadyExistsException, IOException { LOG.debug("Creating file: {}", f.toString()); if (containsColon(f)) { throw new IOException("Cannot create file " + f + " through WASB that has colons in the name"); } Path absolutePath = makeAbsolute(f); String key = pathToKey(absolutePath); FileMetadata existingMetadata = store.retrieveMetadata(key); if (existingMetadata != null) { if (existingMetadata.isDir()) { throw new FileAlreadyExistsException("Cannot create file " + f + "; already exists as a directory."); } if (!overwrite) { throw new FileAlreadyExistsException("File already exists:" + f); } } Path parentFolder = absolutePath.getParent(); if (parentFolder != null && parentFolder.getParent() != null) { // skip root // Update the parent folder last modified time if the parent folder // already exists. String parentKey = pathToKey(parentFolder); FileMetadata parentMetadata = store.retrieveMetadata(parentKey); if (parentMetadata != null && parentMetadata.isDir() && parentMetadata.getBlobMaterialization() == BlobMaterialization.Explicit) { if (parentFolderLease != null) { store.updateFolderLastModifiedTime(parentKey, parentFolderLease); } else { updateParentFolderLastModifiedTime(key); } } else { // Make sure that the parent folder exists. // Create it using inherited permissions from the first existing directory going up the path Path firstExisting = parentFolder.getParent(); FileMetadata metadata = store.retrieveMetadata(pathToKey(firstExisting)); while(metadata == null) { // Guaranteed to terminate properly because we will eventually hit root, which will return non-null metadata firstExisting = firstExisting.getParent(); metadata = store.retrieveMetadata(pathToKey(firstExisting)); } mkdirs(parentFolder, metadata.getPermissionStatus().getPermission(), true); } } // Mask the permission first (with the default permission mask as well). FsPermission masked = applyUMask(permission, UMaskApplyMode.NewFile); PermissionStatus permissionStatus = createPermissionStatus(masked); OutputStream bufOutStream; if (store.isPageBlobKey(key)) { // Store page blobs directly in-place without renames. bufOutStream = store.storefile(key, permissionStatus); } else { // This is a block blob, so open the output blob stream based on the // encoded key. // String keyEncoded = encodeKey(key); // First create a blob at the real key, pointing back to the temporary file // This accomplishes a few things: // 1. Makes sure we can create a file there. // 2. Makes it visible to other concurrent threads/processes/nodes what // we're // doing. // 3. Makes it easier to restore/cleanup data in the event of us crashing. store.storeEmptyLinkFile(key, keyEncoded, permissionStatus); // The key is encoded to point to a common container at the storage server. // This reduces the number of splits on the server side when load balancing. // Ingress to Azure storage can take advantage of earlier splits. We remove // the root path to the key and prefix a random GUID to the tail (or leaf // filename) of the key. Keys are thus broadly and randomly distributed over // a single container to ease load balancing on the storage server. When the // blob is committed it is renamed to its earlier key. Uncommitted blocks // are not cleaned up and we leave it to Azure storage to garbage collect // these // blocks. bufOutStream = new NativeAzureFsOutputStream(store.storefile( keyEncoded, permissionStatus), key, keyEncoded); } // Construct the data output stream from the buffered output stream. FSDataOutputStream fsOut = new FSDataOutputStream(bufOutStream, statistics); // Increment the counter instrumentation.fileCreated(); // Return data output stream to caller. return fsOut; }
FSDataOutputStream function(Path f, FsPermission permission, boolean overwrite, boolean createParent, int bufferSize, short replication, long blockSize, Progressable progress, SelfRenewingLease parentFolderLease) throws FileAlreadyExistsException, IOException { LOG.debug(STR, f.toString()); if (containsColon(f)) { throw new IOException(STR + f + STR); } Path absolutePath = makeAbsolute(f); String key = pathToKey(absolutePath); FileMetadata existingMetadata = store.retrieveMetadata(key); if (existingMetadata != null) { if (existingMetadata.isDir()) { throw new FileAlreadyExistsException(STR + f + STR); } if (!overwrite) { throw new FileAlreadyExistsException(STR + f); } } Path parentFolder = absolutePath.getParent(); if (parentFolder != null && parentFolder.getParent() != null) { String parentKey = pathToKey(parentFolder); FileMetadata parentMetadata = store.retrieveMetadata(parentKey); if (parentMetadata != null && parentMetadata.isDir() && parentMetadata.getBlobMaterialization() == BlobMaterialization.Explicit) { if (parentFolderLease != null) { store.updateFolderLastModifiedTime(parentKey, parentFolderLease); } else { updateParentFolderLastModifiedTime(key); } } else { Path firstExisting = parentFolder.getParent(); FileMetadata metadata = store.retrieveMetadata(pathToKey(firstExisting)); while(metadata == null) { firstExisting = firstExisting.getParent(); metadata = store.retrieveMetadata(pathToKey(firstExisting)); } mkdirs(parentFolder, metadata.getPermissionStatus().getPermission(), true); } } FsPermission masked = applyUMask(permission, UMaskApplyMode.NewFile); PermissionStatus permissionStatus = createPermissionStatus(masked); OutputStream bufOutStream; if (store.isPageBlobKey(key)) { bufOutStream = store.storefile(key, permissionStatus); } else { store.storeEmptyLinkFile(key, keyEncoded, permissionStatus); bufOutStream = new NativeAzureFsOutputStream(store.storefile( keyEncoded, permissionStatus), key, keyEncoded); } FSDataOutputStream fsOut = new FSDataOutputStream(bufOutStream, statistics); instrumentation.fileCreated(); return fsOut; }
/** * Create an Azure blob and return an output stream to use * to write data to it. * * @param f * @param permission * @param overwrite * @param createParent * @param bufferSize * @param replication * @param blockSize * @param progress * @param parentFolderLease Lease on parent folder (or null if * no lease). * @return * @throws IOException */
Create an Azure blob and return an output stream to use to write data to it
create
{ "repo_name": "jaypatil/hadoop", "path": "hadoop-tools/hadoop-azure/src/main/java/org/apache/hadoop/fs/azure/NativeAzureFileSystem.java", "license": "gpl-3.0", "size": 104024 }
[ "java.io.IOException", "java.io.OutputStream", "org.apache.hadoop.fs.FSDataOutputStream", "org.apache.hadoop.fs.FileAlreadyExistsException", "org.apache.hadoop.fs.Path", "org.apache.hadoop.fs.permission.FsPermission", "org.apache.hadoop.fs.permission.PermissionStatus", "org.apache.hadoop.util.Progressable" ]
import java.io.IOException; import java.io.OutputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.util.Progressable;
import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.fs.permission.*; import org.apache.hadoop.util.*;
[ "java.io", "org.apache.hadoop" ]
java.io; org.apache.hadoop;
1,151,564
public int count__db(HashMap<String, Object> whereKeyValue, String orderBy, int limit, int skip){ String whereQuery = getWhereQuery(whereKeyValue); String countQuery; if(orderBy != null){ countQuery = "SELECT * FROM `City` " + whereQuery + " ORDER BY " + orderBy ; if(limit != 0){ // Select All Query countQuery = countQuery + " " + " LIMIT " + limit + " OFFSET " + skip; }else{ countQuery = countQuery + " LIMIT -1 OFFSET " + skip; } }else{ if(limit != 0){ // Select All Query countQuery = "SELECT * FROM `City` " + whereQuery + " LIMIT " + limit + " OFFSET " + skip; }else{ countQuery = "SELECT * FROM `City` " + whereQuery + " LIMIT -1 OFFSET " + skip; } } SQLiteDatabase db = DbHandler.getInstance(context, DATABASE_NAME).getReadableDatabase(); Cursor cursor = db.rawQuery(countQuery, null); int count = cursor.getCount(); cursor.close(); return count; }
int function(HashMap<String, Object> whereKeyValue, String orderBy, int limit, int skip){ String whereQuery = getWhereQuery(whereKeyValue); String countQuery; if(orderBy != null){ countQuery = STR + whereQuery + STR + orderBy ; if(limit != 0){ countQuery = countQuery + " " + STR + limit + STR + skip; }else{ countQuery = countQuery + STR + skip; } }else{ if(limit != 0){ countQuery = STR + whereQuery + STR + limit + STR + skip; }else{ countQuery = STR + whereQuery + STR + skip; } } SQLiteDatabase db = DbHandler.getInstance(context, DATABASE_NAME).getReadableDatabase(); Cursor cursor = db.rawQuery(countQuery, null); int count = cursor.getCount(); cursor.close(); return count; }
/** * Check count of database. * @param whereKeyValue * @param orderBy * @param limit * @return */
Check count of database
count__db
{ "repo_name": "snaphy/generator-snaphy", "path": "generators/androidSdk/templates/snaphyandroidsdk/src/main/java/com/androidsdk/snaphy/snaphyandroidsdk/db/CityDb.java", "license": "mit", "size": 27048 }
[ "android.database.Cursor", "android.database.sqlite.SQLiteDatabase", "java.util.HashMap" ]
import android.database.Cursor; import android.database.sqlite.SQLiteDatabase; import java.util.HashMap;
import android.database.*; import android.database.sqlite.*; import java.util.*;
[ "android.database", "java.util" ]
android.database; java.util;
197,093
public DayPrayers getPrayerTimes(final GregorianCalendar date) { return getPrayerTimes(new SimpleDate(date)); }
DayPrayers function(final GregorianCalendar date) { return getPrayerTimes(new SimpleDate(date)); }
/** * generates prayer times * @param date the date of prayers * @return a DayPrayers object containing time of different * prayers */
generates prayer times
getPrayerTimes
{ "repo_name": "xperimental/jitl", "path": "src/net/sourceforge/jitl/Jitl.java", "license": "lgpl-2.1", "size": 23909 }
[ "java.util.GregorianCalendar", "net.sourceforge.jitl.astro.SimpleDate" ]
import java.util.GregorianCalendar; import net.sourceforge.jitl.astro.SimpleDate;
import java.util.*; import net.sourceforge.jitl.astro.*;
[ "java.util", "net.sourceforge.jitl" ]
java.util; net.sourceforge.jitl;
856,893
@NotEmpty(message = "org.form.offer.field.descr.empty") @NotNull(message = "org.form.offer.field.descr.empty") @NotBlank(message = "org.form.offer.field.descr.empty") public String getDescr() { return this.m_strDescr; }
@NotEmpty(message = STR) @NotNull(message = STR) @NotBlank(message = STR) String function() { return this.m_strDescr; }
/** * Gets the descr. * * @return the descr */
Gets the descr
getDescr
{ "repo_name": "fraunhoferfokus/particity", "path": "lib_data/src/main/java/de/fraunhofer/fokus/oefit/adhoc/forms/OfferForm.java", "license": "bsd-3-clause", "size": 17963 }
[ "javax.validation.constraints.NotNull", "org.hibernate.validator.constraints.NotBlank", "org.hibernate.validator.constraints.NotEmpty" ]
import javax.validation.constraints.NotNull; import org.hibernate.validator.constraints.NotBlank; import org.hibernate.validator.constraints.NotEmpty;
import javax.validation.constraints.*; import org.hibernate.validator.constraints.*;
[ "javax.validation", "org.hibernate.validator" ]
javax.validation; org.hibernate.validator;
1,031,086
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<DenyAssignmentInner> listByResourceGroup(String resourceGroupName, String filter, Context context);
@ServiceMethod(returns = ReturnType.COLLECTION) PagedIterable<DenyAssignmentInner> listByResourceGroup(String resourceGroupName, String filter, Context context);
/** * Gets deny assignments for a resource group. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param filter The filter to apply on the operation. Use $filter=atScope() to return all deny assignments at or * above the scope. Use $filter=denyAssignmentName eq '{name}' to search deny assignments by name at specified * scope. Use $filter=principalId eq '{id}' to return all deny assignments at, above and below the scope for the * specified principal. Use $filter=gdprExportPrincipalId eq '{id}' to return all deny assignments at, above and * below the scope for the specified principal. This filter is different from the principalId filter as it * returns not only those deny assignments that contain the specified principal is the Principals list but also * those deny assignments that contain the specified principal is the ExcludePrincipals list. Additionally, when * gdprExportPrincipalId filter is used, only the deny assignment name and description properties are returned. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return deny assignments for a resource group. */
Gets deny assignments for a resource group
listByResourceGroup
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-authorization/src/main/java/com/azure/resourcemanager/authorization/fluent/DenyAssignmentsClient.java", "license": "mit", "size": 26696 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.http.rest.PagedIterable", "com.azure.core.util.Context", "com.azure.resourcemanager.authorization.fluent.models.DenyAssignmentInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.http.rest.PagedIterable; import com.azure.core.util.Context; import com.azure.resourcemanager.authorization.fluent.models.DenyAssignmentInner;
import com.azure.core.annotation.*; import com.azure.core.http.rest.*; import com.azure.core.util.*; import com.azure.resourcemanager.authorization.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
477,744
@SuppressWarnings("deprecation") private void showNotification () { int id = getOptions().getId(); if (Build.VERSION.SDK_INT <= 15) { // Notification for HoneyComb to ICS getNotMgr().notify(id, builder.getNotification()); } else { // Notification for Jellybean and above getNotMgr().notify(id, builder.build()); } }
@SuppressWarnings(STR) void function () { int id = getOptions().getId(); if (Build.VERSION.SDK_INT <= 15) { getNotMgr().notify(id, builder.getNotification()); } else { getNotMgr().notify(id, builder.build()); } }
/** * Show as local notification when in background. */
Show as local notification when in background
showNotification
{ "repo_name": "elizabethrego/cordova-plugin-local-notifications-actions", "path": "src/android/notification/Notification.java", "license": "apache-2.0", "size": 9281 }
[ "android.os.Build" ]
import android.os.Build;
import android.os.*;
[ "android.os" ]
android.os;
1,071,591
@Test public void testCreateNewCapacity() { // Start from the default int capacity = 11; for (;;) { int newCapacity = MemoryUtils.createNewCapacity(capacity + 1, capacity); Assertions.assertTrue(newCapacity >= capacity + 1); capacity = newCapacity; if (capacity == Integer.MAX_VALUE) { break; } } // Stop increasing in jumps at the safe max capacity final int safeMaxCapacity = Integer.MAX_VALUE - 8; Assertions.assertEquals(safeMaxCapacity, MemoryUtils.createNewCapacity(safeMaxCapacity - 5, safeMaxCapacity - 10)); // Approach max value in single step increments for (int i = 1; i <= 8; i++) { Assertions.assertEquals(safeMaxCapacity + i, MemoryUtils.createNewCapacity(safeMaxCapacity + i, safeMaxCapacity)); Assertions.assertEquals(safeMaxCapacity + i, MemoryUtils.createNewCapacity(safeMaxCapacity + i, safeMaxCapacity + i - 1)); } Assertions.assertThrows(OutOfMemoryError.class, () -> MemoryUtils.createNewCapacity(1 + Integer.MAX_VALUE, 10)); Assertions.assertThrows(OutOfMemoryError.class, () -> MemoryUtils.createNewCapacity(1 + Integer.MAX_VALUE, safeMaxCapacity)); Assertions.assertThrows(OutOfMemoryError.class, () -> MemoryUtils.createNewCapacity(1 + Integer.MAX_VALUE, Integer.MAX_VALUE)); }
void function() { int capacity = 11; for (;;) { int newCapacity = MemoryUtils.createNewCapacity(capacity + 1, capacity); Assertions.assertTrue(newCapacity >= capacity + 1); capacity = newCapacity; if (capacity == Integer.MAX_VALUE) { break; } } final int safeMaxCapacity = Integer.MAX_VALUE - 8; Assertions.assertEquals(safeMaxCapacity, MemoryUtils.createNewCapacity(safeMaxCapacity - 5, safeMaxCapacity - 10)); for (int i = 1; i <= 8; i++) { Assertions.assertEquals(safeMaxCapacity + i, MemoryUtils.createNewCapacity(safeMaxCapacity + i, safeMaxCapacity)); Assertions.assertEquals(safeMaxCapacity + i, MemoryUtils.createNewCapacity(safeMaxCapacity + i, safeMaxCapacity + i - 1)); } Assertions.assertThrows(OutOfMemoryError.class, () -> MemoryUtils.createNewCapacity(1 + Integer.MAX_VALUE, 10)); Assertions.assertThrows(OutOfMemoryError.class, () -> MemoryUtils.createNewCapacity(1 + Integer.MAX_VALUE, safeMaxCapacity)); Assertions.assertThrows(OutOfMemoryError.class, () -> MemoryUtils.createNewCapacity(1 + Integer.MAX_VALUE, Integer.MAX_VALUE)); }
/** * Test the method to create a new capacity. */
Test the method to create a new capacity
testCreateNewCapacity
{ "repo_name": "aherbert/GDSC-Core", "path": "src/test/java/uk/ac/sussex/gdsc/core/utils/MemoryUtilsTest.java", "license": "gpl-3.0", "size": 3659 }
[ "org.junit.jupiter.api.Assertions" ]
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.*;
[ "org.junit.jupiter" ]
org.junit.jupiter;
1,500,274
public void start(String[] args) throws IOException { try { // read build information and print version. props = ResourceBundle.getBundle("build"); logger.info("Starting proxy version " + props.getString("build.version")); logger.info("Arguments: " + Joiner.on(", ").join(args)); new JCommander(this, args); if (unparsed_params != null) { logger.info("Unparsed arguments: " + Joiner.on(", ").join(unparsed_params)); } // 1. Load the listener configurations. loadListenerConfigurationFile(); loadLogsIngestionConfig(); // Conditionally enter an interactive debugging session for logsIngestionConfig.yaml if (testLogs) { InteractiveLogsTester interactiveLogsTester = new InteractiveLogsTester(this::loadLogsIngestionConfig, prefix); logger.info("Reading line-by-line sample log messages from STDIN"); while (interactiveLogsTester.interactiveTest()) { // empty } System.exit(0); } // 2. Read or create the unique Id for the daemon running on this machine. readOrCreateDaemonId(); if (proxyHost != null) { System.setProperty("http.proxyHost", proxyHost); System.setProperty("https.proxyHost", proxyHost); System.setProperty("http.proxyPort", String.valueOf(proxyPort)); System.setProperty("https.proxyPort", String.valueOf(proxyPort)); }
void function(String[] args) throws IOException { try { props = ResourceBundle.getBundle("build"); logger.info(STR + props.getString(STR)); logger.info(STR + Joiner.on(STR).join(args)); new JCommander(this, args); if (unparsed_params != null) { logger.info(STR + Joiner.on(STR).join(unparsed_params)); } loadListenerConfigurationFile(); loadLogsIngestionConfig(); if (testLogs) { InteractiveLogsTester interactiveLogsTester = new InteractiveLogsTester(this::loadLogsIngestionConfig, prefix); logger.info(STR); while (interactiveLogsTester.interactiveTest()) { } System.exit(0); } readOrCreateDaemonId(); if (proxyHost != null) { System.setProperty(STR, proxyHost); System.setProperty(STR, proxyHost); System.setProperty(STR, String.valueOf(proxyPort)); System.setProperty(STR, String.valueOf(proxyPort)); }
/** * Entry-point for the application. * * @param args Command-line parameters passed on to JCommander to configure the daemon. */
Entry-point for the application
start
{ "repo_name": "NGSEKHAR/javaproject1", "path": "proxy/src/main/java/com/wavefront/agent/AbstractAgent.java", "license": "apache-2.0", "size": 56043 }
[ "com.beust.jcommander.JCommander", "com.google.common.base.Joiner", "com.wavefront.agent.logsharvesting.InteractiveLogsTester", "java.io.IOException", "java.util.ResourceBundle" ]
import com.beust.jcommander.JCommander; import com.google.common.base.Joiner; import com.wavefront.agent.logsharvesting.InteractiveLogsTester; import java.io.IOException; import java.util.ResourceBundle;
import com.beust.jcommander.*; import com.google.common.base.*; import com.wavefront.agent.logsharvesting.*; import java.io.*; import java.util.*;
[ "com.beust.jcommander", "com.google.common", "com.wavefront.agent", "java.io", "java.util" ]
com.beust.jcommander; com.google.common; com.wavefront.agent; java.io; java.util;
1,641,299
void readUser() { String userId; ReaderUtil.clearScreen(); try { System.out.println( "Enter UserId to read:" ); userId = ReaderUtil.readLn(); User user = new User(); user.setUserId( userId ); User ue = rm.readUser( user ); if ( ue != null ) { System.out.println( "USER OBJECT:" ); System.out.println( " userId [" + ue.getUserId() + "]" ); System.out.println( " internalId [" + ue.getInternalId() + "]" ); System.out.println( " description [" + ue.getDescription() + "]" ); System.out.println( " common name [" + ue.getCn() + "]" ); System.out.println( " surname [" + ue.getSn() + "]" ); System.out.println( " orgUnitId [" + ue.getOu() + "]" ); System.out.println( " pwpolicy [" + ue.getPwPolicy() + "]" ); printPosixAccount( ue, "POSIX" ); printTemporal( ue, "USER" ); if ( ue.getRoles() != null ) { for ( UserRole ur : ue.getRoles() ) { printTemporal( ur, "RBACROLE" ); } } if ( ue.getAdminRoles() != null ) { for ( UserAdminRole ur : ue.getAdminRoles() ) { printAdminRole( ur ); printTemporal( ur, "ADMINROLE" ); } } if ( ue.getProperties() != null && ue.getProperties().size() > 0 ) { int ctr = 0; for ( Enumeration e = ue.getProperties().propertyNames(); e.hasMoreElements(); ) { String key = ( String ) e.nextElement(); String val = ue.getProperty( key ); System.out.println( "prop key[" + ctr + "]=" + key ); System.out.println( "prop value[" + ctr++ + "]=" + val ); } } } else { System.out.println( " user [" + userId + "] was not found" ); } System.out.println( "ENTER to continue" ); } catch ( SecurityException e ) { LOG.error( "readUser caught SecurityException rc=" + e.getErrorId() + ", msg=" + e.getMessage(), e ); } ReaderUtil.readChar(); }
void readUser() { String userId; ReaderUtil.clearScreen(); try { System.out.println( STR ); userId = ReaderUtil.readLn(); User user = new User(); user.setUserId( userId ); User ue = rm.readUser( user ); if ( ue != null ) { System.out.println( STR ); System.out.println( STR + ue.getUserId() + "]" ); System.out.println( STR + ue.getInternalId() + "]" ); System.out.println( STR + ue.getDescription() + "]" ); System.out.println( STR + ue.getCn() + "]" ); System.out.println( STR + ue.getSn() + "]" ); System.out.println( STR + ue.getOu() + "]" ); System.out.println( STR + ue.getPwPolicy() + "]" ); printPosixAccount( ue, "POSIX" ); printTemporal( ue, "USER" ); if ( ue.getRoles() != null ) { for ( UserRole ur : ue.getRoles() ) { printTemporal( ur, STR ); } } if ( ue.getAdminRoles() != null ) { for ( UserAdminRole ur : ue.getAdminRoles() ) { printAdminRole( ur ); printTemporal( ur, STR ); } } if ( ue.getProperties() != null && ue.getProperties().size() > 0 ) { int ctr = 0; for ( Enumeration e = ue.getProperties().propertyNames(); e.hasMoreElements(); ) { String key = ( String ) e.nextElement(); String val = ue.getProperty( key ); System.out.println( STR + ctr + "]=" + key ); System.out.println( STR + ctr++ + "]=" + val ); } } } else { System.out.println( STR + userId + STR ); } System.out.println( STR ); } catch ( SecurityException e ) { LOG.error( STR + e.getErrorId() + STR + e.getMessage(), e ); } ReaderUtil.readChar(); }
/** * Description of the Method */
Description of the Method
readUser
{ "repo_name": "PennState/directory-fortress-core-1", "path": "src/test/java/org/apache/directory/fortress/core/ReviewMgrConsole.java", "license": "apache-2.0", "size": 42212 }
[ "java.util.Enumeration", "org.apache.directory.fortress.core.model.User", "org.apache.directory.fortress.core.model.UserAdminRole", "org.apache.directory.fortress.core.model.UserRole" ]
import java.util.Enumeration; import org.apache.directory.fortress.core.model.User; import org.apache.directory.fortress.core.model.UserAdminRole; import org.apache.directory.fortress.core.model.UserRole;
import java.util.*; import org.apache.directory.fortress.core.model.*;
[ "java.util", "org.apache.directory" ]
java.util; org.apache.directory;
509,376
private static String findBytestreamAliasUrl(Individual byteStream, String surrogateUri) { if (byteStream == null) { return null; } String aliasUrl = byteStream.getDataValue(VitroVocabulary.FS_ALIAS_URL); if (aliasUrl == null) { log.error("File had no aliasUrl: '" + surrogateUri + "'"); } else { log.debug("aliasUrl for '" + surrogateUri + "' was '" + aliasUrl + "'"); } return aliasUrl; } // ---------------------------------------------------------------------- // The instance variables and methods. // ---------------------------------------------------------------------- private final String uri; private final String filename; private final String mimeType; private final String bytestreamUri; private final String bytestreamAliasUrl; private FileInfo(Builder builder) { this.uri = builder.uri; this.filename = builder.filename; this.mimeType = builder.mimeType; this.bytestreamUri = builder.bytestreamUri; this.bytestreamAliasUrl = builder.bytestreamAliasUrl; }
static String function(Individual byteStream, String surrogateUri) { if (byteStream == null) { return null; } String aliasUrl = byteStream.getDataValue(VitroVocabulary.FS_ALIAS_URL); if (aliasUrl == null) { log.error(STR + surrogateUri + "'"); } else { log.debug(STR + surrogateUri + STR + aliasUrl + "'"); } return aliasUrl; } private final String uri; private final String filename; private final String mimeType; private final String bytestreamUri; private final String bytestreamAliasUrl; private FileInfo(Builder builder) { this.uri = builder.uri; this.filename = builder.filename; this.mimeType = builder.mimeType; this.bytestreamUri = builder.bytestreamUri; this.bytestreamAliasUrl = builder.bytestreamAliasUrl; }
/** * Get the alias URL from the bytestream, or null if there is none. */
Get the alias URL from the bytestream, or null if there is none
findBytestreamAliasUrl
{ "repo_name": "vivo-project/Vitro", "path": "api/src/main/java/edu/cornell/mannlib/vitro/webapp/filestorage/model/FileInfo.java", "license": "bsd-3-clause", "size": 8624 }
[ "edu.cornell.mannlib.vitro.webapp.beans.Individual", "edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary" ]
import edu.cornell.mannlib.vitro.webapp.beans.Individual; import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.beans.*; import edu.cornell.mannlib.vitro.webapp.dao.*;
[ "edu.cornell.mannlib" ]
edu.cornell.mannlib;
2,823,134
public static void main(String args[]) throws IOException { Project project = null; SwingCommandLine commandLine = new SwingCommandLine(); try { commandLine.parse(args); } catch (IllegalArgumentException e) { System.err.println("Error: " + e.getMessage()); showSynopsis(); ShowHelp.showGeneralOptions(); showCommandLineOptions(); System.exit(1); } catch (HelpRequestedException e) { showSynopsis(); ShowHelp.showGeneralOptions(); showCommandLineOptions(); System.exit(1); } if (commandLine.getProject().getFileCount() > 0) { project = commandLine.getProject(); } // Uncomment one of these to test I18N // Locale.setDefault( Locale.FRENCH ); // Locale.setDefault( Locale.GERMAN ); // Locale.setDefault( Locale.JAPANESE ); // Locale.setDefault( new Locale( "et" )); // Locale.setDefault( new Locale( "fi" )); // Locale.setDefault( new Locale( "es" )); // Locale.setDefault( new Locale( "pl" )); // Load plugins! DetectorFactoryCollection.instance(); FindBugsFrame frame = new FindBugsFrame(); if (project != null) { frame.setProject(project); } else if (commandLine.getBugsFilename().length() > 0) { try { File bugsFile = new File(commandLine.getBugsFilename()); frame.loadBugsFromFile(bugsFile); } catch (Exception e) { System.err.println("Error: " + e.getMessage()); } } else if (SystemProperties.getProperty("findbugs.loadBugsFromURL") != null) { // Allow JNLP launch to specify the URL of a report to load try { String urlspec = SystemProperties.getProperty("findbugs.loadBugsFromURL"); frame.loadBugsFromURL(urlspec); } catch (Exception e) { System.err.println("Error: " + e.getMessage()); } } frame.setPriority(commandLine.getPriority()); if (commandLine.getSettingList() != null) { frame.settingList = commandLine.getSettingList(); if (Arrays.equals(frame.settingList,FindBugs.MIN_EFFORT)) frame.minEffortItem.setSelected(true); else if (Arrays.equals(frame.settingList, FindBugs.MAX_EFFORT)) frame.maxEffortItem.setSelected(true); } frame.setSize(800, 600); frame.setLocationRelativeTo(null); // center the frame frame.setVisible(true); } private int analysisPriority = Thread.NORM_PRIORITY-1;
static void function(String args[]) throws IOException { Project project = null; SwingCommandLine commandLine = new SwingCommandLine(); try { commandLine.parse(args); } catch (IllegalArgumentException e) { System.err.println(STR + e.getMessage()); showSynopsis(); ShowHelp.showGeneralOptions(); showCommandLineOptions(); System.exit(1); } catch (HelpRequestedException e) { showSynopsis(); ShowHelp.showGeneralOptions(); showCommandLineOptions(); System.exit(1); } if (commandLine.getProject().getFileCount() > 0) { project = commandLine.getProject(); } DetectorFactoryCollection.instance(); FindBugsFrame frame = new FindBugsFrame(); if (project != null) { frame.setProject(project); } else if (commandLine.getBugsFilename().length() > 0) { try { File bugsFile = new File(commandLine.getBugsFilename()); frame.loadBugsFromFile(bugsFile); } catch (Exception e) { System.err.println(STR + e.getMessage()); } } else if (SystemProperties.getProperty(STR) != null) { try { String urlspec = SystemProperties.getProperty(STR); frame.loadBugsFromURL(urlspec); } catch (Exception e) { System.err.println(STR + e.getMessage()); } } frame.setPriority(commandLine.getPriority()); if (commandLine.getSettingList() != null) { frame.settingList = commandLine.getSettingList(); if (Arrays.equals(frame.settingList,FindBugs.MIN_EFFORT)) frame.minEffortItem.setSelected(true); else if (Arrays.equals(frame.settingList, FindBugs.MAX_EFFORT)) frame.maxEffortItem.setSelected(true); } frame.setSize(800, 600); frame.setLocationRelativeTo(null); frame.setVisible(true); } private int analysisPriority = Thread.NORM_PRIORITY-1;
/** * Invoke from the command line. * * @param args the command line arguments * @throws IOException */
Invoke from the command line
main
{ "repo_name": "optivo-org/fingbugs-1.3.9-optivo", "path": "src/java/edu/umd/cs/findbugs/gui/FindBugsFrame.java", "license": "lgpl-2.1", "size": 146480 }
[ "edu.umd.cs.findbugs.DetectorFactoryCollection", "edu.umd.cs.findbugs.FindBugs", "edu.umd.cs.findbugs.Project", "edu.umd.cs.findbugs.ShowHelp", "edu.umd.cs.findbugs.SystemProperties", "edu.umd.cs.findbugs.config.CommandLine", "java.io.File", "java.io.IOException", "java.util.Arrays" ]
import edu.umd.cs.findbugs.DetectorFactoryCollection; import edu.umd.cs.findbugs.FindBugs; import edu.umd.cs.findbugs.Project; import edu.umd.cs.findbugs.ShowHelp; import edu.umd.cs.findbugs.SystemProperties; import edu.umd.cs.findbugs.config.CommandLine; import java.io.File; import java.io.IOException; import java.util.Arrays;
import edu.umd.cs.findbugs.*; import edu.umd.cs.findbugs.config.*; import java.io.*; import java.util.*;
[ "edu.umd.cs", "java.io", "java.util" ]
edu.umd.cs; java.io; java.util;
1,060,879
public List<DamageCause> getImmunities();
List<DamageCause> function();
/** * Get a list of damage causes that this entity is immune to. * @return list of damage causes */
Get a list of damage causes that this entity is immune to
getImmunities
{ "repo_name": "smaltby/CustomEntityLibrary", "path": "src/main/java/com/github/customentitylibrary/entities/EntityType.java", "license": "mit", "size": 4821 }
[ "java.util.List", "org.bukkit.event.entity.EntityDamageEvent" ]
import java.util.List; import org.bukkit.event.entity.EntityDamageEvent;
import java.util.*; import org.bukkit.event.entity.*;
[ "java.util", "org.bukkit.event" ]
java.util; org.bukkit.event;
205,983
void showMaterializedViews(DataOutputStream out, List<Table> materializedViews) throws HiveException;
void showMaterializedViews(DataOutputStream out, List<Table> materializedViews) throws HiveException;
/** * Show a list of materialized views. */
Show a list of materialized views
showMaterializedViews
{ "repo_name": "vineetgarg02/hive", "path": "ql/src/java/org/apache/hadoop/hive/ql/metadata/formatting/MetaDataFormatter.java", "license": "apache-2.0", "size": 4146 }
[ "java.io.DataOutputStream", "java.util.List", "org.apache.hadoop.hive.ql.metadata.HiveException", "org.apache.hadoop.hive.ql.metadata.Table" ]
import java.io.DataOutputStream; import java.util.List; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.Table;
import java.io.*; import java.util.*; import org.apache.hadoop.hive.ql.metadata.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
2,454,183
public SparseMatrix matricization(int n) { int numRows = dimensions[n]; int numCols = 1; for (int d = 0; d < numDimensions; d++) { if (d != n) numCols *= dimensions[d]; } Table<Integer, Integer, Double> dataTable = HashBasedTable.create(); Multimap<Integer, Integer> colMap = HashMultimap.create(); for (TensorEntry te : this) { int[] keys = te.keys(); int i = keys[n]; int j = 0; for (int k = 0; k < numDimensions; k++) { if (k == n) continue; int ik = keys[k]; int jk = 1; for (int m = 0; m < k; m++) { if (m == n) continue; jk *= dimensions[m]; } j += ik * jk; } dataTable.put(i, j, te.get()); colMap.put(j, i); } return new SparseMatrix(numRows, numCols, dataTable, colMap); }
SparseMatrix function(int n) { int numRows = dimensions[n]; int numCols = 1; for (int d = 0; d < numDimensions; d++) { if (d != n) numCols *= dimensions[d]; } Table<Integer, Integer, Double> dataTable = HashBasedTable.create(); Multimap<Integer, Integer> colMap = HashMultimap.create(); for (TensorEntry te : this) { int[] keys = te.keys(); int i = keys[n]; int j = 0; for (int k = 0; k < numDimensions; k++) { if (k == n) continue; int ik = keys[k]; int jk = 1; for (int m = 0; m < k; m++) { if (m == n) continue; jk *= dimensions[m]; } j += ik * jk; } dataTable.put(i, j, te.get()); colMap.put(j, i); } return new SparseMatrix(numRows, numCols, dataTable, colMap); }
/** * Re-ordering entries of a tensor into a matrix * * @param n * mode or dimension * @return an unfolded or flatten matrix */
Re-ordering entries of a tensor into a matrix
matricization
{ "repo_name": "martinb741/Recommender", "path": "src/main/i5/las2peer/services/recommender/librec/data/SparseTensor.java", "license": "cc0-1.0", "size": 24275 }
[ "com.google.common.collect.HashBasedTable", "com.google.common.collect.HashMultimap", "com.google.common.collect.Multimap", "com.google.common.collect.Table" ]
import com.google.common.collect.HashBasedTable; import com.google.common.collect.HashMultimap; import com.google.common.collect.Multimap; import com.google.common.collect.Table;
import com.google.common.collect.*;
[ "com.google.common" ]
com.google.common;
287,938
@Test void testEquals() { assertTrue(UtilMath.equals(0.0, 0.0, Double.MIN_VALUE)); assertTrue(UtilMath.equals(1.0, 1.0, Double.MIN_VALUE)); assertTrue(UtilMath.equals(1.0, 1.1, 0.2)); assertTrue(UtilMath.equals(1.1, 1.0, 0.2)); assertFalse(UtilMath.equals(0.0, 0.0, 0.0)); assertFalse(UtilMath.equals(1.0, 0.0, 0.0)); assertFalse(UtilMath.equals(1.0, 0.0, 0.05)); }
void testEquals() { assertTrue(UtilMath.equals(0.0, 0.0, Double.MIN_VALUE)); assertTrue(UtilMath.equals(1.0, 1.0, Double.MIN_VALUE)); assertTrue(UtilMath.equals(1.0, 1.1, 0.2)); assertTrue(UtilMath.equals(1.1, 1.0, 0.2)); assertFalse(UtilMath.equals(0.0, 0.0, 0.0)); assertFalse(UtilMath.equals(1.0, 0.0, 0.0)); assertFalse(UtilMath.equals(1.0, 0.0, 0.05)); }
/** * Test the equals function. */
Test the equals function
testEquals
{ "repo_name": "b3dgs/lionengine", "path": "lionengine-core/src/test/java/com/b3dgs/lionengine/UtilMathTest.java", "license": "gpl-3.0", "size": 11691 }
[ "com.b3dgs.lionengine.UtilAssert" ]
import com.b3dgs.lionengine.UtilAssert;
import com.b3dgs.lionengine.*;
[ "com.b3dgs.lionengine" ]
com.b3dgs.lionengine;
831,183
public void testGlobalCheckpointIsSafe() throws Exception { startCluster(rarely() ? 5 : 3); final int numberOfShards = 1 + randomInt(2); assertAcked(prepareCreate("test") .setSettings(Settings.builder() .put(indexSettings()) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) )); ensureGreen(); AtomicBoolean stopGlobalCheckpointFetcher = new AtomicBoolean(); Map<Integer, Long> shardToGcp = new ConcurrentHashMap<>(); for (int i = 0; i < numberOfShards; i++) { shardToGcp.put(i, SequenceNumbers.NO_OPS_PERFORMED); } final Thread globalCheckpointSampler = new Thread(() -> { while (stopGlobalCheckpointFetcher.get() == false) { try { for (ShardStats shardStats : client().admin().indices().prepareStats("test").clear().get().getShards()) { final int shardId = shardStats.getShardRouting().id(); final long globalCheckpoint = shardStats.getSeqNoStats().getGlobalCheckpoint(); shardToGcp.compute(shardId, (i, v) -> Math.max(v, globalCheckpoint)); } } catch (Exception e) { // ignore logger.debug("failed to fetch shard stats", e); } } }); globalCheckpointSampler.start(); try (BackgroundIndexer indexer = new BackgroundIndexer("test", "_doc", client(), -1, RandomizedTest.scaledRandomIntBetween(2, 5), false, random())) { indexer.setRequestTimeout(TimeValue.ZERO); indexer.setIgnoreIndexingFailures(true); indexer.setFailureAssertion(e -> {}); indexer.start(-1); waitForDocs(randomIntBetween(1, 100), indexer); logger.info("injecting failures"); injectTranslogFailures(); logger.info("stopping indexing"); } logger.info("full cluster restart"); internalCluster().fullRestart(new InternalTestCluster.RestartCallback() {
void function() throws Exception { startCluster(rarely() ? 5 : 3); final int numberOfShards = 1 + randomInt(2); assertAcked(prepareCreate("test") .setSettings(Settings.builder() .put(indexSettings()) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, numberOfShards) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, randomInt(2)) )); ensureGreen(); AtomicBoolean stopGlobalCheckpointFetcher = new AtomicBoolean(); Map<Integer, Long> shardToGcp = new ConcurrentHashMap<>(); for (int i = 0; i < numberOfShards; i++) { shardToGcp.put(i, SequenceNumbers.NO_OPS_PERFORMED); } final Thread globalCheckpointSampler = new Thread(() -> { while (stopGlobalCheckpointFetcher.get() == false) { try { for (ShardStats shardStats : client().admin().indices().prepareStats("test").clear().get().getShards()) { final int shardId = shardStats.getShardRouting().id(); final long globalCheckpoint = shardStats.getSeqNoStats().getGlobalCheckpoint(); shardToGcp.compute(shardId, (i, v) -> Math.max(v, globalCheckpoint)); } } catch (Exception e) { logger.debug(STR, e); } } }); globalCheckpointSampler.start(); try (BackgroundIndexer indexer = new BackgroundIndexer("test", "_doc", client(), -1, RandomizedTest.scaledRandomIntBetween(2, 5), false, random())) { indexer.setRequestTimeout(TimeValue.ZERO); indexer.setIgnoreIndexingFailures(true); indexer.setFailureAssertion(e -> {}); indexer.start(-1); waitForDocs(randomIntBetween(1, 100), indexer); logger.info(STR); injectTranslogFailures(); logger.info(STR); } logger.info(STR); internalCluster().fullRestart(new InternalTestCluster.RestartCallback() {
/** * This test checks that all operations below the global checkpoint are properly persisted. * It simulates a full power outage by preventing translog checkpoint files to be written and restart the cluster. This means that * all un-fsynced data will be lost. */
This test checks that all operations below the global checkpoint are properly persisted. It simulates a full power outage by preventing translog checkpoint files to be written and restart the cluster. This means that all un-fsynced data will be lost
testGlobalCheckpointIsSafe
{ "repo_name": "ern/elasticsearch", "path": "server/src/internalClusterTest/java/org/elasticsearch/discovery/DiskDisruptionIT.java", "license": "apache-2.0", "size": 6580 }
[ "com.carrotsearch.randomizedtesting.RandomizedTest", "java.util.Map", "java.util.concurrent.ConcurrentHashMap", "java.util.concurrent.atomic.AtomicBoolean", "org.elasticsearch.action.admin.indices.stats.ShardStats", "org.elasticsearch.cluster.metadata.IndexMetadata", "org.elasticsearch.common.settings.Settings", "org.elasticsearch.core.TimeValue", "org.elasticsearch.index.seqno.SequenceNumbers", "org.elasticsearch.test.BackgroundIndexer", "org.elasticsearch.test.InternalTestCluster", "org.elasticsearch.test.hamcrest.ElasticsearchAssertions" ]
import com.carrotsearch.randomizedtesting.RandomizedTest; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import org.elasticsearch.action.admin.indices.stats.ShardStats; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.InternalTestCluster; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions;
import com.carrotsearch.randomizedtesting.*; import java.util.*; import java.util.concurrent.*; import java.util.concurrent.atomic.*; import org.elasticsearch.action.admin.indices.stats.*; import org.elasticsearch.cluster.metadata.*; import org.elasticsearch.common.settings.*; import org.elasticsearch.core.*; import org.elasticsearch.index.seqno.*; import org.elasticsearch.test.*; import org.elasticsearch.test.hamcrest.*;
[ "com.carrotsearch.randomizedtesting", "java.util", "org.elasticsearch.action", "org.elasticsearch.cluster", "org.elasticsearch.common", "org.elasticsearch.core", "org.elasticsearch.index", "org.elasticsearch.test" ]
com.carrotsearch.randomizedtesting; java.util; org.elasticsearch.action; org.elasticsearch.cluster; org.elasticsearch.common; org.elasticsearch.core; org.elasticsearch.index; org.elasticsearch.test;
318,826
public void setPaymentGroupService(PaymentGroupService paymentGroupService) { this.paymentGroupService = paymentGroupService; }
void function(PaymentGroupService paymentGroupService) { this.paymentGroupService = paymentGroupService; }
/** * Sets the paymentGroupService attribute value. * * @param paymentGroupService The paymentGroupService to set. */
Sets the paymentGroupService attribute value
setPaymentGroupService
{ "repo_name": "Ariah-Group/Finance", "path": "af_webapp/src/main/java/org/kuali/kfs/module/purap/service/impl/PdpExtractServiceImpl.java", "license": "apache-2.0", "size": 53829 }
[ "org.kuali.kfs.pdp.service.PaymentGroupService" ]
import org.kuali.kfs.pdp.service.PaymentGroupService;
import org.kuali.kfs.pdp.service.*;
[ "org.kuali.kfs" ]
org.kuali.kfs;
500,858
protected final void setRemoteAddress(InetAddress addr) { m_remoteAddr = addr; }
final void function(InetAddress addr) { m_remoteAddr = addr; }
/** * Set the remote address * * @param addr InetAddress */
Set the remote address
setRemoteAddress
{ "repo_name": "arcusys/Liferay-CIFS", "path": "source/java/org/alfresco/jlan/smb/server/PacketHandler.java", "license": "gpl-3.0", "size": 5642 }
[ "java.net.InetAddress" ]
import java.net.InetAddress;
import java.net.*;
[ "java.net" ]
java.net;
2,315,626
EEnum getHttpMethod();
EEnum getHttpMethod();
/** * Returns the meta object for enum '{@link io.opensemantics.semiotics.model.assessment.HttpMethod <em>Http Method</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for enum '<em>Http Method</em>'. * @see io.opensemantics.semiotics.model.assessment.HttpMethod * @generated */
Returns the meta object for enum '<code>io.opensemantics.semiotics.model.assessment.HttpMethod Http Method</code>'.
getHttpMethod
{ "repo_name": "CoastalHacking/semiotics-main", "path": "bundles/io.opensemantics.semiotics.model.assessment/src-gen/io/opensemantics/semiotics/model/assessment/AssessmentPackage.java", "license": "apache-2.0", "size": 151116 }
[ "org.eclipse.emf.ecore.EEnum" ]
import org.eclipse.emf.ecore.EEnum;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,795,347
@Override public List<TWorkItemBean> loadMsProjectTasksForImport(Integer entityID, int entityType) { Criteria crit = new Criteria(); crit.addJoin(WORKITEMKEY, BaseTMSProjectTaskPeer.WORKITEM); crit.addJoin(CATEGORYKEY, BaseTListTypePeer.PKEY); crit.add(BaseTListTypePeer.TYPEFLAG, TListTypeBean.TYPEFLAGS.TASK); switch (entityType) { case SystemFields.RELEASESCHEDULED: crit.add(RELSCHEDULEDKEY, entityID); break; case SystemFields.PROJECT: crit.add(PROJECTKEY, entityID); break; default: return null; } try { return convertTorqueListToBeanList(doSelect(crit)); } catch(Exception e) { LOGGER.error("Loading WorkItems with task by entityID " + entityID + " entityType " + entityType + failedWith + e.getMessage()); return null; } }
List<TWorkItemBean> function(Integer entityID, int entityType) { Criteria crit = new Criteria(); crit.addJoin(WORKITEMKEY, BaseTMSProjectTaskPeer.WORKITEM); crit.addJoin(CATEGORYKEY, BaseTListTypePeer.PKEY); crit.add(BaseTListTypePeer.TYPEFLAG, TListTypeBean.TYPEFLAGS.TASK); switch (entityType) { case SystemFields.RELEASESCHEDULED: crit.add(RELSCHEDULEDKEY, entityID); break; case SystemFields.PROJECT: crit.add(PROJECTKEY, entityID); break; default: return null; } try { return convertTorqueListToBeanList(doSelect(crit)); } catch(Exception e) { LOGGER.error(STR + entityID + STR + entityType + failedWith + e.getMessage()); return null; } }
/** * Loads workItems for import which has associated msProjectTaskBean objects * and are of type task, include also the closed and deleted, archived tasks * @param entityID * @param entityType */
Loads workItems for import which has associated msProjectTaskBean objects and are of type task, include also the closed and deleted, archived tasks
loadMsProjectTasksForImport
{ "repo_name": "trackplus/Genji", "path": "src/main/java/com/aurel/track/persist/TWorkItemPeer.java", "license": "gpl-3.0", "size": 84464 }
[ "com.aurel.track.beans.TListTypeBean", "com.aurel.track.beans.TWorkItemBean", "com.aurel.track.fieldType.constants.SystemFields", "java.util.List", "org.apache.torque.util.Criteria" ]
import com.aurel.track.beans.TListTypeBean; import com.aurel.track.beans.TWorkItemBean; import com.aurel.track.fieldType.constants.SystemFields; import java.util.List; import org.apache.torque.util.Criteria;
import com.aurel.track.*; import com.aurel.track.beans.*; import java.util.*; import org.apache.torque.util.*;
[ "com.aurel.track", "java.util", "org.apache.torque" ]
com.aurel.track; java.util; org.apache.torque;
44,178
public void updateConnection(MapServerData newData) throws NoSavedConnectionException;
void function(MapServerData newData) throws NoSavedConnectionException;
/** * Sets all connection properties by the new. * * @param newData * @throws NoSavedConnectionException */
Sets all connection properties by the new
updateConnection
{ "repo_name": "MReichenbach/visitmeta", "path": "common/src/main/java/de/hshannover/f4/trust/visitmeta/interfaces/ifmap/ConnectionManager.java", "license": "apache-2.0", "size": 7920 }
[ "de.hshannover.f4.trust.visitmeta.exceptions.ifmap.NoSavedConnectionException", "de.hshannover.f4.trust.visitmeta.interfaces.data.MapServerData" ]
import de.hshannover.f4.trust.visitmeta.exceptions.ifmap.NoSavedConnectionException; import de.hshannover.f4.trust.visitmeta.interfaces.data.MapServerData;
import de.hshannover.f4.trust.visitmeta.exceptions.ifmap.*; import de.hshannover.f4.trust.visitmeta.interfaces.data.*;
[ "de.hshannover.f4" ]
de.hshannover.f4;
1,134,999
public void readPacketData(PacketBuffer buf) throws IOException { this.windowId = buf.readByte(); this.uid = buf.readShort(); this.accepted = buf.readByte() != 0; }
void function(PacketBuffer buf) throws IOException { this.windowId = buf.readByte(); this.uid = buf.readShort(); this.accepted = buf.readByte() != 0; }
/** * Reads the raw packet data from the data stream. */
Reads the raw packet data from the data stream
readPacketData
{ "repo_name": "SkidJava/BaseClient", "path": "new_1.8.8/net/minecraft/network/play/client/C0FPacketConfirmTransaction.java", "license": "gpl-2.0", "size": 1497 }
[ "java.io.IOException", "net.minecraft.network.PacketBuffer" ]
import java.io.IOException; import net.minecraft.network.PacketBuffer;
import java.io.*; import net.minecraft.network.*;
[ "java.io", "net.minecraft.network" ]
java.io; net.minecraft.network;
2,192,515
ProcessorEntity deleteProcessor(Revision revision, String processorId); // ---------------------------------------- // Connections methods // ----------------------------------------
ProcessorEntity deleteProcessor(Revision revision, String processorId);
/** * Deletes the specified processor. * * @param revision Revision to compare with current base revision * @param processorId The processor id to delete * @return snapshot */
Deletes the specified processor
deleteProcessor
{ "repo_name": "jskora/nifi", "path": "nifi-nar-bundles/nifi-framework-bundle/nifi-framework/nifi-web/nifi-web-api/src/main/java/org/apache/nifi/web/NiFiServiceFacade.java", "license": "apache-2.0", "size": 83710 }
[ "org.apache.nifi.web.api.entity.ProcessorEntity" ]
import org.apache.nifi.web.api.entity.ProcessorEntity;
import org.apache.nifi.web.api.entity.*;
[ "org.apache.nifi" ]
org.apache.nifi;
1,253,364
@Before @SuppressWarnings("unchecked") public void setup() { TAGS.clear(); TAGS.add(TAG_1); TAGS.add(TAG_2); TAGS.add(TAG_3); STATUSES.clear(); STATUSES.add(CommandStatus.ACTIVE); STATUSES.add(CommandStatus.INACTIVE); this.root = (Root<Command>) Mockito.mock(Root.class); this.cq = Mockito.mock(CriteriaQuery.class); this.cb = Mockito.mock(CriteriaBuilder.class); final Path<String> commandNamePath = (Path<String>) Mockito.mock(Path.class); final Predicate equalNamePredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.name)).thenReturn(commandNamePath); Mockito.when(this.cb.equal(commandNamePath, NAME)) .thenReturn(equalNamePredicate); final Path<String> userNamePath = (Path<String>) Mockito.mock(Path.class); final Predicate equalUserNamePredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.user)).thenReturn(userNamePath); Mockito.when(this.cb.equal(userNamePath, USER_NAME)) .thenReturn(equalUserNamePredicate); final Path<CommandStatus> statusPath = (Path<CommandStatus>) Mockito.mock(Path.class); final Predicate equalStatusPredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.status)).thenReturn(statusPath); Mockito.when(this.cb.equal(Mockito.eq(statusPath), Mockito.any(CommandStatus.class))) .thenReturn(equalStatusPredicate); final Expression<Set<String>> tagExpression = (Expression<Set<String>>) Mockito.mock(Expression.class); final Predicate isMemberTagPredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.tags)).thenReturn(tagExpression); Mockito.when(this.cb.isMember(Mockito.any(String.class), Mockito.eq(tagExpression))) .thenReturn(isMemberTagPredicate); }
@SuppressWarnings(STR) void function() { TAGS.clear(); TAGS.add(TAG_1); TAGS.add(TAG_2); TAGS.add(TAG_3); STATUSES.clear(); STATUSES.add(CommandStatus.ACTIVE); STATUSES.add(CommandStatus.INACTIVE); this.root = (Root<Command>) Mockito.mock(Root.class); this.cq = Mockito.mock(CriteriaQuery.class); this.cb = Mockito.mock(CriteriaBuilder.class); final Path<String> commandNamePath = (Path<String>) Mockito.mock(Path.class); final Predicate equalNamePredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.name)).thenReturn(commandNamePath); Mockito.when(this.cb.equal(commandNamePath, NAME)) .thenReturn(equalNamePredicate); final Path<String> userNamePath = (Path<String>) Mockito.mock(Path.class); final Predicate equalUserNamePredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.user)).thenReturn(userNamePath); Mockito.when(this.cb.equal(userNamePath, USER_NAME)) .thenReturn(equalUserNamePredicate); final Path<CommandStatus> statusPath = (Path<CommandStatus>) Mockito.mock(Path.class); final Predicate equalStatusPredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.status)).thenReturn(statusPath); Mockito.when(this.cb.equal(Mockito.eq(statusPath), Mockito.any(CommandStatus.class))) .thenReturn(equalStatusPredicate); final Expression<Set<String>> tagExpression = (Expression<Set<String>>) Mockito.mock(Expression.class); final Predicate isMemberTagPredicate = Mockito.mock(Predicate.class); Mockito.when(this.root.get(Command_.tags)).thenReturn(tagExpression); Mockito.when(this.cb.isMember(Mockito.any(String.class), Mockito.eq(tagExpression))) .thenReturn(isMemberTagPredicate); }
/** * Setup some variables. */
Setup some variables
setup
{ "repo_name": "chen0031/genie", "path": "genie-server/src/test/java/com/netflix/genie/server/repository/jpa/TestCommandSpecs.java", "license": "apache-2.0", "size": 10395 }
[ "com.netflix.genie.common.model.Command", "com.netflix.genie.common.model.CommandStatus", "java.util.Set", "javax.persistence.criteria.CriteriaBuilder", "javax.persistence.criteria.CriteriaQuery", "javax.persistence.criteria.Expression", "javax.persistence.criteria.Path", "javax.persistence.criteria.Predicate", "javax.persistence.criteria.Root", "org.mockito.Mockito" ]
import com.netflix.genie.common.model.Command; import com.netflix.genie.common.model.CommandStatus; import java.util.Set; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.Expression; import javax.persistence.criteria.Path; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import org.mockito.Mockito;
import com.netflix.genie.common.model.*; import java.util.*; import javax.persistence.criteria.*; import org.mockito.*;
[ "com.netflix.genie", "java.util", "javax.persistence", "org.mockito" ]
com.netflix.genie; java.util; javax.persistence; org.mockito;
262,182
private Property getIdPropertyOneToMany(EntityRelation _entityRelation) throws InvalidEntityException { final Entity sourceEntity = getSourceEntity(_entityRelation); final Entity relationEntity = getRelationEntity(_entityRelation); final String relationField = _entityRelation.getRelationFieldNameToCreate(); // if there is no relation field, construct the property. if (relationField == null) { return relationEntity.addStringProperty( sourceEntity.getClassName().toLowerCase() + ID_PROPERTY_POSTFIX).notNull().getProperty(); } // If there is relation field, get the property from there for (Property property : relationEntity.getProperties()) { if (property.getPropertyName().equals(relationField)) { return property; } } // otherwise, add the relationField. return relationEntity.addStringProperty(relationField).notNull().getProperty(); }
Property function(EntityRelation _entityRelation) throws InvalidEntityException { final Entity sourceEntity = getSourceEntity(_entityRelation); final Entity relationEntity = getRelationEntity(_entityRelation); final String relationField = _entityRelation.getRelationFieldNameToCreate(); if (relationField == null) { return relationEntity.addStringProperty( sourceEntity.getClassName().toLowerCase() + ID_PROPERTY_POSTFIX).notNull().getProperty(); } for (Property property : relationEntity.getProperties()) { if (property.getPropertyName().equals(relationField)) { return property; } } return relationEntity.addStringProperty(relationField).notNull().getProperty(); }
/** * Returns the Id property of the OneToMany relation. * @param _entityRelation The relation to get the Id property from. * @return the Id Property. * @throws InvalidEntityException */
Returns the Id property of the OneToMany relation
getIdPropertyOneToMany
{ "repo_name": "TheolZacharopoulos/greenDao-schema-builder", "path": "src/main/java/com/greendao_schema_builder/relationship/EntityRelationBuilder.java", "license": "apache-2.0", "size": 5453 }
[ "com.greendao_schema_builder.errors.InvalidEntityException", "de.greenrobot.daogenerator.Entity", "de.greenrobot.daogenerator.Property" ]
import com.greendao_schema_builder.errors.InvalidEntityException; import de.greenrobot.daogenerator.Entity; import de.greenrobot.daogenerator.Property;
import com.greendao_schema_builder.errors.*; import de.greenrobot.daogenerator.*;
[ "com.greendao_schema_builder.errors", "de.greenrobot.daogenerator" ]
com.greendao_schema_builder.errors; de.greenrobot.daogenerator;
1,769,874
public TaskKey getTaskKey(String prefix, String jobNumber, String fullId) { String taskComponent = fullId; if (fullId == null) { taskComponent = ""; } else { String expectedPrefix = prefix + jobNumber + "_"; if (fullId.startsWith(expectedPrefix) && fullId.length() > expectedPrefix.length()) { taskComponent = fullId.substring(expectedPrefix.length()); } } return new TaskKey(this.jobKey, taskComponent); }
TaskKey function(String prefix, String jobNumber, String fullId) { String taskComponent = fullId; if (fullId == null) { taskComponent = ""; } else { String expectedPrefix = prefix + jobNumber + "_"; if (fullId.startsWith(expectedPrefix) && fullId.length() > expectedPrefix.length()) { taskComponent = fullId.substring(expectedPrefix.length()); } } return new TaskKey(this.jobKey, taskComponent); }
/** * Returns the Task ID or Task Attempt ID, stripped of the leading job ID, * appended to the job row key. */
Returns the Task ID or Task Attempt ID, stripped of the leading job ID, appended to the job row key
getTaskKey
{ "repo_name": "InMobi/hraven", "path": "hraven-etl/src/main/java/com/twitter/hraven/mapreduce/JobHistoryListener.java", "license": "apache-2.0", "size": 11167 }
[ "com.twitter.hraven.TaskKey" ]
import com.twitter.hraven.TaskKey;
import com.twitter.hraven.*;
[ "com.twitter.hraven" ]
com.twitter.hraven;
517,630
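A hedged illustration of how the getTaskKey entry above strips the leading job ID; the prefix, job number, and full task attempt ID below are made-up sample values, and jobKey comes from the enclosing JobHistoryListener instance.

String prefix = "task_";                                   // assumed ID prefix
String jobNumber = "201403311600_0001";                    // sample job number
String fullId = "task_201403311600_0001_m_000001";         // sample full task attempt ID
String expectedPrefix = prefix + jobNumber + "_";          // "task_201403311600_0001_"
String taskComponent =
    fullId.startsWith(expectedPrefix) && fullId.length() > expectedPrefix.length()
        ? fullId.substring(expectedPrefix.length())        // -> "m_000001"
        : fullId;
// new TaskKey(this.jobKey, taskComponent) then appends "m_000001" to the job row key.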
@Override public void setActiveEditor(IEditorPart part) { super.setActiveEditor(part); activeEditorPart = part; // Switch to the new selection provider. // if (selectionProvider != null) { selectionProvider.removeSelectionChangedListener(this); } if (part == null) { selectionProvider = null; } else { selectionProvider = part.getSite().getSelectionProvider(); selectionProvider.addSelectionChangedListener(this); // Fake a selection changed event to update the menus. // if (selectionProvider.getSelection() != null) { selectionChanged(new SelectionChangedEvent(selectionProvider, selectionProvider.getSelection())); } } }
void function(IEditorPart part) { super.setActiveEditor(part); activeEditorPart = part; if (selectionProvider != null) { selectionProvider.removeSelectionChangedListener(this); } if (part == null) { selectionProvider = null; } else { selectionProvider = part.getSite().getSelectionProvider(); selectionProvider.addSelectionChangedListener(this); if (selectionProvider.getSelection() != null) { selectionChanged(new SelectionChangedEvent(selectionProvider, selectionProvider.getSelection())); } } }
/** * When the active editor changes, this remembers the change and registers with it as a selection provider. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
When the active editor changes, this remembers the change and registers with it as a selection provider.
setActiveEditor
{ "repo_name": "CloudScale-Project/Environment", "path": "plugins/org.scaledl.overview.editor/src/org/scaledl/overview/presentation/OverviewActionBarContributor.java", "license": "epl-1.0", "size": 14800 }
[ "org.eclipse.jface.viewers.SelectionChangedEvent", "org.eclipse.ui.IEditorPart" ]
import org.eclipse.jface.viewers.SelectionChangedEvent; import org.eclipse.ui.IEditorPart;
import org.eclipse.jface.viewers.*; import org.eclipse.ui.*;
[ "org.eclipse.jface", "org.eclipse.ui" ]
org.eclipse.jface; org.eclipse.ui;
2,258,836
interface WithAllOptions { PermissionsUpdateDefinitionStages.WithExecute withEmails(List<String> emails); }
interface WithAllOptions { PermissionsUpdateDefinitionStages.WithExecute withEmails(List<String> emails); }
/** * The email addresses of the users. * * @return next definition stage */
The email addresses of the users
withEmails
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/cognitiveservices/ms-azure-cs-luis-authoring/src/main/java/com/microsoft/azure/cognitiveservices/language/luis/authoring/Permissions.java", "license": "mit", "size": 11885 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
1,382,417
public void setApiDir( FileResource apiDir ) { this.apiDir = apiDir; }
void function( FileResource apiDir ) { this.apiDir = apiDir; }
/** * The directory in which to install {@link Artifact.Type#API} artifacts. * When null, will not install them. * * @param apiDir * The API directory */
The directory in which to install <code>Artifact.Type#API</code> artifacts. When null, will not install them
setApiDir
{ "repo_name": "tliron/creel", "path": "components/creel-ant/source/com/threecrickets/creel/ant/CleanTask.java", "license": "lgpl-3.0", "size": 4531 }
[ "org.apache.tools.ant.types.resources.FileResource" ]
import org.apache.tools.ant.types.resources.FileResource;
import org.apache.tools.ant.types.resources.*;
[ "org.apache.tools" ]
org.apache.tools;
917,415
public static URI uriSearch(final URI baseUri) { final UriBuilder bld = UriBuilder.fromUri(baseUri).path("conventionSearches"); return bld.build(); }
static URI function(final URI baseUri) { final UriBuilder bld = UriBuilder.fromUri(baseUri).path(STR); return bld.build(); }
/** * Builds a URI. * * @param baseUri the base URI, not null * @return the URI, not null */
Builds a URI
uriSearch
{ "repo_name": "McLeodMoores/starling", "path": "projects/master-rest-client/src/main/java/com/opengamma/master/convention/impl/DataConventionMasterUris.java", "license": "apache-2.0", "size": 1499 }
[ "javax.ws.rs.core.UriBuilder" ]
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.*;
[ "javax.ws" ]
javax.ws;
1,597,837
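A minimal usage sketch for the uriSearch entry above; DataConventionMasterUris is the enclosing class named in the entry's path metadata, and the base URI value is an assumption.

import java.net.URI;

URI baseUri = URI.create("http://localhost:8080/jax");            // assumed base endpoint
URI searchUri = DataConventionMasterUris.uriSearch(baseUri);
// searchUri is baseUri with the "conventionSearches" path segment appended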
public final void setInput(final IJavaElement element) { computeAndDoSetInput(null, element, false); }
final void function(final IJavaElement element) { computeAndDoSetInput(null, element, false); }
/** * Sets the input for this view. * * @param element the java element */
Sets the input for this view
setInput
{ "repo_name": "elucash/eclipse-oxygen", "path": "org.eclipse.jdt.ui/src/org/eclipse/jdt/internal/ui/infoviews/AbstractInfoView.java", "license": "epl-1.0", "size": 28626 }
[ "org.eclipse.jdt.core.IJavaElement" ]
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.*;
[ "org.eclipse.jdt" ]
org.eclipse.jdt;
2,740,137
public T caseDataItem(final DataItem object) { return null; }
T function(final DataItem object) { return null; }
/** * Returns the result of interpreting the object as an instance of '<em>Data * Item</em>'. <!-- begin-user-doc --> This implementation returns null; * returning a non-null result will terminate the switch. <!-- end-user-doc --> * * @param object the target of the switch. * @return the result of interpreting the object as an instance of '<em>Data * Item</em>'. * @see #doSwitch(org.eclipse.emf.ecore.EObject) doSwitch(EObject) * @generated */
Returns the result of interpreting the object as an instance of 'Data Item'. This implementation returns null; returning a non-null result will terminate the switch.
caseDataItem
{ "repo_name": "debrief/debrief", "path": "info.limpet.stackedcharts.model/src/info/limpet/stackedcharts/model/util/StackedchartsSwitch.java", "license": "epl-1.0", "size": 20487 }
[ "info.limpet.stackedcharts.model.DataItem" ]
import info.limpet.stackedcharts.model.DataItem;
import info.limpet.stackedcharts.model.*;
[ "info.limpet.stackedcharts" ]
info.limpet.stackedcharts;
628,455
public void reloadSubscriptionsFromStorage() throws AndesException { Map<String, List<String>> results = AndesContext.getInstance().getAndesContextStore() .getAllStoredDurableSubscriptions(); Set<AndesSubscription> dbSubscriptions = new HashSet<>(); Set<AndesSubscription> localSubscriptions = new HashSet<>(); Set<AndesSubscription> copyOfLocalSubscriptions = new HashSet<>(); //get all local subscriptions in registry Iterable<AndesSubscription> registeredLocalSubscriptions = getAllLocalSubscriptions(); for (AndesSubscription registeredLocalSubscription : registeredLocalSubscriptions) { localSubscriptions.add(registeredLocalSubscription); } copyOfLocalSubscriptions.addAll(localSubscriptions); //get all subscriptions in DB for (Map.Entry<String, List<String>> entry : results.entrySet()) { for (String subscriptionAsStr : entry.getValue()) { try { AndesSubscription subscription = new AndesSubscription(subscriptionAsStr); dbSubscriptions.add(subscription); } catch (SubscriptionException e) { log.error("Could not add subscription: " + subscriptionAsStr, e); } } } //if DB does not have the local subscription add it localSubscriptions.removeAll(dbSubscriptions); final List<AndesSubscription> subscriptionsToBeDisconnected = new ArrayList<>(); for (AndesSubscription subscription : localSubscriptions) { //If there are 2 subscriptions with the same subscription identifier but a different connected node, // disconnect local subscription. boolean conflictingSubscriberFound = false; for (AndesSubscription dbSubscription : dbSubscriptions) { if (!sharedSubscribersAllowed && (TOPIC_EXCHANGE_NAME.equals(subscription.getStorageQueue().getMessageRouter().getName()))) { if (subscription.getStorageQueue().equals(dbSubscription.getStorageQueue()) && !(subscription.getSubscriberConnection().getConnectedNode() .equals(dbSubscription.getSubscriberConnection().getConnectedNode()))) { conflictingSubscriberFound = true; subscriptionsToBeDisconnected.add(subscription); break; } } } if (!conflictingSubscriberFound) { log.warn("Subscriptions are not in sync. Local Subscription available " + "in subscription registry of node " + localNodeId + " but not in DB. Thus adding to DB subscription=" + subscription.toString()); andesContextStore.storeDurableSubscription(subscription); } }
void function() throws AndesException { Map<String, List<String>> results = AndesContext.getInstance().getAndesContextStore() .getAllStoredDurableSubscriptions(); Set<AndesSubscription> dbSubscriptions = new HashSet<>(); Set<AndesSubscription> localSubscriptions = new HashSet<>(); Set<AndesSubscription> copyOfLocalSubscriptions = new HashSet<>(); Iterable<AndesSubscription> registeredLocalSubscriptions = getAllLocalSubscriptions(); for (AndesSubscription registeredLocalSubscription : registeredLocalSubscriptions) { localSubscriptions.add(registeredLocalSubscription); } copyOfLocalSubscriptions.addAll(localSubscriptions); for (Map.Entry<String, List<String>> entry : results.entrySet()) { for (String subscriptionAsStr : entry.getValue()) { try { AndesSubscription subscription = new AndesSubscription(subscriptionAsStr); dbSubscriptions.add(subscription); } catch (SubscriptionException e) { log.error(STR + subscriptionAsStr, e); } } } localSubscriptions.removeAll(dbSubscriptions); final List<AndesSubscription> subscriptionsToBeDisconnected = new ArrayList<>(); for (AndesSubscription subscription : localSubscriptions) { boolean conflictingSubscriberFound = false; for (AndesSubscription dbSubscription : dbSubscriptions) { if (!sharedSubscribersAllowed && (TOPIC_EXCHANGE_NAME.equals(subscription.getStorageQueue().getMessageRouter().getName()))) { if (subscription.getStorageQueue().equals(dbSubscription.getStorageQueue()) && !(subscription.getSubscriberConnection().getConnectedNode() .equals(dbSubscription.getSubscriberConnection().getConnectedNode()))) { conflictingSubscriberFound = true; subscriptionsToBeDisconnected.add(subscription); break; } } } if (!conflictingSubscriberFound) { log.warn(STR + STR + localNodeId + STR + subscription.toString()); andesContextStore.storeDurableSubscription(subscription); } }
/** * Reload subscriptions from DB storage and update the subscription registry. This is a two-step process: * 1. Sync the DB with the local subscriptions. * 2. Sync the subscription registry with the updated DB */
Reload subscriptions from DB storage and update the subscription registry. This is a two-step process: 1. Sync the DB with the local subscriptions. 2. Sync the subscription registry with the updated DB
reloadSubscriptionsFromStorage
{ "repo_name": "indikasampath2000/andes", "path": "modules/andes-core/broker/src/main/java/org/wso2/andes/kernel/subscription/AndesSubscriptionManager.java", "license": "apache-2.0", "size": 55184 }
[ "java.util.ArrayList", "java.util.HashSet", "java.util.List", "java.util.Map", "java.util.Set", "org.wso2.andes.kernel.AndesContext", "org.wso2.andes.kernel.AndesException" ]
import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.wso2.andes.kernel.AndesContext; import org.wso2.andes.kernel.AndesException;
import java.util.*; import org.wso2.andes.kernel.*;
[ "java.util", "org.wso2.andes" ]
java.util; org.wso2.andes;
1,946,159
public static Collection<File> getAllSubFolder(String chemin) throws UtilException { List<File> resultat = new ArrayList<File>(); File directory = new File(chemin); if (directory.isDirectory()) { File[] list = directory.listFiles(); for (File file : list) { if (file.isDirectory()) { resultat.add(file); } } } else { SilverTrace.error("util", "FileFolderManager.getAllSubFolder", "util.EX_NO_CHEMIN_REPOS", chemin); throw new UtilException("FileFolderManager.getAllSubFolder", "util.EX_NO_CHEMIN_REPOS", chemin); } return resultat; }
static Collection<File> function(String chemin) throws UtilException { List<File> resultat = new ArrayList<File>(); File directory = new File(chemin); if (directory.isDirectory()) { File[] list = directory.listFiles(); for (File file : list) { if (file.isDirectory()) { resultat.add(file); } } } else { SilverTrace.error("util", STR, STR, chemin); throw new UtilException(STR, STR, chemin); } return resultat; }
/** * Returns a Collection of File objects representing the directories (and only the * directories, not the files) contained in the directory passed as a parameter * @param chemin the path of the directory * @return a Collection of File objects representing the directories (and only the * directories, not the files) contained in the directory passed as a parameter * @throws UtilException */
Returns a Collection of File objects representing the directories (and only the directories, not the files) contained in the directory passed as a parameter
getAllSubFolder
{ "repo_name": "auroreallibe/Silverpeas-Core", "path": "core-library/src/main/java/org/silverpeas/core/util/file/FileFolderManager.java", "license": "agpl-3.0", "size": 12690 }
[ "java.io.File", "java.util.ArrayList", "java.util.Collection", "java.util.List", "org.silverpeas.core.exception.UtilException", "org.silverpeas.core.silvertrace.SilverTrace" ]
import java.io.File; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.silverpeas.core.exception.UtilException; import org.silverpeas.core.silvertrace.SilverTrace;
import java.io.*; import java.util.*; import org.silverpeas.core.exception.*; import org.silverpeas.core.silvertrace.*;
[ "java.io", "java.util", "org.silverpeas.core" ]
java.io; java.util; org.silverpeas.core;
2,804,280
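A minimal usage sketch for the getAllSubFolder entry above; FileFolderManager is the enclosing class named in the entry's path metadata, and the directory path is a made-up example.

import java.io.File;
import java.util.Collection;

// Throws UtilException when the given path is not a directory.
Collection<File> subFolders = FileFolderManager.getAllSubFolder("/var/data/silverpeas");
for (File folder : subFolders) {
    System.out.println(folder.getName());   // only directories are returned, never plain files
}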
public void fillMap() { int iRoomX = 0; int iRoomY = 0; int iRoomCoords[]; int iRooms; // How many rooms should we draw? Room currRoom; Room tempRoom; ListIterator iter; int iRoomID = 0; // used for checking if we've added a room already // get our first room to draw // (why did I want the last room added?!) //listRooms.add(rooms[rooms.length - 1]); // until I figure out why I used the last room I'll just go with the first room listRooms.add(rooms[0]); //specify that we've added the first rooom //(so that we don't add it again) //This gets done later, no sense douplicating the effort //(we have to do it in the loop down below anyways, so we'll not do it here) //listAddedRoomIDs.add(new Integer(((Room)listRooms.getFirst()).getID())); // add all the rooms that we grabbed from the database to the // "rooms to be drawn" list and the // "rooms to be processed" list // except the first one which is already the room currently being processed for(int i = 1; i < rooms.length; i++) { // DEBUG: System.out.println("adding room [" + rooms[i].getID() + "] to the to-be-processed list"); listRoomsToProcess.add(rooms[i]); listRooms.add(rooms[i]); } // put the first room in the "currently being processed variable" currRoom = (Room) listRooms.getFirst(); // DEBUG: System.out.println("room [" + currRoom.getID() + "] is first to be processed"); // use the first room to set the max and min X and Y coords iRoomCoords = currRoom.getCoords(); iMinX = iMaxX = iRoomCoords[X]; iMinY = iMaxY = iRoomCoords[Y]; // we're picking an arbitrary number of rooms here for now // and not really handling them properly, this is just to // test going through and adding rooms and then drawing them //iRooms = 15; int i = 0; // process all the rooms we have available because the query // should have returned as many rooms as the player can see while(currRoom != null) { if(processRoom(currRoom.northwest)) { i++; } if(processRoom(currRoom.north)) { i++; } if(processRoom(currRoom.northeast)) { i++; } if(processRoom(currRoom.west)) { i++; } if(processRoom(currRoom.east)) { i++; } if(processRoom(currRoom.southwest)) { i++; } if(processRoom(currRoom.south)) { i++; } if(processRoom(currRoom.southeast)) { i++; } // mark that we've processed this room (we already did this above) listAddedRoomIDs.add(new Integer(currRoom.getID())); // null the current room so we can test to see if we get the next one currRoom = null; // get an iterator to look at each ID in the list of added room IDs iter = listAddedRoomIDs.listIterator(); // keep getting the next room until we find one that hasn't been processed while(!listRoomsToProcess.isEmpty() && currRoom == null) { // remove a room to process it currRoom = (Room) listRoomsToProcess.removeFirst(); // DEBUG: System.out.print("fillMap() -> Looking for room " + currRoom.getID() + " ..."); // get the room's ID to make sure we haven't ALREADY processed it (not that it would hurt) while(iter.hasNext()) { // check the next processed ID in the list against the current room ID if(((Integer) iter.next()).intValue() == currRoom.getID()) { // we've found the ID has already been processed // so set the current room to null so the outer loop will grab the next room currRoom = null; // we found the room had been processed so we don't need to keep looping // through the processed rooms break; } } if(currRoom != null) { // DEBUG: add to the previous print System.out.println(" not found: processing."); } else { //DEBUG: Add to the previous print System.out.println(" already processed."); } } } //clear the rooms to add list 
listRoomsToProcess = null; //now we need to find what our map width and height are //so we can create a 2D array big enough to hold all the //rooms and connections // + 1 so that we don't translate TOO far left or down iMapHeight = ((iMaxY - iMinY + 1) * 2) + 1; iMapWidth = ((iMaxX - iMinX + 1) * 2) + 1; // see notes below // make sure there is a minimum of 3 columns and three rows //iMapHeight = iMapHeight < 3 ? 3 : iMapHeight; //iMapWidth = iMapWidth < 3 ? 3 : iMapWidth; // in order to hold 1 room and 1 possible connection in any direction we'll // need a 3 by 3 array for each room HOWEVER two neighbouring rooms will // share one connection so we only need a 2x2 array for each room plus // one full row and one full column to account for the connections that aren't shared // create 2D array big enough to hold one character per room and // one character per room connection cMap = new char[iMapWidth][iMapHeight]; // initialize all cells to ' ' for(int iCols = 0; iCols < iMapWidth; iCols++) { for(int iRows = 0; iRows < iMapHeight; iRows++) { cMap[iCols][iRows] = ' '; // remember: x,y coords } } // get an iterator on the room list // (I think when we did it before we got an error when we change the list afterwards) // (Yup, works now) iter = listRooms.listIterator(); // add the rooms individually to the array while(iter.hasNext()) { // re-use currRoom currRoom = (Room) iter.next(); // get room coords iRoomCoords = currRoom.getCoords(); // find room center coords // |<-- add space for conns -->| // - 1 accounts for 0-based arrays iRoomX = ((iRoomCoords[X] - iMinX) * 2) + 1; iRoomY = ((iRoomCoords[Y] - iMinY) * 2) + 1; // correct edge rooms that get messed up by the above formula (i.e. where actual room X = min X, work it out to see what I mean) iRoomX = iRoomX < 1 ? 1 : iRoomX; iRoomY = iRoomY < 1 ? 1 : iRoomY; // add room to map cMap[iRoomX][iRoomY] = '0'; // add connections to map or space if no connection // NOTE: This will overwrite previous connections if they exist in the // resulting array cells // NW cMap[iRoomX - 1][iRoomY + 1] = currRoom.northwest != null ? '\\' : ' '; // N cMap[iRoomX][iRoomY + 1] = currRoom.north != null ? '|' : ' '; // NE cMap[iRoomX + 1][iRoomY + 1] = currRoom.northeast != null ? '/' : ' '; // W cMap[iRoomX - 1][iRoomY] = currRoom.west != null ? '-' : ' '; // NO CENTER CONNECTION - It's the room itself! // E cMap[iRoomX + 1][iRoomY] = currRoom.east != null ? '-' : ' '; // SW cMap[iRoomX - 1][iRoomY - 1] = currRoom.southwest != null ? '/' : ' '; // S cMap[iRoomX][iRoomY - 1] = currRoom.south != null ? '|' : ' '; // SE cMap[iRoomX + 1][iRoomY - 1] = currRoom.southeast != null ? '\\' : ' '; } }
void function() { int iRoomX = 0; int iRoomY = 0; int iRoomCoords[]; int iRooms; Room currRoom; Room tempRoom; ListIterator iter; int iRoomID = 0; listRooms.add(rooms[0]); for(int i = 1; i < rooms.length; i++) { System.out.println(STR + rooms[i].getID() + STR); listRoomsToProcess.add(rooms[i]); listRooms.add(rooms[i]); } currRoom = (Room) listRooms.getFirst(); System.out.println(STR + currRoom.getID() + STR); iRoomCoords = currRoom.getCoords(); iMinX = iMaxX = iRoomCoords[X]; iMinY = iMaxY = iRoomCoords[Y]; int i = 0; while(currRoom != null) { if(processRoom(currRoom.northwest)) { i++; } if(processRoom(currRoom.north)) { i++; } if(processRoom(currRoom.northeast)) { i++; } if(processRoom(currRoom.west)) { i++; } if(processRoom(currRoom.east)) { i++; } if(processRoom(currRoom.southwest)) { i++; } if(processRoom(currRoom.south)) { i++; } if(processRoom(currRoom.southeast)) { i++; } listAddedRoomIDs.add(new Integer(currRoom.getID())); currRoom = null; iter = listAddedRoomIDs.listIterator(); while(!listRoomsToProcess.isEmpty() && currRoom == null) { currRoom = (Room) listRoomsToProcess.removeFirst(); System.out.print(STR + currRoom.getID() + STR); while(iter.hasNext()) { if(((Integer) iter.next()).intValue() == currRoom.getID()) { currRoom = null; break; } } if(currRoom != null) { System.out.println(STR); } else { System.out.println(STR); } } } listRoomsToProcess = null; iMapHeight = ((iMaxY - iMinY + 1) * 2) + 1; iMapWidth = ((iMaxX - iMinX + 1) * 2) + 1; cMap = new char[iMapWidth][iMapHeight]; for(int iCols = 0; iCols < iMapWidth; iCols++) { for(int iRows = 0; iRows < iMapHeight; iRows++) { cMap[iCols][iRows] = ' '; } } iter = listRooms.listIterator(); while(iter.hasNext()) { currRoom = (Room) iter.next(); iRoomCoords = currRoom.getCoords(); iRoomX = ((iRoomCoords[X] - iMinX) * 2) + 1; iRoomY = ((iRoomCoords[Y] - iMinY) * 2) + 1; iRoomX = iRoomX < 1 ? 1 : iRoomX; iRoomY = iRoomY < 1 ? 1 : iRoomY; cMap[iRoomX][iRoomY] = '0'; cMap[iRoomX - 1][iRoomY + 1] = currRoom.northwest != null ? '\\' : ' '; cMap[iRoomX][iRoomY + 1] = currRoom.north != null ? ' ' : ' '; cMap[iRoomX + 1][iRoomY + 1] = currRoom.northeast != null ? '/' : ' '; cMap[iRoomX - 1][iRoomY] = currRoom.west != null ? '-' : ' '; cMap[iRoomX + 1][iRoomY] = currRoom.east != null ? '-' : ' '; cMap[iRoomX - 1][iRoomY - 1] = currRoom.southwest != null ? '/' : ' '; cMap[iRoomX][iRoomY - 1] = currRoom.south != null ? ' ' : ' '; cMap[iRoomX + 1][iRoomY - 1] = currRoom.southeast != null ? '\\' : ' '; } }
/** * Fill a 2D char array with characters representing rooms * and connections between rooms */
Fill a 2D char array with characters representing rooms and connections between rooms
fillMap
{ "repo_name": "nosleeptillbroken/NS_MUD", "path": "jmud/command/Map.java", "license": "mit", "size": 17200 }
[ "java.util.ListIterator" ]
import java.util.ListIterator;
import java.util.*;
[ "java.util" ]
java.util;
2,158,624
public static <DataType> ArrayList<DataType> sampleMultiple( final double[] cumulativeWeights, final List<? extends DataType> domain, final Random random, final int numSamples ) { final ArrayList<DataType> result = new ArrayList<DataType>(numSamples); sampleMultipleInto(cumulativeWeights, domain, random, numSamples, result); return result; }
static <DataType> ArrayList<DataType> function( final double[] cumulativeWeights, final List<? extends DataType> domain, final Random random, final int numSamples ) { final ArrayList<DataType> result = new ArrayList<DataType>(numSamples); sampleMultipleInto(cumulativeWeights, domain, random, numSamples, result); return result; }
/** * Samples multiple elements from the domain proportionately to the * cumulative weights in the given weight array using a fast * binary search algorithm * @param <DataType> * Type of data to be sampled * @param cumulativeWeights * Cumulative weights to sample from * @param domain * Domain from which to sample * @param random * Random number generator * @param numSamples * Number of samples to draw from the distribution * @return * Samples draw proportionately from the cumulative weights */
Samples multiple elements from the domain proportionately to the cumulative weights in the given weight array using a fast binary search algorithm
sampleMultiple
{ "repo_name": "codeaudit/Foundry", "path": "Components/LearningCore/Source/gov/sandia/cognition/statistics/ProbabilityMassFunctionUtil.java", "license": "bsd-3-clause", "size": 12363 }
[ "java.util.ArrayList", "java.util.List", "java.util.Random" ]
import java.util.ArrayList; import java.util.List; import java.util.Random;
import java.util.*;
[ "java.util" ]
java.util;
1,022,171
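A short usage sketch for the sampleMultiple entry above; ProbabilityMassFunctionUtil is the enclosing class from the entry's path metadata, and the weights, domain, and seed are made up for illustration.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

// Cumulative weights for the domain {"a", "b", "c"}: P(a)=0.2, P(b)=0.5, P(c)=0.3
double[] cumulativeWeights = { 0.2, 0.7, 1.0 };
List<String> domain = Arrays.asList("a", "b", "c");
ArrayList<String> samples =
    ProbabilityMassFunctionUtil.sampleMultiple(cumulativeWeights, domain, new Random(42), 10);
// 'samples' holds 10 draws; "b" should appear roughly half of the time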
private static boolean validateNoResourcesAttribute(RuleContext ruleContext) { if (ruleContext.attributes().isAttributeValueExplicitlySpecified("resources")) { ruleContext.attributeError("resources", String.format("resources cannot be set when any of %s are defined.", Joiner.on(", ").join(RESOURCES_ATTRIBUTES))); return false; } return true; }
static boolean function(RuleContext ruleContext) { if (ruleContext.attributes().isAttributeValueExplicitlySpecified(STR)) { ruleContext.attributeError(STR, String.format(STR, Joiner.on(STR).join(RESOURCES_ATTRIBUTES))); return false; } return true; }
/** * Validates that no resources are defined if any resource attributes are defined. */
Validates that no resources are defined if any resource attributes are defined
validateNoResourcesAttribute
{ "repo_name": "kamalmarhubi/bazel", "path": "src/main/java/com/google/devtools/build/lib/rules/android/LocalResourceContainer.java", "license": "apache-2.0", "size": 12510 }
[ "com.google.common.base.Joiner", "com.google.devtools.build.lib.analysis.RuleContext" ]
import com.google.common.base.Joiner; import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.common.base.*; import com.google.devtools.build.lib.analysis.*;
[ "com.google.common", "com.google.devtools" ]
com.google.common; com.google.devtools;
1,250,536
static BlockReader getLocalBlockReader(Configuration conf, String src, ExtendedBlock blk, Token<BlockTokenIdentifier> accessToken, DatanodeInfo chosenNode, int socketTimeout, long offsetIntoBlock) throws InvalidToken, IOException { try { return BlockReaderLocal.newBlockReader(conf, src, blk, accessToken, chosenNode, socketTimeout, offsetIntoBlock, blk.getNumBytes() - offsetIntoBlock); } catch (RemoteException re) { throw re.unwrapRemoteException(InvalidToken.class, AccessControlException.class); } } private static Map<String, Boolean> localAddrMap = Collections .synchronizedMap(new HashMap<String, Boolean>());
static BlockReader getLocalBlockReader(Configuration conf, String src, ExtendedBlock blk, Token<BlockTokenIdentifier> accessToken, DatanodeInfo chosenNode, int socketTimeout, long offsetIntoBlock) throws InvalidToken, IOException { try { return BlockReaderLocal.newBlockReader(conf, src, blk, accessToken, chosenNode, socketTimeout, offsetIntoBlock, blk.getNumBytes() - offsetIntoBlock); } catch (RemoteException re) { throw re.unwrapRemoteException(InvalidToken.class, AccessControlException.class); } } private static Map<String, Boolean> localAddrMap = Collections .synchronizedMap(new HashMap<String, Boolean>());
/** * Get {@link BlockReader} for short circuited local reads. */
Get <code>BlockReader</code> for short circuited local reads
getLocalBlockReader
{ "repo_name": "moreus/hadoop", "path": "hadoop-0.23.10/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSClient.java", "license": "apache-2.0", "size": 68830 }
[ "java.io.IOException", "java.util.Collections", "java.util.HashMap", "java.util.Map", "org.apache.hadoop.conf.Configuration", "org.apache.hadoop.hdfs.protocol.DatanodeInfo", "org.apache.hadoop.hdfs.protocol.ExtendedBlock", "org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier", "org.apache.hadoop.ipc.RemoteException", "org.apache.hadoop.security.AccessControlException", "org.apache.hadoop.security.token.SecretManager", "org.apache.hadoop.security.token.Token" ]
import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.token.SecretManager; import org.apache.hadoop.security.token.Token;
import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.hdfs.protocol.*; import org.apache.hadoop.hdfs.security.token.block.*; import org.apache.hadoop.ipc.*; import org.apache.hadoop.security.*; import org.apache.hadoop.security.token.*;
[ "java.io", "java.util", "org.apache.hadoop" ]
java.io; java.util; org.apache.hadoop;
1,063,563
public V getValueFromCache(K key) { Cache<K, V> cache = getEntitlementCache(); if (cache != null) { if (cache.containsKey(key)) { if (log.isDebugEnabled()) { String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain(); log.debug("Cache : " + Entitlement_CACHE_NAME + " is HIT " + "in tenant domain : " + tenantDomain); } return cache.get(key); } } if (log.isDebugEnabled()) { String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain(); log.debug("Cache : " + Entitlement_CACHE_NAME + " is MISSED " + "in tenant domain : " + tenantDomain); } return null; }
V function(K key) { Cache<K, V> cache = getEntitlementCache(); if (cache != null) { if (cache.containsKey(key)) { if (log.isDebugEnabled()) { String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain(); log.debug(STR + Entitlement_CACHE_NAME + STR + STR + tenantDomain); } return cache.get(key); } } if (log.isDebugEnabled()) { String tenantDomain = CarbonContext.getThreadLocalCarbonContext().getTenantDomain(); log.debug(STR + Entitlement_CACHE_NAME + STR + STR + tenantDomain); } return null; }
/** * Retrieves a cache entry. * * @param key CacheKey * @return Cached entry. */
Retrieves a cache entry
getValueFromCache
{ "repo_name": "nuwandi-is/identity-framework", "path": "components/entitlement/org.wso2.carbon.identity.entitlement/src/main/java/org/wso2/carbon/identity/entitlement/cache/EntitlementBaseCache.java", "license": "apache-2.0", "size": 8905 }
[ "javax.cache.Cache", "org.wso2.carbon.context.CarbonContext" ]
import javax.cache.Cache; import org.wso2.carbon.context.CarbonContext;
import javax.cache.*; import org.wso2.carbon.context.*;
[ "javax.cache", "org.wso2.carbon" ]
javax.cache; org.wso2.carbon;
1,469,013
public static VectorValueSelector makeVectorValueSelector( VectorColumnSelectorFactory columnSelectorFactory, @Nullable String fieldName, @Nullable String expression, Supplier<Expr> fieldExpression ) { if ((fieldName == null) == (expression == null)) { throw new IllegalArgumentException("Only one of fieldName or expression should be non-null"); } if (expression != null) { return ExpressionVectorSelectors.makeVectorValueSelector(columnSelectorFactory, fieldExpression.get()); } return columnSelectorFactory.makeValueSelector(fieldName); }
static VectorValueSelector function( VectorColumnSelectorFactory columnSelectorFactory, @Nullable String fieldName, @Nullable String expression, Supplier<Expr> fieldExpression ) { if ((fieldName == null) == (expression == null)) { throw new IllegalArgumentException(STR); } if (expression != null) { return ExpressionVectorSelectors.makeVectorValueSelector(columnSelectorFactory, fieldExpression.get()); } return columnSelectorFactory.makeValueSelector(fieldName); }
/** * Make a {@link VectorValueSelector} for primitive numeric or expression virtual column inputs. */
Make a <code>VectorValueSelector</code> for primitive numeric or expression virtual column inputs
makeVectorValueSelector
{ "repo_name": "monetate/druid", "path": "processing/src/main/java/org/apache/druid/query/aggregation/AggregatorUtil.java", "license": "apache-2.0", "size": 15666 }
[ "com.google.common.base.Supplier", "javax.annotation.Nullable", "org.apache.druid.math.expr.Expr", "org.apache.druid.segment.vector.VectorColumnSelectorFactory", "org.apache.druid.segment.vector.VectorValueSelector", "org.apache.druid.segment.virtual.ExpressionVectorSelectors" ]
import com.google.common.base.Supplier; import javax.annotation.Nullable; import org.apache.druid.math.expr.Expr; import org.apache.druid.segment.vector.VectorColumnSelectorFactory; import org.apache.druid.segment.vector.VectorValueSelector; import org.apache.druid.segment.virtual.ExpressionVectorSelectors;
import com.google.common.base.*; import javax.annotation.*; import org.apache.druid.math.expr.*; import org.apache.druid.segment.vector.*; import org.apache.druid.segment.virtual.*;
[ "com.google.common", "javax.annotation", "org.apache.druid" ]
com.google.common; javax.annotation; org.apache.druid;
2,702,902
public static Uri withAppendedId(Uri contentUri, long id) { return appendId(contentUri.buildUpon(), id).build(); }
static Uri function(Uri contentUri, long id) { return appendId(contentUri.buildUpon(), id).build(); }
/** * Appends the given ID to the end of the path. * * @param contentUri to start with * @param id to append * * @return a new URI with the given ID appended to the end of the path */
Appends the given ID to the end of the path
withAppendedId
{ "repo_name": "mateor/PDroidHistory", "path": "frameworks/base/core/java/android/content/ContentUris.java", "license": "gpl-3.0", "size": 2104 }
[ "android.net.Uri" ]
import android.net.Uri;
import android.net.*;
[ "android.net" ]
android.net;
1,289,913
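A typical usage sketch for the withAppendedId entry above; the contacts provider URI and row id are just an example of one possible caller.

import android.content.ContentUris;
import android.net.Uri;
import android.provider.ContactsContract;

long contactId = 42L;                                              // example row id
Uri contactUri = ContentUris.withAppendedId(ContactsContract.Contacts.CONTENT_URI, contactId);
// e.g. content://com.android.contacts/contacts/42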
public static byte[] calculateMD5Digest(@NotNull InputStream in) throws NoSuchAlgorithmException, IOException { MessageDigest md = MessageDigest.getInstance("MD5"); InputStream fis = new BufferedInputStream(in); byte[] dataBytes = new byte[1024]; int nread; while ((nread = fis.read(dataBytes)) != -1) md.update(dataBytes, 0, nread); return md.digest(); }
static byte[] function(@NotNull InputStream in) throws NoSuchAlgorithmException, IOException { MessageDigest md = MessageDigest.getInstance("MD5"); InputStream fis = new BufferedInputStream(in); byte[] dataBytes = new byte[1024]; int nread; while ((nread = fis.read(dataBytes)) != -1) md.update(dataBytes, 0, nread); return md.digest(); }
/** * Calculate MD5 digest. * @param in Input stream. * @return Calculated MD5 digest for given input stream. * @throws NoSuchAlgorithmException If MD5 algorithm was not found. * @throws IOException If an I/O exception occurs. */
Calculate MD5 digest
calculateMD5Digest
{ "repo_name": "shurun19851206/ignite", "path": "modules/core/src/main/java/org/apache/ignite/internal/util/IgniteUtils.java", "license": "apache-2.0", "size": 289056 }
[ "java.io.BufferedInputStream", "java.io.IOException", "java.io.InputStream", "java.security.MessageDigest", "java.security.NoSuchAlgorithmException", "org.jetbrains.annotations.NotNull" ]
import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import org.jetbrains.annotations.NotNull;
import java.io.*; import java.security.*; import org.jetbrains.annotations.*;
[ "java.io", "java.security", "org.jetbrains.annotations" ]
java.io; java.security; org.jetbrains.annotations;
1,743,124
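A usage sketch for the calculateMD5Digest entry above; IgniteUtils is the enclosing class from the entry's path metadata, the file path is made up, and the caller must handle the declared IOException and NoSuchAlgorithmException.

import java.io.FileInputStream;
import java.io.InputStream;

try (InputStream in = new FileInputStream("/tmp/data.bin")) {      // sample input file
    byte[] digest = IgniteUtils.calculateMD5Digest(in);
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
        hex.append(String.format("%02x", b));                      // render as 32 hex characters
    }
    System.out.println("MD5: " + hex);
}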
List<TEntityDescriptor> getAll();
List<TEntityDescriptor> getAll();
/** * Gets all wallet storable entity descriptors. * * @return A list of all storable entity descriptors. */
Gets all wallet storable entity descriptors
getAll
{ "repo_name": "NewEconomyMovement/NemCommunityClient", "path": "nem-client-api/src/main/java/org/nem/ncc/storable/entity/StorableEntityLocator.java", "license": "mit", "size": 701 }
[ "java.util.List" ]
import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
192,316
public void testRetrieveDAOFactoryFolder() { @NotNull final PackageUtils t_PackageUtils = PackageUtils.getInstance(); assertNotNull(t_PackageUtils); @Nullable final File t_TestFile = getTestFile(); assertNotNull(t_TestFile); @NotNull final File t_DAOFactoryFolder = t_PackageUtils.retrieveDAOFactoryFolder( t_TestFile, "com.foo.bar", "mysql", false); assertNotNull(t_DAOFactoryFolder); assertEquals( t_DAOFactoryFolder.getAbsolutePath(), t_TestFile.getAbsolutePath() + File.separator + "com" + File.separator + "foo" + File.separator + "bar" + File.separator + PackageUtils.BASE_DAO_SUBPACKAGE + File.separator + PackageUtils.RDB_SUBPACKAGE + File.separator + "mysql"); }
void function() { @NotNull final PackageUtils t_PackageUtils = PackageUtils.getInstance(); assertNotNull(t_PackageUtils); @Nullable final File t_TestFile = getTestFile(); assertNotNull(t_TestFile); @NotNull final File t_DAOFactoryFolder = t_PackageUtils.retrieveDAOFactoryFolder( t_TestFile, STR, "mysql", false); assertNotNull(t_DAOFactoryFolder); assertEquals( t_DAOFactoryFolder.getAbsolutePath(), t_TestFile.getAbsolutePath() + File.separator + "com" + File.separator + "foo" + File.separator + "bar" + File.separator + PackageUtils.BASE_DAO_SUBPACKAGE + File.separator + PackageUtils.RDB_SUBPACKAGE + File.separator + "mysql"); }
/** * Tests the retrieveDAOFactoryFolder() method * @see org.acmsl.queryj.tools.PackageUtils#retrieveDAOFactoryFolder(File,String,String,boolean) */
Tests the retrieveDAOFactoryFolder() method
testRetrieveDAOFactoryFolder
{ "repo_name": "rydnr/queryj-rt", "path": "queryj-core/src/test/java/org/acmsl/queryj/tools/PackageUtilsTest.java", "license": "gpl-2.0", "size": 18169 }
[ "java.io.File", "org.jetbrains.annotations.NotNull", "org.jetbrains.annotations.Nullable" ]
import java.io.File; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable;
import java.io.*; import org.jetbrains.annotations.*;
[ "java.io", "org.jetbrains.annotations" ]
java.io; org.jetbrains.annotations;
2,821,117
public Output<? extends TType> handle() { return handle; }
Output<? extends TType> function() { return handle; }
/** * Gets handle. * * @return handle. */
Gets handle
handle
{ "repo_name": "tensorflow/java", "path": "tensorflow-core/tensorflow-core-api/src/gen/java/org/tensorflow/op/core/DummyMemoryCache.java", "license": "apache-2.0", "size": 2752 }
[ "org.tensorflow.Output", "org.tensorflow.types.family.TType" ]
import org.tensorflow.Output; import org.tensorflow.types.family.TType;
import org.tensorflow.*; import org.tensorflow.types.family.*;
[ "org.tensorflow", "org.tensorflow.types" ]
org.tensorflow; org.tensorflow.types;
1,885,734
public double[] transformToVector() throws Exception { if (vectorLength > vladAggregator.getVectorLength() || vectorLength <= 0) { throw new Exception("Vector length should be between 1 and " + vladAggregator.getVectorLength()); } // the local features are extracted double[][] features; if (image == null) { // first the image is read if the image field is null try { // first try reading with the default class image = ImageIO.read(new File(imageFolder + imageFilename)); } catch (IllegalArgumentException e) { // this exception is probably thrown because of a greyscale jpeg image System.out.println("Exception: " + e.getMessage() + " | Image: " + imageFilename); // retry with the modified class image = ImageIOGreyScale.read(new File(imageFolder + imageFilename)); } } // next the image is scaled ImageScaling scale = new ImageScaling(maxImageSizeInPixels); try { image = scale.maxPixelsScaling(image); } catch (Exception e) { throw new Exception("Exception thrown when scaling the image!\n" + e.getMessage()); } // next the local features are extracted features = featureExtractor.extractFeatures(image); // next the features are aggregated double[] vladVector = vladAggregator.aggregate(features); if (vladVector.length == vectorLength) { // no projection is needed return vladVector; } else { // pca projection is applied double[] projected = pcaProjector.sampleToEigenSpace(vladVector); return projected; } }
double[] function() throws Exception { if (vectorLength > vladAggregator.getVectorLength() || vectorLength <= 0) { throw new Exception(STR + vladAggregator.getVectorLength()); } double[][] features; if (image == null) { try { image = ImageIO.read(new File(imageFolder + imageFilename)); } catch (IllegalArgumentException e) { System.out.println(STR + e.getMessage() + STR + imageFilename); image = ImageIOGreyScale.read(new File(imageFolder + imageFilename)); } } ImageScaling scale = new ImageScaling(maxImageSizeInPixels); try { image = scale.maxPixelsScaling(image); } catch (Exception e) { throw new Exception(STR + e.getMessage()); } features = featureExtractor.extractFeatures(image); double[] vladVector = vladAggregator.aggregate(features); if (vladVector.length == vectorLength) { return vladVector; } else { double[] projected = pcaProjector.sampleToEigenSpace(vladVector); return projected; } }
/** * Transforms the image into a vector and returns the result. * * @return The image's vector. * @throws Exception */
Transforms the image into a vector and returns the result
transformToVector
{ "repo_name": "kandreadou/reveal-multimedia-indexing", "path": "src/main/java/gr/iti/mklab/visual/vectorization/ImageVectorization.java", "license": "apache-2.0", "size": 8972 }
[ "gr.iti.mklab.visual.extraction.ImageScaling", "gr.iti.mklab.visual.utilities.ImageIOGreyScale", "java.io.File", "javax.imageio.ImageIO" ]
import gr.iti.mklab.visual.extraction.ImageScaling; import gr.iti.mklab.visual.utilities.ImageIOGreyScale; import java.io.File; import javax.imageio.ImageIO;
import gr.iti.mklab.visual.extraction.*; import gr.iti.mklab.visual.utilities.*; import java.io.*; import javax.imageio.*;
[ "gr.iti.mklab", "java.io", "javax.imageio" ]
gr.iti.mklab; java.io; javax.imageio;
926,767
public static Date stringToDate(String dateString) { return stringToDate(dateString, DATE_YEAR_MONTH_DAY_STRING_FORMAT); }
static Date function(String dateString) { return stringToDate(dateString, DATE_YEAR_MONTH_DAY_STRING_FORMAT); }
/** * Standard format 'yyyy-MM-dd' * * @param dateString The string to parse * @return The date or null */
Standard format 'yyyy-MM-dd'
stringToDate
{ "repo_name": "olivierg13/GreenDao-vs-Realm", "path": "Greendao-app/src/main/java/com/db/oliviergoutay/greendao_vs_realm/utils/Utilities.java", "license": "apache-2.0", "size": 2290 }
[ "java.util.Date" ]
import java.util.Date;
import java.util.*;
[ "java.util" ]
java.util;
498,771
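A usage sketch for the stringToDate entry above; Utilities is the enclosing class from the entry's path metadata and the date literal is an arbitrary example.

import java.util.Date;

Date date = Utilities.stringToDate("2016-03-21");   // parsed with the 'yyyy-MM-dd' format
if (date == null) {
    // the helper returns null instead of throwing when the input cannot be parsed
}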
public static boolean deleteSnippet(Context context, String worldName, Category category, String articleName, String snippetName) { return FileRetriever.getSnippetFile(context, worldName, category, articleName, snippetName, false).delete(); }
static boolean function(Context context, String worldName, Category category, String articleName, String snippetName) { return FileRetriever.getSnippetFile(context, worldName, category, articleName, snippetName, false).delete(); }
/** * Deletes a Snippet belonging to the specified Article. * @param context The Context calling this method. * @param worldName The name of the World the Article belongs to. * @param category The Category of the Article. * @param articleName The name of the Article the Snippet belongs to. * @param snippetName The name of the Snippet to delete. * @return True if the Snippet was deleted successfully; false otherwise. */
Deletes a Snippet belonging to the specified Article
deleteSnippet
{ "repo_name": "MarquisLP/World-Scribe", "path": "app/src/main/java/com/averi/worldscribe/utilities/ExternalDeleter.java", "license": "mit", "size": 6906 }
[ "android.content.Context", "com.averi.worldscribe.Category" ]
import android.content.Context; import com.averi.worldscribe.Category;
import android.content.*; import com.averi.worldscribe.*;
[ "android.content", "com.averi.worldscribe" ]
android.content; com.averi.worldscribe;
2,597,035
@ServiceMethod(returns = ReturnType.SINGLE) public VirtualNetworkRuleInner createOrUpdate( String resourceGroupName, String serverName, String virtualNetworkRuleName, VirtualNetworkRuleInner parameters, Context context) { return createOrUpdateAsync(resourceGroupName, serverName, virtualNetworkRuleName, parameters, context).block(); }
@ServiceMethod(returns = ReturnType.SINGLE) VirtualNetworkRuleInner function( String resourceGroupName, String serverName, String virtualNetworkRuleName, VirtualNetworkRuleInner parameters, Context context) { return createOrUpdateAsync(resourceGroupName, serverName, virtualNetworkRuleName, parameters, context).block(); }
/** * Creates or updates an existing virtual network rule. * * @param resourceGroupName The name of the resource group. The name is case insensitive. * @param serverName The name of the server. * @param virtualNetworkRuleName The name of the virtual network rule. * @param parameters The requested virtual Network Rule Resource state. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a virtual network rule. */
Creates or updates an existing virtual network rule
createOrUpdate
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/mysql/azure-resourcemanager-mysql/src/main/java/com/azure/resourcemanager/mysql/implementation/VirtualNetworkRulesClientImpl.java", "license": "mit", "size": 57505 }
[ "com.azure.core.annotation.ReturnType", "com.azure.core.annotation.ServiceMethod", "com.azure.core.util.Context", "com.azure.resourcemanager.mysql.fluent.models.VirtualNetworkRuleInner" ]
import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.util.Context; import com.azure.resourcemanager.mysql.fluent.models.VirtualNetworkRuleInner;
import com.azure.core.annotation.*; import com.azure.core.util.*; import com.azure.resourcemanager.mysql.fluent.models.*;
[ "com.azure.core", "com.azure.resourcemanager" ]
com.azure.core; com.azure.resourcemanager;
1,809,784
public static StringServer create(Observable<String> source, int port) { return new StringServer(source, port); } private StringServer(Observable<String> source, int port) { try { this.ss = new ServerSocket(port); subscriptions.add(Subscriptions.create(() -> closeServerSocket())); } catch (IOException e) { throw new RuntimeException(e); } this.source = source; }
static StringServer function(Observable<String> source, int port) { return new StringServer(source, port); } private StringServer(Observable<String> source, int port) { try { this.ss = new ServerSocket(port); subscriptions.add(Subscriptions.create(() -> closeServerSocket())); } catch (IOException e) { throw new RuntimeException(e); } this.source = source; }
/** * Factory method. * * @param source * source to publish on server socket * @param port * to assign the server socket to */
Factory method
create
{ "repo_name": "amsa-code/risky", "path": "streams/src/main/java/au/gov/amsa/streams/StringServer.java", "license": "apache-2.0", "size": 5729 }
[ "java.io.IOException", "java.net.ServerSocket" ]
import java.io.IOException; import java.net.ServerSocket;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
564,634
EAttribute getMemberPropertyType_Arcrole();
EAttribute getMemberPropertyType_Arcrole();
/** * Returns the meta object for the attribute '{@link net.opengis.wfs20.MemberPropertyType#getArcrole <em>Arcrole</em>}'. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @return the meta object for the attribute '<em>Arcrole</em>'. * @see net.opengis.wfs20.MemberPropertyType#getArcrole() * @see #getMemberPropertyType() * @generated */
Returns the meta object for the attribute '<code>net.opengis.wfs20.MemberPropertyType#getArcrole Arcrole</code>'.
getMemberPropertyType_Arcrole
{ "repo_name": "geotools/geotools", "path": "modules/ogc/net.opengis.wfs/src/net/opengis/wfs20/Wfs20Package.java", "license": "lgpl-2.1", "size": 404067 }
[ "org.eclipse.emf.ecore.EAttribute" ]
import org.eclipse.emf.ecore.EAttribute;
import org.eclipse.emf.ecore.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
1,181,543
EList<AllocationContext> getAllocationContexts();
EList<AllocationContext> getAllocationContexts();
/** * Returns the value of the '<em><b>Allocation Contexts</b></em>' reference list. * The list contents are of type {@link org.palladiosimulator.pcm.allocation.AllocationContext}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Allocation Contexts</em>' reference list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Allocation Contexts</em>' reference list. * @see pcmarchoptions.PcmarchoptionsPackage#getPCM_MultipleAllocation_AllocationContexts() * @model lower="2" * @generated */
Returns the value of the 'Allocation Contexts' reference list. The list contents are of type <code>org.palladiosimulator.pcm.allocation.AllocationContext</code>. If the meaning of the 'Allocation Contexts' reference list isn't clear, there really should be more of a description here...
getAllocationContexts
{ "repo_name": "KAMP-Research/KAMP", "path": "bundles/Toometa/toometa.pcmarchoptions/src/pcmarchoptions/PCM_MultipleAllocation.java", "license": "apache-2.0", "size": 1801 }
[ "org.eclipse.emf.common.util.EList", "org.palladiosimulator.pcm.allocation.AllocationContext" ]
import org.eclipse.emf.common.util.EList; import org.palladiosimulator.pcm.allocation.AllocationContext;
import org.eclipse.emf.common.util.*; import org.palladiosimulator.pcm.allocation.*;
[ "org.eclipse.emf", "org.palladiosimulator.pcm" ]
org.eclipse.emf; org.palladiosimulator.pcm;
2,188,272
public static String escapeAssignmentFeedback(String value) { if (value == null || value.length() == 0) return value; value = fixAssignmentFeedback(value); StringBuilder buf = new StringBuilder(value); int pos = -1; while ((pos = buf.indexOf("{{")) != -1) { buf.replace(pos, pos + "{{".length(), "<span class='highlight'>"); } while ((pos = buf.indexOf("}}")) != -1) { buf.replace(pos, pos + "}}".length(), "</span>"); } return FormattedText.escapeHtmlFormattedText(buf.toString()); }
static String function(String value) { if (value == null value.length() == 0) return value; value = fixAssignmentFeedback(value); StringBuilder buf = new StringBuilder(value); int pos = -1; while ((pos = buf.indexOf("{{")) != -1) { buf.replace(pos, pos + "{{".length(), STR); } while ((pos = buf.indexOf("}}")) != -1) { buf.replace(pos, pos + "}}".length(), STR); } return FormattedText.escapeHtmlFormattedText(buf.toString()); }
/** * Called to deal with old Chef-style assignment feedback annotation, {{like this}}. * * @param value * A formatted text string that may contain {{}} style markup * @return HTML ready to for display on a browser */
Called to deal with old Chef-style assignment feedback annotation, {{like this}}
escapeAssignmentFeedback
{ "repo_name": "tl-its-umich-edu/sakai", "path": "assignment/assignment-tool/tool/src/java/org/sakaiproject/assignment/tool/AssignmentAction.java", "license": "apache-2.0", "size": 671846 }
[ "org.sakaiproject.util.FormattedText" ]
import org.sakaiproject.util.FormattedText;
import org.sakaiproject.util.*;
[ "org.sakaiproject.util" ]
org.sakaiproject.util;
909,129
public UploadCollection getSelectedCollection() { JList collectionList = mainFrame.getCollectionList(); String current = (String) collectionList.getSelectedValue(); if (collection.contains(current)) { return collection.getCollection(current); } return null; }
UploadCollection function() { JList collectionList = mainFrame.getCollectionList(); String current = (String) collectionList.getSelectedValue(); if (collection.contains(current)) { return collection.getCollection(current); } return null; }
/** * Returns the currently selected UploadCollection * * @return selected collection */
Returns the currently selected UploadCollection
getSelectedCollection
{ "repo_name": "thetmk/UploadManager", "path": "src/main/java/com/tmk/uploadmanager/control/MainController.java", "license": "mit", "size": 16441 }
[ "com.tmk.uploadmanager.model.UploadCollection", "javax.swing.JList" ]
import com.tmk.uploadmanager.model.UploadCollection; import javax.swing.JList;
import com.tmk.uploadmanager.model.*; import javax.swing.*;
[ "com.tmk.uploadmanager", "javax.swing" ]
com.tmk.uploadmanager; javax.swing;
460,708
public String getAuthorFont(int what) { Element el = settingsFile.getRootElement().getChild(SETTING_AUTHORFONT); String retval = ""; if (el != null) { switch (what) { case FONTNAME: retval = el.getText(); break; case FONTSIZE: retval = el.getAttributeValue("size"); break; case FONTCOLOR: retval = el.getAttributeValue("color"); break; case FONTSTYLE: retval = el.getAttributeValue("style"); break; case FONTWEIGHT: retval = el.getAttributeValue("weight"); break; } } return retval; }
String function(int what) { Element el = settingsFile.getRootElement().getChild(SETTING_AUTHORFONT); String retval = STRsizeSTRcolorSTRstyleSTRweight"); break; } } return retval; }
/** * Retrieves settings for the mainfont (the font used for the main-entry-textfield). * * @param what (indicates, which font-characteristic we want to have. use following * constants:<br> * - FONTNAME<br> * - FONTSIZE<br> * - FONTCOLOR<br> * - FONTSTYLE<br> * - FONTWEIGHT<br> * @return the related font-information as string. */
Retrieves settings for the mainfont (the font used for the main-entry-textfield)
getAuthorFont
{ "repo_name": "sjPlot/Zettelkasten", "path": "src/main/java/de/danielluedecke/zettelkasten/database/Settings.java", "license": "gpl-3.0", "size": 218287 }
[ "org.jdom2.Element" ]
import org.jdom2.Element;
import org.jdom2.*;
[ "org.jdom2" ]
org.jdom2;
1,759,805
public VpnSiteInner withProvisioningState(ProvisioningState provisioningState) { this.provisioningState = provisioningState; return this; }
VpnSiteInner function(ProvisioningState provisioningState) { this.provisioningState = provisioningState; return this; }
/** * Set the provisioning state of the resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'. * * @param provisioningState the provisioningState value to set * @return the VpnSiteInner object itself. */
Set the provisioning state of the resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', 'Failed'
withProvisioningState
{ "repo_name": "navalev/azure-sdk-for-java", "path": "sdk/network/mgmt-v2019_06_01/src/main/java/com/microsoft/azure/management/network/v2019_06_01/implementation/VpnSiteInner.java", "license": "mit", "size": 8006 }
[ "com.microsoft.azure.management.network.v2019_06_01.ProvisioningState" ]
import com.microsoft.azure.management.network.v2019_06_01.ProvisioningState;
import com.microsoft.azure.management.network.v2019_06_01.*;
[ "com.microsoft.azure" ]
com.microsoft.azure;
2,239,335
CompletableFuture<WorkQueueStats> stats();
CompletableFuture<WorkQueueStats> stats();
/** * Returns work queue statistics. * @return future that is completed with work queue stats when the operation completes */
Returns work queue statistics
stats
{ "repo_name": "VinodKumarS-Huawei/ietf96yang", "path": "core/api/src/main/java/org/onosproject/store/service/WorkQueue.java", "license": "apache-2.0", "size": 5407 }
[ "java.util.concurrent.CompletableFuture" ]
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.*;
[ "java.util" ]
java.util;
2,348,337
protected final void addViewInternal(View child, int index, ViewGroup.LayoutParams params) { super.addView(child, index, params); }
final void function(View child, int index, ViewGroup.LayoutParams params) { super.addView(child, index, params); }
/** * Used internally for adding view. Needed because we override addView to * pass-through to the Refreshable View */
Used internally for adding view. Needed because we override addView to pass-through to the Refreshable View
addViewInternal
{ "repo_name": "WeiDianzhao1989/AndroidAppLib", "path": "pulltorefresh/src/main/java/com/weidian/pulltorefresh/PullToRefreshBase.java", "license": "apache-2.0", "size": 48112 }
[ "android.view.View", "android.view.ViewGroup" ]
import android.view.View; import android.view.ViewGroup;
import android.view.*;
[ "android.view" ]
android.view;
648,422
String name = driver.findElement(By.xpath(uiElementMapper.getElement("iot.user.registered.name"))).getText(); return name.contains(uiElementMapper.getElement("iot.user.login.username")); }
String name = driver.findElement(By.xpath(uiElementMapper.getElement(STR))).getText(); return name.contains(uiElementMapper.getElement(STR)); }
/** * Method to check the current User name * @return : True if the user name matches the logged in user. False otherwise. */
Method to check the current User name
checkUserName
{ "repo_name": "madhawap/product-iots", "path": "modules/integration/tests-common/web-ui-pages/src/main/java/org/wso2/iot/integration/ui/pages/home/IOTHomePage.java", "license": "apache-2.0", "size": 3713 }
[ "org.openqa.selenium.By" ]
import org.openqa.selenium.By;
import org.openqa.selenium.*;
[ "org.openqa.selenium" ]
org.openqa.selenium;
409,285
public static boolean any(Object self, Closure closure) { for (Iterator iter = InvokerHelper.asIterator(self); iter.hasNext();) { if (DefaultTypeTransformation.castToBoolean(closure.call(iter.next()))) { return true; } } return false; } /** * Iterates over the entries of a map, and checks whether a predicate is * valid for at least one entry. If the * closure takes one parameter then it will be passed the Map.Entry * otherwise if the closure takes two parameters then it will be * passed the key and the value. * <pre class="groovyTestCase"> * assert [2:3, 4:5, 5:10].any { key, value -> key * 2 == value } * assert ![2:3, 4:5, 5:10].any { entry -> entry.key == entry.value * 2 }
static boolean function(Object self, Closure closure) { for (Iterator iter = InvokerHelper.asIterator(self); iter.hasNext();) { if (DefaultTypeTransformation.castToBoolean(closure.call(iter.next()))) { return true; } } return false; } /** * Iterates over the entries of a map, and checks whether a predicate is * valid for at least one entry. If the * closure takes one parameter then it will be passed the Map.Entry * otherwise if the closure takes two parameters then it will be * passed the key and the value. * <pre class=STR> * assert [2:3, 4:5, 5:10].any { key, value -> key * 2 == value } * assert ![2:3, 4:5, 5:10].any { entry -> entry.key == entry.value * 2 }
/** * Iterates over the contents of an object or collection, and checks whether a * predicate is valid for at least one element. * * @param self the object over which we iterate * @param closure the closure predicate used for matching * @return true if any iteration for the object matches the closure predicate * @since 1.0 */
Iterates over the contents of an object or collection, and checks whether a predicate is valid for at least one element
any
{ "repo_name": "xien777/yajsw", "path": "yajsw/wrapper/src/main/java/org/codehaus/groovy/runtime/DefaultGroovyMethods.java", "license": "lgpl-2.1", "size": 704150 }
[ "groovy.lang.Closure", "java.util.Iterator", "java.util.Map", "org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation" ]
import groovy.lang.Closure; import java.util.Iterator; import java.util.Map; import org.codehaus.groovy.runtime.typehandling.DefaultTypeTransformation;
import groovy.lang.*; import java.util.*; import org.codehaus.groovy.runtime.typehandling.*;
[ "groovy.lang", "java.util", "org.codehaus.groovy" ]
groovy.lang; java.util; org.codehaus.groovy;
2,415,685
public boolean waitForDialogToClose(long timeout) { if(config.commandLogging){ Log.d(config.commandLoggingTag, "waitForDialogToClose("+timeout+")"); } return dialogUtils.waitForDialogToClose(timeout); }
boolean function(long timeout) { if(config.commandLogging){ Log.d(config.commandLoggingTag, STR+timeout+")"); } return dialogUtils.waitForDialogToClose(timeout); }
/** * Waits for a Dialog to close. * * @param timeout the amount of time in milliseconds to wait * @return {@code true} if the {@link android.app.Dialog} is closed before the timeout and {@code false} if it is not closed */
Waits for a Dialog to close
waitForDialogToClose
{ "repo_name": "darker50/robotium", "path": "robotium-solo/src/main/java/com/robotium/solo/Solo.java", "license": "apache-2.0", "size": 124742 }
[ "android.util.Log" ]
import android.util.Log;
import android.util.*;
[ "android.util" ]
android.util;
1,972,443
public Preference findPreference(CharSequence key) { if (mPreferenceManager == null) { return null; } return mPreferenceManager.findPreference(key); }
Preference function(CharSequence key) { if (mPreferenceManager == null) { return null; } return mPreferenceManager.findPreference(key); }
/** * Finds a {@link Preference} based on its key. * * @param key The key of the preference to retrieve. * @return The {@link Preference} with the key, or null. * @see PreferenceGroup#findPreference(CharSequence) */
Finds a <code>Preference</code> based on its key
findPreference
{ "repo_name": "kabili207/Droid-Achievement", "path": "Achievement/src/main/java/com/zyrenth/achievement/PreferenceListFragment.java", "license": "gpl-3.0", "size": 8540 }
[ "android.preference.Preference" ]
import android.preference.Preference;
import android.preference.*;
[ "android.preference" ]
android.preference;
2,602,638
public void add(Socket socket, int timeout, boolean resume, boolean wakeup) { if (timeout < 0) { timeout = keepAliveTimeout; } if (timeout < 0) { timeout = soTimeout; } if (timeout <= 0) { // Always put a timeout in timeout = Integer.MAX_VALUE; } boolean ok = false; synchronized (this) { // Add socket to the list. Newly added sockets will wait // at most for pollTime before being polled if (addList.add(socket, timeout, (resume ? SocketInfo.RESUME : 0) | (wakeup ? SocketInfo.WAKEUP : 0))) { ok = true; this.notify(); } } if (!ok) { // Can't do anything: close the socket right away if (!processSocket(socket, SocketStatus.ERROR)) { try { socket.close(); } catch (IOException e) { } } } }
void function(Socket socket, int timeout, boolean resume, boolean wakeup) { if (timeout < 0) { timeout = keepAliveTimeout; } if (timeout < 0) { timeout = soTimeout; } if (timeout <= 0) { timeout = Integer.MAX_VALUE; } boolean ok = false; synchronized (this) { if (addList.add(socket, timeout, (resume ? SocketInfo.RESUME : 0) (wakeup ? SocketInfo.WAKEUP : 0))) { ok = true; this.notify(); } } if (!ok) { if (!processSocket(socket, SocketStatus.ERROR)) { try { socket.close(); } catch (IOException e) { } } } }
/** * Add specified socket and associated pool to the poller. The socket will * be added to a temporary array, and polled first after a maximum amount * of time equal to pollTime (in most cases, latency will be much lower, * however). * * @param socket to add to the poller */
Add specified socket and associated pool to the poller. The socket will be added to a temporary array, and polled first after a maximum amount of time equal to pollTime (in most cases, latency will be much lower, however)
add
{ "repo_name": "whitingjr/JbossWeb_7_2_0", "path": "src/main/java/org/apache/tomcat/util/net/JIoEndpoint.java", "license": "apache-2.0", "size": 39734 }
[ "java.io.IOException", "java.net.Socket" ]
import java.io.IOException; import java.net.Socket;
import java.io.*; import java.net.*;
[ "java.io", "java.net" ]
java.io; java.net;
2,352,788
public BufferedImage getFilteredImage(BufferedImage sourceImage) { if(!isEnabled()) { return sourceImage; } if(sourceImage!=source) { initialize(sourceImage); } if(sourceImage!=input) { gIn.drawImage(source, 0, 0, null); } subtractBaseline(); return output; }
BufferedImage function(BufferedImage sourceImage) { if(!isEnabled()) { return sourceImage; } if(sourceImage!=source) { initialize(sourceImage); } if(sourceImage!=input) { gIn.drawImage(source, 0, 0, null); } subtractBaseline(); return output; }
/** * Applies the filter to a source image and returns the result. * * @param sourceImage the source image * @return the filtered image */
Applies the filter to a source image and returns the result
getFilteredImage
{ "repo_name": "dobrown/tracker-mvn", "path": "src/main/java/org/opensourcephysics/media/core/BaselineFilter.java", "license": "gpl-3.0", "size": 12573 }
[ "java.awt.image.BufferedImage" ]
import java.awt.image.BufferedImage;
import java.awt.image.*;
[ "java.awt" ]
java.awt;
1,254,696
FetchInternationalGroupResponse fetchInternationalGroups(AuthenticationToken token, FetchInternationalGroupRequest request);
FetchInternationalGroupResponse fetchInternationalGroups(AuthenticationToken token, FetchInternationalGroupRequest request);
/** * <p>Working with International Groups involves working with more details * than normally allowed. Additionally, it must also be possible to work * with both Active and Suspended International Groups. This request will * facilitate precisely this, reading the additional details needed for all * existing International Groups.</p> * * @param token User Authentication Request object * @param request Fetch International Group Request Object * @return Fetch Response Object, with standard error information */
Working with International Groups involves working with more details than normally allowed. Additionally, it must also be possible to work with both Active and Suspended International Groups. This request will facilitate precisely this, reading the additional details needed for all existing International Groups
fetchInternationalGroups
{ "repo_name": "IWSDevelopers/iws", "path": "iws-api/src/main/java/net/iaeste/iws/api/Committees.java", "license": "apache-2.0", "size": 11669 }
[ "net.iaeste.iws.api.dtos.AuthenticationToken", "net.iaeste.iws.api.requests.FetchInternationalGroupRequest", "net.iaeste.iws.api.responses.FetchInternationalGroupResponse" ]
import net.iaeste.iws.api.dtos.AuthenticationToken; import net.iaeste.iws.api.requests.FetchInternationalGroupRequest; import net.iaeste.iws.api.responses.FetchInternationalGroupResponse;
import net.iaeste.iws.api.dtos.*; import net.iaeste.iws.api.requests.*; import net.iaeste.iws.api.responses.*;
[ "net.iaeste.iws" ]
net.iaeste.iws;
1,281,033
public Color getTeamColor() { return teamColor; }
Color function() { return teamColor; }
/** * Get the color of this team * @return */
Get the color of this team
getTeamColor
{ "repo_name": "thepropterhoc/Artificial_Intelligence_Project1", "path": "src/spacesettlers/objects/Ship.java", "license": "bsd-3-clause", "size": 6110 }
[ "java.awt.Color" ]
import java.awt.Color;
import java.awt.*;
[ "java.awt" ]
java.awt;
1,302,932
public Future<String> putBlob(BlobProperties blobProperties, byte[] usermetadata, ReadableStreamChannel channel);
Future<String> function(BlobProperties blobProperties, byte[] usermetadata, ReadableStreamChannel channel);
/** * Requests for a new blob to be put asynchronously and returns a future that will eventually contain the BlobId of * the new blob on a successful response. * @param blobProperties The properties of the blob. Note that the size specified in the properties is ignored. The * channel is consumed fully, and the size of the blob is the number of bytes read from it. * @param usermetadata Optional user metadata about the blob. This can be null. * @param channel The {@link ReadableStreamChannel} that contains the content of the blob. * @return A future that would contain the BlobId eventually. */
Requests for a new blob to be put asynchronously and returns a future that will eventually contain the BlobId of the new blob on a successful response
putBlob
{ "repo_name": "nsivabalan/ambry", "path": "ambry-api/src/main/java/com.github.ambry/router/Router.java", "license": "apache-2.0", "size": 5290 }
[ "com.github.ambry.messageformat.BlobProperties", "java.util.concurrent.Future" ]
import com.github.ambry.messageformat.BlobProperties; import java.util.concurrent.Future;
import com.github.ambry.messageformat.*; import java.util.concurrent.*;
[ "com.github.ambry", "java.util" ]
com.github.ambry; java.util;
2,675,918
public static Marketplace createMarketplaceWithRestrictedAccessAndAccessibleOrganizations( Organization owner, String marketplaceId, DataService ds, List<Organization> accessibleOrganizations) throws NonUniqueBusinessKeyException { Assert.assertNotNull("Marketplace owner not defined", owner); Assert.assertNotNull("Marketplace id not defined", marketplaceId); Assert.assertTrue(marketplaceId.trim().length() > 0); Marketplace mp = new Marketplace(); mp.setMarketplaceId(marketplaceId.trim()); mp.setOrganization(owner); mp.setOpen(true); setDefaultLandingpage(mp); createRevenueModels(mp, ds); mp.setRestricted(true); ds.persist(mp); for (Organization org : accessibleOrganizations) { MarketplaceAccess access = new MarketplaceAccess(); access.setMarketplace(mp); access.setOrganization(org); ds.persist(access); } return mp; }
static Marketplace function( Organization owner, String marketplaceId, DataService ds, List<Organization> accessibleOrganizations) throws NonUniqueBusinessKeyException { Assert.assertNotNull(STR, owner); Assert.assertNotNull(STR, marketplaceId); Assert.assertTrue(marketplaceId.trim().length() > 0); Marketplace mp = new Marketplace(); mp.setMarketplaceId(marketplaceId.trim()); mp.setOrganization(owner); mp.setOpen(true); setDefaultLandingpage(mp); createRevenueModels(mp, ds); mp.setRestricted(true); ds.persist(mp); for (Organization org : accessibleOrganizations) { MarketplaceAccess access = new MarketplaceAccess(); access.setMarketplace(mp); access.setOrganization(org); ds.persist(access); } return mp; }
/** * Creates a restricted marketplace with accessible organizations. * * @param owner * the owner of the marketplace * @param marketplaceId * optional id (default is '<i>&lt;oId&gt;_GLOBAL</i>') * @param ds * a data service * @return the created marketplace * @throws NonUniqueBusinessKeyException */
Creates a restricted marketplace with accessible organizations
createMarketplaceWithRestrictedAccessAndAccessibleOrganizations
{ "repo_name": "opetrovski/development", "path": "oscm-unittests-base/javasrc/org/oscm/test/data/Marketplaces.java", "license": "apache-2.0", "size": 15535 }
[ "java.util.List", "org.junit.Assert", "org.oscm.dataservice.local.DataService", "org.oscm.domobjects.Marketplace", "org.oscm.domobjects.MarketplaceAccess", "org.oscm.domobjects.Organization", "org.oscm.internal.types.exception.NonUniqueBusinessKeyException" ]
import java.util.List; import org.junit.Assert; import org.oscm.dataservice.local.DataService; import org.oscm.domobjects.Marketplace; import org.oscm.domobjects.MarketplaceAccess; import org.oscm.domobjects.Organization; import org.oscm.internal.types.exception.NonUniqueBusinessKeyException;
import java.util.*; import org.junit.*; import org.oscm.dataservice.local.*; import org.oscm.domobjects.*; import org.oscm.internal.types.exception.*;
[ "java.util", "org.junit", "org.oscm.dataservice", "org.oscm.domobjects", "org.oscm.internal" ]
java.util; org.junit; org.oscm.dataservice; org.oscm.domobjects; org.oscm.internal;
528,089
private MeasurementListener getMeasurementListener() throws RemoteException { class MeasurementSaveListener extends UnicastRemoteObject implements MeasurementListener { private static final long serialVersionUID = 6658831597399735839L; MeasurementSaveListener() throws RemoteException { super(); }
MeasurementListener function() throws RemoteException { class MeasurementSaveListener extends UnicastRemoteObject implements MeasurementListener { private static final long serialVersionUID = 6658831597399735839L; MeasurementSaveListener() throws RemoteException { super(); }
/** * Returns the listener which gets added to the observed measurement for the saver to know when * it starts and stops. * @return Listener to be added to the observed measurement. * @throws RemoteException */
Returns the listener which gets added to the observed measurement for the saver to know when it starts and stops
getMeasurementListener
{ "repo_name": "langmo/youscope", "path": "core/server/src/main/java/org/youscope/server/MeasurementSaverImpl.java", "license": "gpl-2.0", "size": 21102 }
[ "java.rmi.RemoteException", "java.rmi.server.UnicastRemoteObject", "org.youscope.common.measurement.MeasurementListener" ]
import java.rmi.RemoteException; import java.rmi.server.UnicastRemoteObject; import org.youscope.common.measurement.MeasurementListener;
import java.rmi.*; import java.rmi.server.*; import org.youscope.common.measurement.*;
[ "java.rmi", "org.youscope.common" ]
java.rmi; org.youscope.common;
1,748,277
public static ImmutableList<String> getAllResponseTypes(final ImmutableList<Ds3ResponseCode> responseCodes) { final ImmutableList.Builder<String> builder = ImmutableList.builder(); if (isEmpty(responseCodes)) { //No response codes is logged as an error instead of throwing an error //because some test may not contain response codes LOG.error("There are no Response Codes associated with this request"); return ImmutableList.of(); } for (final Ds3ResponseCode responseCode : responseCodes) { if (isNonErrorCode(responseCode.getCode())) { builder.add(getResponseType(responseCode.getDs3ResponseTypes())); } } return builder.build(); }
static ImmutableList<String> function(final ImmutableList<Ds3ResponseCode> responseCodes) { final ImmutableList.Builder<String> builder = ImmutableList.builder(); if (isEmpty(responseCodes)) { LOG.error(STR); return ImmutableList.of(); } for (final Ds3ResponseCode responseCode : responseCodes) { if (isNonErrorCode(responseCode.getCode())) { builder.add(getResponseType(responseCode.getDs3ResponseTypes())); } } return builder.build(); }
/** * Gets the list of Response Types from a list of Ds3ResponseCodes */
Gets the list of Response Types from a list of Ds3ResponseCodes
getAllResponseTypes
{ "repo_name": "rpmoore/ds3_autogen", "path": "ds3-autogen-utils/src/main/java/com/spectralogic/ds3autogen/utils/ResponsePayloadUtil.java", "license": "apache-2.0", "size": 4898 }
[ "com.google.common.collect.ImmutableList", "com.spectralogic.ds3autogen.api.models.Ds3ResponseCode" ]
import com.google.common.collect.ImmutableList; import com.spectralogic.ds3autogen.api.models.Ds3ResponseCode;
import com.google.common.collect.*; import com.spectralogic.ds3autogen.api.models.*;
[ "com.google.common", "com.spectralogic.ds3autogen" ]
com.google.common; com.spectralogic.ds3autogen;
666,673
public ChromosomeFactory<T> getTestChromosomeFactory() { return testChromosomeFactory; } @SuppressWarnings("unchecked") protected AbstractTestSuiteChromosome(AbstractTestSuiteChromosome<T> source) { this(source.testChromosomeFactory); for (T test : source.tests) { addTest((T) test.clone()); } //this.setFitness(source.getFitness()); this.setFitnessValues(source.getFitnessValues()); this.setPreviousFitnessValues(source.getPreviousFitnessValues()); this.setChanged(source.isChanged()); this.setCoverageValues(source.getCoverageValues()); this.setNumsOfCoveredGoals(source.getNumsOfCoveredGoals()); this.setNumsOfNotCoveredGoals(source.getNumsNotCoveredGoals()); }
ChromosomeFactory<T> function() { return testChromosomeFactory; } @SuppressWarnings(STR) protected AbstractTestSuiteChromosome(AbstractTestSuiteChromosome<T> source) { this(source.testChromosomeFactory); for (T test : source.tests) { addTest((T) test.clone()); } this.setFitnessValues(source.getFitnessValues()); this.setPreviousFitnessValues(source.getPreviousFitnessValues()); this.setChanged(source.isChanged()); this.setCoverageValues(source.getCoverageValues()); this.setNumsOfCoveredGoals(source.getNumsOfCoveredGoals()); this.setNumsOfNotCoveredGoals(source.getNumsNotCoveredGoals()); }
/** * <p>Getter for the field <code>testChromosomeFactory</code>.</p> * * @return a {@link org.evosuite.ga.ChromosomeFactory} object. */
Getter for the field <code>testChromosomeFactory</code>
getTestChromosomeFactory
{ "repo_name": "sefaakca/EvoSuite-Sefa", "path": "client/src/main/java/org/evosuite/testsuite/AbstractTestSuiteChromosome.java", "license": "lgpl-3.0", "size": 7380 }
[ "org.evosuite.ga.ChromosomeFactory" ]
import org.evosuite.ga.ChromosomeFactory;
import org.evosuite.ga.*;
[ "org.evosuite.ga" ]
org.evosuite.ga;
1,436,656
public long writeValue(OutputStream out) throws IOException { DataInputStream dis = getValueStream(); long size = 0; try { int chunkSize; while ((chunkSize = valueBufferInputStream.getRemain()) > 0) { chunkSize = Math.min(chunkSize, MAX_VAL_TRANSFER_BUF_SIZE); valTransferBuffer.setSize(chunkSize); dis.readFully(valTransferBuffer.get(), 0, chunkSize); out.write(valTransferBuffer.get(), 0, chunkSize); size += chunkSize; } return size; } finally { dis.close(); } }
long function(OutputStream out) throws IOException { DataInputStream dis = getValueStream(); long size = 0; try { int chunkSize; while ((chunkSize = valueBufferInputStream.getRemain()) > 0) { chunkSize = Math.min(chunkSize, MAX_VAL_TRANSFER_BUF_SIZE); valTransferBuffer.setSize(chunkSize); dis.readFully(valTransferBuffer.get(), 0, chunkSize); out.write(valTransferBuffer.get(), 0, chunkSize); size += chunkSize; } return size; } finally { dis.close(); } }
/** * Writing the value to the output stream. This method avoids copying * value data from Scanner into user buffer, then writing to the output * stream. It does not require the value length to be known. * * @param out * The output stream * @return the length of the value * @throws IOException */
Writing the value to the output stream. This method avoids copying value data from Scanner into user buffer, then writing to the output stream. It does not require the value length to be known
writeValue
{ "repo_name": "hanhlh/hadoop-0.20.2_FatBTree", "path": "src/core/org/apache/hadoop/io/file/tfile/TFile.java", "license": "apache-2.0", "size": 78806 }
[ "java.io.DataInputStream", "java.io.IOException", "java.io.OutputStream" ]
import java.io.DataInputStream; import java.io.IOException; import java.io.OutputStream;
import java.io.*;
[ "java.io" ]
java.io;
689,943
public static List<String> findClassesThatExtend(String[] searchPathsOrJars, final Class<?>[] classNames, final boolean innerClasses, String contains, String notContains, boolean annotations) throws IOException { if (log.isDebugEnabled()) { log.debug("searchPathsOrJars : " + Arrays.toString(searchPathsOrJars)); log.debug("superclass : " + Arrays.toString(classNames)); log.debug("innerClasses : " + innerClasses + " annotations: " + annotations); log.debug("contains: " + contains + " notContains: " + notContains); } ClassFilter filter = null; if(annotations) { @SuppressWarnings("unchecked") // Should only be called with classes that extend annotations final Class<? extends Annotation>[] annoclassNames = (Class<? extends Annotation>[]) classNames; filter = new AnnoClassFilter(annoclassNames, innerClasses); } else { filter = new ExtendsClassFilter(classNames, innerClasses, contains, notContains); } return findClasses(searchPathsOrJars, filter); }
static List<String> function(String[] searchPathsOrJars, final Class<?>[] classNames, final boolean innerClasses, String contains, String notContains, boolean annotations) throws IOException { if (log.isDebugEnabled()) { log.debug(STR + Arrays.toString(searchPathsOrJars)); log.debug(STR + Arrays.toString(classNames)); log.debug(STR + innerClasses + STR + annotations); log.debug(STR + contains + STR + notContains); } ClassFilter filter = null; if(annotations) { @SuppressWarnings(STR) final Class<? extends Annotation>[] annoclassNames = (Class<? extends Annotation>[]) classNames; filter = new AnnoClassFilter(annoclassNames, innerClasses); } else { filter = new ExtendsClassFilter(classNames, innerClasses, contains, notContains); } return findClasses(searchPathsOrJars, filter); }
/** * Find classes in the provided path(s)/jar(s) that extend the class(es). * @param searchPathsOrJars - pathnames or jarfiles to search for classes * @param classNames - required parent class(es) or annotations * @param innerClasses - should we include inner classes? * @param contains - classname should contain this string * @param notContains - classname should not contain this string * @param annotations - true if classnames are annotations * * @return List containing discovered classes * @throws IOException when scanning classes fails */
Find classes in the provided path(s)/jar(s) that extend the class(es)
findClassesThatExtend
{ "repo_name": "ubikfsabbe/jmeter", "path": "src/jorphan/org/apache/jorphan/reflect/ClassFinder.java", "license": "apache-2.0", "size": 19725 }
[ "java.io.IOException", "java.lang.annotation.Annotation", "java.util.Arrays", "java.util.List" ]
import java.io.IOException; import java.lang.annotation.Annotation; import java.util.Arrays; import java.util.List;
import java.io.*; import java.lang.annotation.*; import java.util.*;
[ "java.io", "java.lang", "java.util" ]
java.io; java.lang; java.util;
433,140
public RunInner withLastUpdatedTime(OffsetDateTime lastUpdatedTime) { if (this.innerProperties() == null) { this.innerProperties = new RunProperties(); } this.innerProperties().withLastUpdatedTime(lastUpdatedTime); return this; }
RunInner function(OffsetDateTime lastUpdatedTime) { if (this.innerProperties() == null) { this.innerProperties = new RunProperties(); } this.innerProperties().withLastUpdatedTime(lastUpdatedTime); return this; }
/** * Set the lastUpdatedTime property: The last updated time for the run. * * @param lastUpdatedTime the lastUpdatedTime value to set. * @return the RunInner object itself. */
Set the lastUpdatedTime property: The last updated time for the run
withLastUpdatedTime
{ "repo_name": "Azure/azure-sdk-for-java", "path": "sdk/resourcemanager/azure-resourcemanager-containerregistry/src/main/java/com/azure/resourcemanager/containerregistry/fluent/models/RunInner.java", "license": "mit", "size": 18694 }
[ "java.time.OffsetDateTime" ]
import java.time.OffsetDateTime;
import java.time.*;
[ "java.time" ]
java.time;
1,608,522
public static String fillTemplateWithStringsByRegexp( String template, String lineToPlaceInTemplate, Pattern regexp) { final Matcher matcher = regexp.matcher(lineToPlaceInTemplate); String result = template; if (matcher.find()) { for (int i = 0; i <= matcher.groupCount(); i++) { // $n expands comment match like in Pattern.subst(). result = result.replaceAll("\\$" + i, matcher.group(i)); } } return result; }
static String function( String template, String lineToPlaceInTemplate, Pattern regexp) { final Matcher matcher = regexp.matcher(lineToPlaceInTemplate); String result = template; if (matcher.find()) { for (int i = 0; i <= matcher.groupCount(); i++) { result = result.replaceAll("\\$" + i, matcher.group(i)); } } return result; }
/** * Puts the part of the line that matches the regexp into the given template * at positions $n, where 'n' is the number of the matched part in the line. * @param template the string to expand. * @param lineToPlaceInTemplate contains expression which should be placed into string. * @param regexp expression to find in comment. * @return the string, based on template filled with given lines */
Puts the part of the line that matches the regexp into the given template at positions $n, where 'n' is the number of the matched part in the line
fillTemplateWithStringsByRegexp
{ "repo_name": "liscju/checkstyle", "path": "src/main/java/com/puppycrawl/tools/checkstyle/utils/CommonUtils.java", "license": "lgpl-2.1", "size": 18954 }
[ "java.util.regex.Matcher", "java.util.regex.Pattern" ]
import java.util.regex.Matcher; import java.util.regex.Pattern;
import java.util.regex.*;
[ "java.util" ]
java.util;
545,585
public static void checkGarbageCollectionQueue(ChunkedSegmentStorage chunkedSegmentStorage, Set<String> beforeSet, Set<String> afterSet) { // Get the enqueued tasks. // Need to de-dup val tasks = new HashMap<String, GarbageCollector.TaskInfo>(); val tasksList = ((InMemoryTaskQueueManager) chunkedSegmentStorage.getGarbageCollector().getTaskQueue()) .drain(chunkedSegmentStorage.getGarbageCollector().getTaskQueueName(), Integer.MAX_VALUE).stream() .collect(Collectors.toList()); for (val task : tasksList) { tasks.put(task.getName(), task); } // All chunks not in new set must be enqueued for deletion. for ( val oldChunk: beforeSet) { if (!afterSet.contains(oldChunk)) { val task = tasks.get(oldChunk); Assert.assertNotNull(task); Assert.assertEquals(GarbageCollector.TaskInfo.DELETE_CHUNK, task.getTaskType() ); } } // All chunks not in old set must be enqueued for deletion. for ( val newChunk: afterSet) { if (!beforeSet.contains(newChunk)) { val task = tasks.get(newChunk); Assert.assertNotNull(task); Assert.assertEquals(GarbageCollector.TaskInfo.DELETE_CHUNK, task.getTaskType() ); } } }
static void function(ChunkedSegmentStorage chunkedSegmentStorage, Set<String> beforeSet, Set<String> afterSet) { val tasks = new HashMap<String, GarbageCollector.TaskInfo>(); val tasksList = ((InMemoryTaskQueueManager) chunkedSegmentStorage.getGarbageCollector().getTaskQueue()) .drain(chunkedSegmentStorage.getGarbageCollector().getTaskQueueName(), Integer.MAX_VALUE).stream() .collect(Collectors.toList()); for (val task : tasksList) { tasks.put(task.getName(), task); } for ( val oldChunk: beforeSet) { if (!afterSet.contains(oldChunk)) { val task = tasks.get(oldChunk); Assert.assertNotNull(task); Assert.assertEquals(GarbageCollector.TaskInfo.DELETE_CHUNK, task.getTaskType() ); } } for ( val newChunk: afterSet) { if (!beforeSet.contains(newChunk)) { val task = tasks.get(newChunk); Assert.assertNotNull(task); Assert.assertEquals(GarbageCollector.TaskInfo.DELETE_CHUNK, task.getTaskType() ); } } }
/** * Checks garbage collection queue to ensure new chunks and truncated chunks are added to GC queue. * * @param chunkedSegmentStorage Instance of {@link ChunkedSegmentStorage}. * @param beforeSet set of chunks before. * @param afterSet set of chunks after. */
Checks garbage collection queue to ensure new chunks and truncated chunks are added to GC queue
checkGarbageCollectionQueue
{ "repo_name": "pravega/pravega", "path": "segmentstore/storage/src/test/java/io/pravega/segmentstore/storage/chunklayer/TestUtils.java", "license": "apache-2.0", "size": 24122 }
[ "io.pravega.segmentstore.storage.mocks.InMemoryTaskQueueManager", "java.util.HashMap", "java.util.Set", "java.util.stream.Collectors", "org.junit.Assert" ]
import io.pravega.segmentstore.storage.mocks.InMemoryTaskQueueManager; import java.util.HashMap; import java.util.Set; import java.util.stream.Collectors; import org.junit.Assert;
import io.pravega.segmentstore.storage.mocks.*; import java.util.*; import java.util.stream.*; import org.junit.*;
[ "io.pravega.segmentstore", "java.util", "org.junit" ]
io.pravega.segmentstore; java.util; org.junit;
2,183,169
public Key getKey() { return key; }
Key function() { return key; }
/** * Returns the key associated with the button. * * @return the key associated with the button * * @see #setKey(ch.jeda.event.Key) * @since 1.3 */
Returns the key associated with the button
getKey
{ "repo_name": "r0the/jeda", "path": "core/src/ch/jeda/ui/TextButton.java", "license": "lgpl-3.0", "size": 7378 }
[ "ch.jeda.event.Key" ]
import ch.jeda.event.Key;
import ch.jeda.event.*;
[ "ch.jeda.event" ]
ch.jeda.event;
2,625,072
try { ConfigFactory theConfigFactory = (ConfigFactory)EPackage.Registry.INSTANCE.getEFactory(ConfigPackage.eNS_URI); if (theConfigFactory != null) { return theConfigFactory; } } catch (Exception exception) { EcorePlugin.INSTANCE.log(exception); } return new ConfigFactoryImpl(); } public ConfigFactoryImpl() { super(); }
try { ConfigFactory theConfigFactory = (ConfigFactory)EPackage.Registry.INSTANCE.getEFactory(ConfigPackage.eNS_URI); if (theConfigFactory != null) { return theConfigFactory; } } catch (Exception exception) { EcorePlugin.INSTANCE.log(exception); } return new ConfigFactoryImpl(); } public ConfigFactoryImpl() { super(); }
/** * Creates the default factory implementation. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
Creates the default factory implementation.
init
{ "repo_name": "upohl/eloquent", "path": "plugins/org.muml.psm.allocation.algorithm.ilp.opt4j.config/src/org/muml/psm/allocation/algorithm/ilp/opt4j/config/impl/ConfigFactoryImpl.java", "license": "epl-1.0", "size": 2344 }
[ "org.eclipse.emf.ecore.EPackage", "org.eclipse.emf.ecore.plugin.EcorePlugin", "org.muml.psm.allocation.algorithm.ilp.opt4j.config.ConfigFactory", "org.muml.psm.allocation.algorithm.ilp.opt4j.config.ConfigPackage" ]
import org.eclipse.emf.ecore.EPackage; import org.eclipse.emf.ecore.plugin.EcorePlugin; import org.muml.psm.allocation.algorithm.ilp.opt4j.config.ConfigFactory; import org.muml.psm.allocation.algorithm.ilp.opt4j.config.ConfigPackage;
import org.eclipse.emf.ecore.*; import org.eclipse.emf.ecore.plugin.*; import org.muml.psm.allocation.algorithm.ilp.opt4j.config.*;
[ "org.eclipse.emf", "org.muml.psm" ]
org.eclipse.emf; org.muml.psm;
2,098,787
super.add(new Left<L, R>(lefty)); return true; }
super.add(new Left<L, R>(lefty)); return true; }
/** * Add a left to this list. * @param lefty The left thing. * @return whether a thing was added. */
Add a left to this list
addLeft
{ "repo_name": "julie-sullivan/phytomine", "path": "intermine/web/main/src/org/intermine/webservice/server/core/DisjointList.java", "license": "lgpl-2.1", "size": 1244 }
[ "org.intermine.webservice.server.core.Either" ]
import org.intermine.webservice.server.core.Either;
import org.intermine.webservice.server.core.*;
[ "org.intermine.webservice" ]
org.intermine.webservice;
275,620
private void handleLocalAssociatedParams() { for (ArrayList<RegisterSpec> ssaRegs : localVariables.values()) { int sz = ssaRegs.size(); int paramIndex = -1; int paramCategory = 0; // First, find out if this local variable is a parameter. for (int i = 0; i < sz; i++) { RegisterSpec ssaSpec = ssaRegs.get(i); int ssaReg = ssaSpec.getReg(); paramIndex = getParameterIndexForReg(ssaReg); if (paramIndex >= 0) { paramCategory = ssaSpec.getCategory(); addMapping(ssaSpec, paramIndex); break; } } if (paramIndex < 0) { // This local wasn't a parameter. continue; } // Any remaining local-associated registers will be mapped later. tryMapRegs(ssaRegs, paramIndex, paramCategory, true); } }
void function() { for (ArrayList<RegisterSpec> ssaRegs : localVariables.values()) { int sz = ssaRegs.size(); int paramIndex = -1; int paramCategory = 0; for (int i = 0; i < sz; i++) { RegisterSpec ssaSpec = ssaRegs.get(i); int ssaReg = ssaSpec.getReg(); paramIndex = getParameterIndexForReg(ssaReg); if (paramIndex >= 0) { paramCategory = ssaSpec.getCategory(); addMapping(ssaSpec, paramIndex); break; } } if (paramIndex < 0) { continue; } tryMapRegs(ssaRegs, paramIndex, paramCategory, true); } }
/** * Maps all local-associated parameters to rop registers. */
Maps all local-associated parameters to rop registers
handleLocalAssociatedParams
{ "repo_name": "nikita36078/J2ME-Loader", "path": "dexlib/src/main/java/com/android/dx/ssa/back/FirstFitLocalCombiningAllocator.java", "license": "apache-2.0", "size": 43272 }
[ "com.android.dx.rop.code.RegisterSpec", "java.util.ArrayList" ]
import com.android.dx.rop.code.RegisterSpec; import java.util.ArrayList;
import com.android.dx.rop.code.*; import java.util.*;
[ "com.android.dx", "java.util" ]
com.android.dx; java.util;
1,942,380
@Override public List<T> readList(Collection<String> ids) { // return mapper.select(getDataStore().find(inheritanceRoot).where(Clause.in("_id", ids.toArray()))); // return getDataStore().find(inheritanceRoot).field("_id") // .in(new AdaptedIdIterable(ids)).asList(); return null; }
List<T> function(Collection<String> ids) { return null; }
/** * Read each of the instances corresponding to the given Collection of IDs, * returning the results as a list. If an ID in the provided Collection does * not exist, it is simply not included in the returned results. * * @param ids * a Collection of IDs to read. */
Read each of the instances corresponding to the given Collection of IDs, returning the results as a list. If an ID in the provided Collection does not exist, it is simply not included in the returned results
readList
{ "repo_name": "amilaI/RepoExpress", "path": "src/java/cassandra/com/strategicgains/repoexpress/cassandra/CassandraRepository.java", "license": "apache-2.0", "size": 8467 }
[ "java.util.Collection", "java.util.List" ]
import java.util.Collection; import java.util.List;
import java.util.*;
[ "java.util" ]
java.util;
2,187,847
public static void normalizedListInputFunctions(CallExpr callExpr) { FunctionSignature fs = callExpr.getFunctionSignature(); String internalFuncName = LIST_INPUT_FUNCTION_MAP.get(fs.getName().toLowerCase()); if (internalFuncName == null) { return; } callExpr.setFunctionSignature(new FunctionSignature(FunctionConstants.ASTERIX_DV, internalFuncName, 1)); ListConstructor listConstr = new ListConstructor(ListConstructor.Type.ORDERED_LIST_CONSTRUCTOR, callExpr.getExprList()); listConstr.setSourceLocation(callExpr.getSourceLocation()); callExpr.setExprList(new ArrayList<>(Collections.singletonList(listConstr))); }
static void function(CallExpr callExpr) { FunctionSignature fs = callExpr.getFunctionSignature(); String internalFuncName = LIST_INPUT_FUNCTION_MAP.get(fs.getName().toLowerCase()); if (internalFuncName == null) { return; } callExpr.setFunctionSignature(new FunctionSignature(FunctionConstants.ASTERIX_DV, internalFuncName, 1)); ListConstructor listConstr = new ListConstructor(ListConstructor.Type.ORDERED_LIST_CONSTRUCTOR, callExpr.getExprList()); listConstr.setSourceLocation(callExpr.getSourceLocation()); callExpr.setExprList(new ArrayList<>(Collections.singletonList(listConstr))); }
/** * Rewrites a variable-arg, user-surface function call into an internal, list-arg function. * * @param callExpr * The input call expression. */
Rewrites a variable-arg, user-surface function call into an internal, list-arg function
normalizedListInputFunctions
{ "repo_name": "apache/incubator-asterixdb", "path": "asterixdb/asterix-lang-sqlpp/src/main/java/org/apache/asterix/lang/sqlpp/util/FunctionMapUtil.java", "license": "apache-2.0", "size": 9264 }
[ "java.util.ArrayList", "java.util.Collections", "org.apache.asterix.common.functions.FunctionConstants", "org.apache.asterix.common.functions.FunctionSignature", "org.apache.asterix.lang.common.expression.CallExpr", "org.apache.asterix.lang.common.expression.ListConstructor" ]
import java.util.ArrayList; import java.util.Collections; import org.apache.asterix.common.functions.FunctionConstants; import org.apache.asterix.common.functions.FunctionSignature; import org.apache.asterix.lang.common.expression.CallExpr; import org.apache.asterix.lang.common.expression.ListConstructor;
import java.util.*; import org.apache.asterix.common.functions.*; import org.apache.asterix.lang.common.expression.*;
[ "java.util", "org.apache.asterix" ]
java.util; org.apache.asterix;
1,760,618
@Override public final void invoke(Request request, Response response) throws IOException, ServletException { // Select the Host to be used for this Request Host host = request.getHost(); if (host == null) { response.sendError (HttpServletResponse.SC_BAD_REQUEST, sm.getString("standardEngine.noHost", request.getServerName())); return; } if (request.isAsyncSupported()) { request.setAsyncSupported(host.getPipeline().isAsyncSupported()); } // Ask this Host to process this request host.getPipeline().getFirst().invoke(request, response); }
final void function(Request request, Response response) throws IOException, ServletException { Host host = request.getHost(); if (host == null) { response.sendError (HttpServletResponse.SC_BAD_REQUEST, sm.getString(STR, request.getServerName())); return; } if (request.isAsyncSupported()) { request.setAsyncSupported(host.getPipeline().isAsyncSupported()); } host.getPipeline().getFirst().invoke(request, response); }
/** * Select the appropriate child Host to process this request, * based on the requested server name. If no matching Host can * be found, return an appropriate HTTP error. * * @param request Request to be processed * @param response Response to be produced * * @exception IOException if an input/output error occurred * @exception ServletException if a servlet error occurred */
Select the appropriate child Host to process this request, based on the requested server name. If no matching Host can be found, return an appropriate HTTP error
invoke
{ "repo_name": "Nickname0806/Test_Q4", "path": "java/org/apache/catalina/core/StandardEngineValve.java", "license": "apache-2.0", "size": 3127 }
[ "java.io.IOException", "javax.servlet.ServletException", "javax.servlet.http.HttpServletResponse", "org.apache.catalina.Host", "org.apache.catalina.connector.Request", "org.apache.catalina.connector.Response" ]
import java.io.IOException; import javax.servlet.ServletException; import javax.servlet.http.HttpServletResponse; import org.apache.catalina.Host; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response;
import java.io.*; import javax.servlet.*; import javax.servlet.http.*; import org.apache.catalina.*; import org.apache.catalina.connector.*;
[ "java.io", "javax.servlet", "org.apache.catalina" ]
java.io; javax.servlet; org.apache.catalina;
278,039
public ArrayList<SpellAbility> getOriginalAbilities(Card c) { ArrayList<SpellAbility> returnList = new ArrayList<SpellAbility>(); if (originalAbilities.containsKey(c)) { returnList.addAll(originalAbilities.get(c)); } return returnList; }
ArrayList<SpellAbility> function(Card c) { ArrayList<SpellAbility> returnList = new ArrayList<SpellAbility>(); if (originalAbilities.containsKey(c)) { returnList.addAll(originalAbilities.get(c)); } return returnList; }
/** * <p>Getter for the field <code>originalAbilities</code>.</p> * * @param c a {@link forge.Card} object. * @return a {@link java.util.ArrayList} object. */
Getter for the field <code>originalAbilities</code>
getOriginalAbilities
{ "repo_name": "xitongzou/cardforge", "path": "src/forge/StaticEffect.java", "license": "gpl-3.0", "size": 15888 }
[ "java.util.ArrayList" ]
import java.util.ArrayList;
import java.util.*;
[ "java.util" ]
java.util;
276,464
public void setProperty(Property p, String value) { this.siteConfig.put(p.getKey(), value); }
void function(Property p, String value) { this.siteConfig.put(p.getKey(), value); }
/** * Sets arbitrary configuration properties. * * @since 1.6.0 */
Sets arbitrary configuration properties
setProperty
{ "repo_name": "mikewalch/accumulo", "path": "minicluster/src/main/java/org/apache/accumulo/minicluster/impl/MiniAccumuloConfigImpl.java", "license": "apache-2.0", "size": 22561 }
[ "org.apache.accumulo.core.conf.Property" ]
import org.apache.accumulo.core.conf.Property;
import org.apache.accumulo.core.conf.*;
[ "org.apache.accumulo" ]
org.apache.accumulo;
2,012,832
public void unassign(HRegionInfo region) { unassign(region, false); }
void function(HRegionInfo region) { unassign(region, false); }
/** * Unassigns the specified region. * <p> * Updates the RegionState and sends the CLOSE RPC. * <p> * If a RegionPlan is already set, it will remain. * * @param region server to be unassigned */
Unassigns the specified region. Updates the RegionState and sends the CLOSE RPC. If a RegionPlan is already set, it will remain
unassign
{ "repo_name": "simplegeo/hadoop-hbase", "path": "src/main/java/org/apache/hadoop/hbase/master/AssignmentManager.java", "license": "apache-2.0", "size": 73693 }
[ "org.apache.hadoop.hbase.HRegionInfo" ]
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.*;
[ "org.apache.hadoop" ]
org.apache.hadoop;
2,058,691
@Override public void close() throws IOException { closed = true; super.close(); }
void function() throws IOException { closed = true; super.close(); }
/** * Closes this input stream and releases any system resources * associated with the stream. * This * method simply performs {@code in.close()}. * * @throws IOException if an I/O error occurs. * @see java.io.FilterInputStream#in */
Closes this input stream and releases any system resources associated with the stream. This method simply performs in.close()
close
{ "repo_name": "apache/tomcat", "path": "java/org/apache/tomcat/util/http/fileupload/util/LimitedInputStream.java", "license": "apache-2.0", "size": 5510 }
[ "java.io.IOException" ]
import java.io.IOException;
import java.io.*;
[ "java.io" ]
java.io;
1,158,772
public void addBlockedWorkFragment(WorkFragment.Builder ftask) { if (trace.val) LOG.trace("Adding block FragmentTaskMessage for txn #" + this.txn_id); this.blockedTasks.add(ftask); }
void function(WorkFragment.Builder ftask) { if (trace.val) LOG.trace(STR + this.txn_id); this.blockedTasks.add(ftask); }
/** * Add a FragmentTaskMessage that is blocked until all of the partitions return results/responses * for this DependencyInfo * @param ftask */
Add a FragmentTaskMessage that is blocked until all of the partitions return results/responses for this DependencyInfo
addBlockedWorkFragment
{ "repo_name": "apavlo/h-store", "path": "src/frontend/edu/brown/hstore/txns/DependencyInfo.java", "license": "gpl-3.0", "size": 14466 }
[ "edu.brown.hstore.Hstoreservice" ]
import edu.brown.hstore.Hstoreservice;
import edu.brown.hstore.*;
[ "edu.brown.hstore" ]
edu.brown.hstore;
2,705,304
public static scale_delta_omegaType fromPerAligned(byte[] encodedBytes) { scale_delta_omegaType result = new scale_delta_omegaType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; }
static scale_delta_omegaType function(byte[] encodedBytes) { scale_delta_omegaType result = new scale_delta_omegaType(); result.decodePerAligned(new BitStreamReader(encodedBytes)); return result; }
/** * Creates a new scale_delta_omegaType from encoded stream. */
Creates a new scale_delta_omegaType from encoded stream
fromPerAligned
{ "repo_name": "google/supl-client", "path": "src/main/java/com/google/location/suplclient/asn1/supl2/rrlp_components/GANSSEphemerisDeltaScales.java", "license": "apache-2.0", "size": 72248 }
[ "com.google.location.suplclient.asn1.base.BitStreamReader" ]
import com.google.location.suplclient.asn1.base.BitStreamReader;
import com.google.location.suplclient.asn1.base.*;
[ "com.google.location" ]
com.google.location;
1,084,906
public User createUser(String username, final String password, final String name, final String email) throws UserAlreadyExistsException { if (provider.isReadOnly()) { throw new UnsupportedOperationException("User provider is read-only."); } if (username == null || username.isEmpty()) { throw new IllegalArgumentException("Null or empty username."); } if (password == null || password.isEmpty()) { throw new IllegalArgumentException("Null or empty password."); } // Make sure that the username is valid. try { username = Stringprep.nodeprep(username); } catch (final StringprepException se) { throw new IllegalArgumentException("Invalid username: " + username, se); } if (provider.isNameRequired() && (name == null || name.matches("\\s*"))) { throw new IllegalArgumentException("Invalid or empty name specified with provider that requires name. User: " + username + " Name: " + name); } if (provider.isEmailRequired() && !StringUtils.isValidEmailAddress(email)) { throw new IllegalArgumentException("Invalid or empty email address specified with provider that requires email address. User: " + username + " Email: " + email); } final User user = provider.createUser(username, password, name, email); userCache.put(username, user); // Fire event. final Map<String,Object> params = Collections.emptyMap(); UserEventDispatcher.dispatchEvent(user, UserEventDispatcher.EventType.user_created, params); return user; }
User function(String username, final String password, final String name, final String email) throws UserAlreadyExistsException { if (provider.isReadOnly()) { throw new UnsupportedOperationException(STR); } if (username == null username.isEmpty()) { throw new IllegalArgumentException(STR); } if (password == null password.isEmpty()) { throw new IllegalArgumentException(STR); } try { username = Stringprep.nodeprep(username); } catch (final StringprepException se) { throw new IllegalArgumentException(STR + username, se); } if (provider.isNameRequired() && (name == null name.matches("\\s*"))) { throw new IllegalArgumentException(STR + username + STR + name); } if (provider.isEmailRequired() && !StringUtils.isValidEmailAddress(email)) { throw new IllegalArgumentException(STR + username + STR + email); } final User user = provider.createUser(username, password, name, email); userCache.put(username, user); final Map<String,Object> params = Collections.emptyMap(); UserEventDispatcher.dispatchEvent(user, UserEventDispatcher.EventType.user_created, params); return user; }
/** * Creates a new User. Required values are username and password. The email address * and name can optionally be {@code null}, unless the UserProvider deems that * either of them are required. * * @param username the new and unique username for the account. * @param password the password for the account (plain text). * @param name the name of the user, which can be {@code null} unless the UserProvider * deems that it's required. * @param email the email address to associate with the new account, which can * be {@code null}, unless the UserProvider deems that it's required. * @return a new User. * @throws UserAlreadyExistsException if the username already exists in the system. * @throws UnsupportedOperationException if the provider does not support the * operation. */
Creates a new User. Required values are username and password. The email address and name can optionally be null, unless the UserProvider deems that either of them are required
createUser
{ "repo_name": "speedy01/Openfire", "path": "xmppserver/src/main/java/org/jivesoftware/openfire/user/UserManager.java", "license": "apache-2.0", "size": 22624 }
[ "gnu.inet.encoding.Stringprep", "gnu.inet.encoding.StringprepException", "java.util.Collections", "java.util.Map", "org.jivesoftware.openfire.event.UserEventDispatcher", "org.jivesoftware.util.StringUtils" ]
import gnu.inet.encoding.Stringprep; import gnu.inet.encoding.StringprepException; import java.util.Collections; import java.util.Map; import org.jivesoftware.openfire.event.UserEventDispatcher; import org.jivesoftware.util.StringUtils;
import gnu.inet.encoding.*; import java.util.*; import org.jivesoftware.openfire.event.*; import org.jivesoftware.util.*;
[ "gnu.inet.encoding", "java.util", "org.jivesoftware.openfire", "org.jivesoftware.util" ]
gnu.inet.encoding; java.util; org.jivesoftware.openfire; org.jivesoftware.util;
239,061
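A minimal usage sketch for the createUser record above. It assumes Openfire's UserManager.getInstance() accessor and a running server with a writable user provider; the username, password, name, and email values are hypothetical.

import org.jivesoftware.openfire.user.User;
import org.jivesoftware.openfire.user.UserAlreadyExistsException;
import org.jivesoftware.openfire.user.UserManager;

public class CreateUserExample {
    public static void main(String[] args) {
        // Assumption: UserManager.getInstance() is only meaningful inside a running Openfire server.
        UserManager userManager = UserManager.getInstance();
        try {
            // All argument values are hypothetical; name and email may be null unless the provider requires them.
            User user = userManager.createUser("alice", "s3cret", "Alice Example", "alice@example.org");
            System.out.println("Created user: " + user.getUsername());
        } catch (UserAlreadyExistsException e) {
            System.err.println("Username already exists: " + e.getMessage());
        } catch (UnsupportedOperationException | IllegalArgumentException e) {
            // Read-only provider, or invalid/missing fields, per the Javadoc in the record.
            System.err.println("Could not create user: " + e.getMessage());
        }
    }
}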
public synchronized DataSet removeValues(double value) { Iterator it = null; Value v = null; DataSet ds = null; log.debug("Removing value: " + value); it = getIterator(); // This will doMaintenance() while (it.hasNext()) { v = (Value) it.next(); if (v == null) continue; if (v.doubleValue() == value) { if (ds == null) ds = new DataSet(); it.remove(); ds.addValue(v); } } modified(ds); log.debug("-> Removed: " + ds); return ds; } // /////////////////////////////////////////////////////////////////////// // Contains Functionality // ///////////////////////////////////////////////////////////////////////
synchronized DataSet function(double value) { Iterator it = null; Value v = null; DataSet ds = null; log.debug(STR + value); it = getIterator(); while (it.hasNext()) { v = (Value) it.next(); if (v == null) continue; if (v.doubleValue() == value) { if (ds == null) ds = new DataSet(); it.remove(); ds.addValue(v); } } modified(ds); log.debug(STR + ds); return ds; }
/** * Removes all instances of <tt>Value</tt> having the specified * numerical value. * * @param value * to be searched * @return a <tt>DataSet</tt> instance containing all values which were * removed, <tt>null</tt> if none were removed */
Removes all instances of Value having the specified numerical value
removeValues
{ "repo_name": "Jeach/Java-Stats", "path": "src/com/jeach/stats/DataSet.java", "license": "gpl-3.0", "size": 82805 }
[ "java.util.Iterator" ]
import java.util.Iterator;
import java.util.*;
[ "java.util" ]
java.util;
847,580
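A hedged usage sketch for removeValues. Only the no-arg DataSet constructor and removeValues(double) appear in the record itself; the addValue(double) overload used below is an assumption about the com.jeach.stats API.

import com.jeach.stats.DataSet;

public class RemoveValuesExample {
    public static void main(String[] args) {
        DataSet data = new DataSet();   // no-arg constructor, as used inside removeValues() itself
        data.addValue(1.0);             // addValue(double) is assumed; the record only shows addValue(Value)
        data.addValue(2.0);
        data.addValue(2.0);
        data.addValue(3.0);

        // Removes every element equal to 2.0; returns the removed values, or null if none matched.
        DataSet removed = data.removeValues(2.0);
        System.out.println("Removed: " + removed);
    }
}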
public void writeTo(StreamOutput out) throws IOException { out.writeByte(id); }
void function(StreamOutput out) throws IOException { out.writeByte(id); }
/** * Serialize the GapPolicy to the output stream */
Serialize the GapPolicy to the output stream
writeTo
{ "repo_name": "danielmitterdorfer/elasticsearch", "path": "core/src/main/java/org/elasticsearch/search/aggregations/pipeline/BucketHelpers.java", "license": "apache-2.0", "size": 8490 }
[ "java.io.IOException", "org.elasticsearch.common.io.stream.StreamOutput" ]
import java.io.IOException; import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.*; import org.elasticsearch.common.io.stream.*;
[ "java.io", "org.elasticsearch.common" ]
java.io; org.elasticsearch.common;
2,452,265
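The writeTo record above serializes a GapPolicy constant as a single byte id. The following self-contained sketch reproduces that enum-by-id round-trip pattern with plain java.io streams rather than Elasticsearch's StreamOutput/StreamInput, so the Policy type and its ids are illustrative stand-ins.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class EnumByIdExample {
    // Stand-in for an enum like GapPolicy: each constant carries a stable byte id.
    enum Policy {
        INSERT_ZEROS((byte) 0), SKIP((byte) 1);

        final byte id;
        Policy(byte id) { this.id = id; }

        void writeTo(DataOutputStream out) throws IOException {
            out.writeByte(id);   // mirrors out.writeByte(id) in the record above
        }

        static Policy readFrom(DataInputStream in) throws IOException {
            byte id = in.readByte();
            for (Policy p : values()) {
                if (p.id == id) return p;
            }
            throw new IOException("Unknown policy id: " + id);
        }
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        Policy.SKIP.writeTo(new DataOutputStream(bytes));
        Policy roundTripped = Policy.readFrom(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(roundTripped);   // prints SKIP
    }
}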
public static void assertIs(final SQLCaseAssertContext assertContext, final ShadowRuleSegment actual, final ExpectedShadowRule expected) { if (null == expected) { assertNull(assertContext.getText("Actual shadow rule should not exist."), actual); } else { assertNotNull(assertContext.getText("Actual shadow rule should exist."), actual); assertThat(assertContext.getText(String.format("`%s`'s shadow rule segment assertion error: ", actual.getClass().getSimpleName())), actual.getShadow(), is(expected.getShadow())); assertThat(assertContext.getText(String.format("`%s`'s shadow rule segment assertion error: ", actual.getClass().getSimpleName())), actual.getSource(), is(expected.getSource())); assertThat(assertContext.getText(String.format("`%s`'s shadow rule segment assertion error: ", actual.getClass().getSimpleName())), actual.getRuleName(), is(expected.getRuleName())); for (ExpectedShadowTableRule each : expected.getShadowTableRules()) { assertIsTableRules(assertContext, actual.getShadowTableRules().get(each.getTableName()), each.getAlgorithms()); } } }
static void function(final SQLCaseAssertContext assertContext, final ShadowRuleSegment actual, final ExpectedShadowRule expected) { if (null == expected) { assertNull(assertContext.getText(STR), actual); } else { assertNotNull(assertContext.getText(STR), actual); assertThat(assertContext.getText(String.format(STR, actual.getClass().getSimpleName())), actual.getShadow(), is(expected.getShadow())); assertThat(assertContext.getText(String.format(STR, actual.getClass().getSimpleName())), actual.getSource(), is(expected.getSource())); assertThat(assertContext.getText(String.format(STR, actual.getClass().getSimpleName())), actual.getRuleName(), is(expected.getRuleName())); for (ExpectedShadowTableRule each : expected.getShadowTableRules()) { assertIsTableRules(assertContext, actual.getShadowTableRules().get(each.getTableName()), each.getAlgorithms()); } } }
/** * Assert shadow rule is correct with expected parser result. * * @param assertContext assert context * @param actual actual shadow rule * @param expected expected shadow rule test case */
Assert shadow rule is correct with expected parser result
assertIs
{ "repo_name": "apache/incubator-shardingsphere", "path": "shardingsphere-test/shardingsphere-parser-test/src/main/java/org/apache/shardingsphere/test/sql/parser/parameterized/asserts/segment/distsql/rdl/ShadowRuleAssert.java", "license": "apache-2.0", "size": 4535 }
[ "org.apache.shardingsphere.shadow.distsql.parser.segment.ShadowRuleSegment", "org.apache.shardingsphere.test.sql.parser.parameterized.asserts.SQLCaseAssertContext", "org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.segment.impl.distsql.rdl.ExpectedShadowRule", "org.hamcrest.CoreMatchers", "org.junit.Assert" ]
import org.apache.shardingsphere.shadow.distsql.parser.segment.ShadowRuleSegment; import org.apache.shardingsphere.test.sql.parser.parameterized.asserts.SQLCaseAssertContext; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.segment.impl.distsql.rdl.ExpectedShadowRule; import org.hamcrest.CoreMatchers; import org.junit.Assert;
import org.apache.shardingsphere.shadow.distsql.parser.segment.*; import org.apache.shardingsphere.test.sql.parser.parameterized.asserts.*; import org.apache.shardingsphere.test.sql.parser.parameterized.jaxb.cases.domain.segment.impl.distsql.rdl.*; import org.hamcrest.*; import org.junit.*;
[ "org.apache.shardingsphere", "org.hamcrest", "org.junit" ]
org.apache.shardingsphere; org.hamcrest; org.junit;
2,479,215
@Override public Adapter createSwitchDefaultParentContainerAdapter() { if (switchDefaultParentContainerItemProvider == null) { switchDefaultParentContainerItemProvider = new SwitchDefaultParentContainerItemProvider(this); } return switchDefaultParentContainerItemProvider; } protected SwitchCaseContainerItemProvider switchCaseContainerItemProvider;
Adapter function() { if (switchDefaultParentContainerItemProvider == null) { switchDefaultParentContainerItemProvider = new SwitchDefaultParentContainerItemProvider(this); } return switchDefaultParentContainerItemProvider; } protected SwitchCaseContainerItemProvider switchCaseContainerItemProvider;
/** * This creates an adapter for a {@link org.wso2.developerstudio.eclipse.gmf.esb.SwitchDefaultParentContainer}. * <!-- begin-user-doc --> * <!-- end-user-doc --> * @generated */
This creates an adapter for a <code>org.wso2.developerstudio.eclipse.gmf.esb.SwitchDefaultParentContainer</code>.
createSwitchDefaultParentContainerAdapter
{ "repo_name": "prabushi/devstudio-tooling-esb", "path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src/org/wso2/developerstudio/eclipse/gmf/esb/provider/EsbItemProviderAdapterFactory.java", "license": "apache-2.0", "size": 339597 }
[ "org.eclipse.emf.common.notify.Adapter" ]
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.*;
[ "org.eclipse.emf" ]
org.eclipse.emf;
2,300,330
public void sessionWillPassivate(ISession session) { if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_CORE.logp(Level.FINE, methodClassName, "sessionWillPassivate", "sessionObservers.size()=" + _sessionObservers.size()); } if (_sessionObservers == null || _sessionObservers.size() < 1) { return; } ISessionObserver sessionObserver = null; for (int i = 0; i < _sessionObservers.size(); i++) { sessionObserver = (ISessionObserver) _sessionObservers.get(i); sessionObserver.sessionWillPassivate(session); } }
void function(ISession session) { if (com.ibm.ejs.ras.TraceComponent.isAnyTracingEnabled() && LoggingUtil.SESSION_LOGGER_CORE.isLoggable(Level.FINE)) { LoggingUtil.SESSION_LOGGER_CORE.logp(Level.FINE, methodClassName, STR, STR + _sessionObservers.size()); } if (_sessionObservers == null || _sessionObservers.size() < 1) { return; } ISessionObserver sessionObserver = null; for (int i = 0; i < _sessionObservers.size(); i++) { sessionObserver = (ISessionObserver) _sessionObservers.get(i); sessionObserver.sessionWillPassivate(session); } }
/** * Method sessionWillPassivate * <p> * * @see com.ibm.wsspi.session.ISessionObserver#sessionWillPassivate(com.ibm.wsspi.session.ISession) */
Method sessionWillPassivate
sessionWillPassivate
{ "repo_name": "OpenLiberty/open-liberty", "path": "dev/com.ibm.ws.session/src/com/ibm/ws/session/SessionEventDispatcher.java", "license": "epl-1.0", "size": 14815 }
[ "com.ibm.ws.session.utils.LoggingUtil", "com.ibm.wsspi.session.ISession", "com.ibm.wsspi.session.ISessionObserver", "java.util.logging.Level" ]
import com.ibm.ws.session.utils.LoggingUtil; import com.ibm.wsspi.session.ISession; import com.ibm.wsspi.session.ISessionObserver; import java.util.logging.Level;
import com.ibm.ws.session.utils.*; import com.ibm.wsspi.session.*; import java.util.logging.*;
[ "com.ibm.ws", "com.ibm.wsspi", "java.util" ]
com.ibm.ws; com.ibm.wsspi; java.util;
360,743
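sessionWillPassivate above fans the callback out to each registered ISessionObserver. This self-contained sketch shows the same fan-out pattern with plain Java types instead of the com.ibm.wsspi session interfaces; the SessionObserver and Dispatcher names are illustrative.

import java.util.ArrayList;
import java.util.List;

public class ObserverDispatchExample {
    interface SessionObserver {
        void sessionWillPassivate(String sessionId);
    }

    static class Dispatcher {
        private final List<SessionObserver> observers = new ArrayList<>();

        void addObserver(SessionObserver observer) { observers.add(observer); }

        // Mirrors the loop in the record: bail out when nothing is registered, then notify each observer in order.
        void sessionWillPassivate(String sessionId) {
            if (observers.isEmpty()) {
                return;
            }
            for (SessionObserver observer : observers) {
                observer.sessionWillPassivate(sessionId);
            }
        }
    }

    public static void main(String[] args) {
        Dispatcher dispatcher = new Dispatcher();
        dispatcher.addObserver(id -> System.out.println("About to passivate session " + id));
        dispatcher.sessionWillPassivate("abc123");
    }
}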