method (string, 13–441k) | clean_method (string, 7–313k) | doc (string, 17–17.3k) | comment (string, 3–1.42k) | method_name (string, 1–273) | extra (dict) | imports (sequence) | imports_info (string, 19–34.8k) | cluster_imports_info (string, 15–3.66k) | libraries (sequence) | libraries_info (string, 6–661) | id (int64, 0–2.92M)
---|---|---|---|---|---|---|---|---|---|---|---
private static void createOrUpdateProject(Client client, String projectName) {
Response response;
// Delete the project if it already exists
response = RESTHelpers.makeRestRequest("GET", client, "/project/" + projectName);
if (response.getStatus() == 200) {
// Project already exists, so delete it
// Note that when deleting a project, its artefacts are also deleted
response = RESTHelpers.makeRestRequest("DELETE", client, "/project/" + projectName);
}
// Create project
// @formatter:off
String json = "{" + " \"name\": \"" + projectName + "\"" + "}";
// @formatter:on
response = RESTHelpers.makeRestRequest("POST", client, "/project", json);
}
| static void function(Client client, String projectName) { Response response; response = RESTHelpers.makeRestRequest("GET", client, STR + projectName); if (response.getStatus() == 200) { response = RESTHelpers.makeRestRequest(STR, client, STR + projectName); } String json = "{" + STRname\STRSTR\STR}STRPOSTSTR/project", json); } | /**
* Creates or updates a project in the assets registry. Update is implemented by first deleting the project and then
* re-adding it. Note that when deleting a project, its artefacts are also deleted.
*
* @param client the client used to connect to the assets registry.
* @param projectName the name of the project to be created or updated.
*/ | Creates or updates a project in the assets registry. Update is implemented by first deleting the project and then re-adding it. Note that when deleting a project, its artefacts are also deleted | createOrUpdateProject | {
"repo_name": "s-case/s-case-core",
"path": "eu.scasefp7.eclipse.core.connect/src/eu/scasefp7/eclipse/core/connect/uploader/ProjectUploader.java",
"license": "apache-2.0",
"size": 12048
} | [
"javax.ws.rs.client.Client",
"javax.ws.rs.core.Response"
] | import javax.ws.rs.client.Client; import javax.ws.rs.core.Response; | import javax.ws.rs.client.*; import javax.ws.rs.core.*; | [
"javax.ws"
] | javax.ws; | 291,108 |
@Override
public void start() throws CantStartPluginException {
logManager.log(CryptoAddressBookCryptoModulePluginRoot.getLogLevelByClass(this.getClass().getName()), "Crypto Address Book Plugin Initializing...", null, null);
try {
cryptoAddressBookCryptoModuleDao = new CryptoAddressBookCryptoModuleDao(pluginDatabaseSystem, pluginId);
cryptoAddressBookCryptoModuleDao.initialize();
} catch (CantInitializeCryptoAddressBookCryptoModuleDatabaseException e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_ADDRESS_BOOK, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, e);
throw new CantStartPluginException(CantStartPluginException.DEFAULT_MESSAGE, e, "There is a problem when trying to initialize CryptoAddressBook DAO", null);
} catch (Exception e) {
errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_ADDRESS_BOOK, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, e);
throw new CantStartPluginException(CantStartPluginException.DEFAULT_MESSAGE, e, "There is a problem I can't identify.", null);
}
logManager.log(CryptoAddressBookCryptoModulePluginRoot.getLogLevelByClass(this.getClass().getName()), "Crypto Address Book Plugin Successfully initialized...", null, null);
this.serviceStatus = ServiceStatus.STARTED;
} | void function() throws CantStartPluginException { logManager.log(CryptoAddressBookCryptoModulePluginRoot.getLogLevelByClass(this.getClass().getName()), STR, null, null); try { cryptoAddressBookCryptoModuleDao = new CryptoAddressBookCryptoModuleDao(pluginDatabaseSystem, pluginId); cryptoAddressBookCryptoModuleDao.initialize(); } catch (CantInitializeCryptoAddressBookCryptoModuleDatabaseException e) { errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_ADDRESS_BOOK, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, e); throw new CantStartPluginException(CantStartPluginException.DEFAULT_MESSAGE, e, STR, null); } catch (Exception e) { errorManager.reportUnexpectedPluginException(Plugins.BITDUBAI_CRYPTO_ADDRESS_BOOK, UnexpectedPluginExceptionSeverity.DISABLES_THIS_PLUGIN, e); throw new CantStartPluginException(CantStartPluginException.DEFAULT_MESSAGE, e, STR, null); } logManager.log(CryptoAddressBookCryptoModulePluginRoot.getLogLevelByClass(this.getClass().getName()), STR, null, null); this.serviceStatus = ServiceStatus.STARTED; } | /**
* Service Interface implementation.
*/ | Service Interface implementation | start | {
"repo_name": "fvasquezjatar/fermat-unused",
"path": "CRY/plugin/crypto_module/fermat-cry-plugin-crypto-module-crypto-address-book-bitdubai/src/main/java/com/bitdubai/fermat_cry_plugin/layer/crypto_module/crypto_address_book/developer/bitdubai/version_1/CryptoAddressBookCryptoModulePluginRoot.java",
"license": "mit",
"size": 20663
} | [
"com.bitdubai.fermat_api.CantStartPluginException",
"com.bitdubai.fermat_api.layer.all_definition.enums.Plugins",
"com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus",
"com.bitdubai.fermat_cry_plugin.layer.crypto_module.crypto_address_book.developer.bitdubai.version_1.database.CryptoAddressBookCryptoModuleDao",
"com.bitdubai.fermat_cry_plugin.layer.crypto_module.crypto_address_book.developer.bitdubai.version_1.exceptions.CantInitializeCryptoAddressBookCryptoModuleDatabaseException",
"com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPluginExceptionSeverity"
] | import com.bitdubai.fermat_api.CantStartPluginException; import com.bitdubai.fermat_api.layer.all_definition.enums.Plugins; import com.bitdubai.fermat_api.layer.all_definition.enums.ServiceStatus; import com.bitdubai.fermat_cry_plugin.layer.crypto_module.crypto_address_book.developer.bitdubai.version_1.database.CryptoAddressBookCryptoModuleDao; import com.bitdubai.fermat_cry_plugin.layer.crypto_module.crypto_address_book.developer.bitdubai.version_1.exceptions.CantInitializeCryptoAddressBookCryptoModuleDatabaseException; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.UnexpectedPluginExceptionSeverity; | import com.bitdubai.fermat_api.*; import com.bitdubai.fermat_api.layer.all_definition.enums.*; import com.bitdubai.fermat_cry_plugin.layer.crypto_module.crypto_address_book.developer.bitdubai.version_1.database.*; import com.bitdubai.fermat_cry_plugin.layer.crypto_module.crypto_address_book.developer.bitdubai.version_1.exceptions.*; import com.bitdubai.fermat_pip_api.layer.pip_platform_service.error_manager.*; | [
"com.bitdubai.fermat_api",
"com.bitdubai.fermat_cry_plugin",
"com.bitdubai.fermat_pip_api"
] | com.bitdubai.fermat_api; com.bitdubai.fermat_cry_plugin; com.bitdubai.fermat_pip_api; | 1,773,689 |
@Test
public void testGetIt() {
final String responseMsg = target().path("myresource").request().get(String.class);
assertEquals("Hello, Heroku!", responseMsg);
} | void function() { final String responseMsg = target().path(STR).request().get(String.class); assertEquals(STR, responseMsg); } | /**
* Test to see that the message "Got it!" is sent in the response.
*/ | Test to see that the message "Got it!" is sent in the response | testGetIt | {
"repo_name": "ricarora/heroku-anagram",
"path": "src/test/java/com/example/MyResourceTest.java",
"license": "gpl-3.0",
"size": 703
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 2,578,944 |
@Override
public Object convertDataUsingConversionMetaData( Object data ) throws KettleValueException {
if ( conversionMetadata == null ) {
throw new KettleValueException(
"API coding error: please specify the conversion metadata before attempting to convert value " + name );
}
// Suppose we have an Integer 123, length 5
// The string variation of this is " 00123"
// To convert this back to an Integer we use the storage metadata
// Specifically, in method convertStringToInteger() we consult the
// storageMetaData to get the correct conversion mask
// That way we're always sure that a conversion works both ways.
//
switch ( conversionMetadata.getType() ) {
case TYPE_STRING:
return getString( data );
case TYPE_INTEGER:
return getInteger( data );
case TYPE_NUMBER:
return getNumber( data );
case TYPE_DATE:
return getDate( data );
case TYPE_BIGNUMBER:
return getBigNumber( data );
case TYPE_BOOLEAN:
return getBoolean( data );
case TYPE_BINARY:
return getBinary( data );
default:
throw new KettleValueException( toString() + " : I can't convert the specified value to data type : "
+ storageMetadata.getType() );
}
} | Object function( Object data ) throws KettleValueException { if ( conversionMetadata == null ) { throw new KettleValueException( STR + name ); } switch ( conversionMetadata.getType() ) { case TYPE_STRING: return getString( data ); case TYPE_INTEGER: return getInteger( data ); case TYPE_NUMBER: return getNumber( data ); case TYPE_DATE: return getDate( data ); case TYPE_BIGNUMBER: return getBigNumber( data ); case TYPE_BOOLEAN: return getBoolean( data ); case TYPE_BINARY: return getBinary( data ); default: throw new KettleValueException( toString() + STR + storageMetadata.getType() ); } } | /**
* Convert an object to the data type specified in the conversion metadata
*
* @param data
* The data
* @return The data converted to the storage data type
* @throws KettleValueException
* in case there is a conversion error.
*/ | Convert an object to the data type specified in the conversion metadata | convertDataUsingConversionMetaData | {
"repo_name": "mattyb149/pentaho-kettle",
"path": "core/src/org/pentaho/di/core/row/value/ValueMetaBase.java",
"license": "apache-2.0",
"size": 170579
} | [
"org.pentaho.di.core.exception.KettleValueException"
] | import org.pentaho.di.core.exception.KettleValueException; | import org.pentaho.di.core.exception.*; | [
"org.pentaho.di"
] | org.pentaho.di; | 1,385,269 |
public long getMessageId();
public Date getTimestamp();
public Object getSource();
public void setSource(Object inSource);
public String getProvider();
public void setProvider(String inProvider); | long getMessageId(); public Date getTimestamp(); public Object getSource(); public void setSource(Object inSource); public String getProvider(); public void function(String inProvider); | /**
* Sets the provider value.
*
* @param inProvider a <code>String</code> value
*/ | Sets the provider value | setProvider | {
"repo_name": "nagyist/marketcetera",
"path": "trunk/core/src/main/java/org/marketcetera/event/Event.java",
"license": "apache-2.0",
"size": 2539
} | [
"java.util.Date"
] | import java.util.Date; | import java.util.*; | [
"java.util"
] | java.util; | 382,367 |
@Override
public Result decode(BinaryBitmap image) throws NotFoundException, ChecksumException, FormatException {
return decode(image, null);
} | Result function(BinaryBitmap image) throws NotFoundException, ChecksumException, FormatException { return decode(image, null); } | /**
* Locates and decodes a MaxiCode in an image.
*
* @return a String representing the content encoded by the MaxiCode
* @throws NotFoundException if a MaxiCode cannot be found
* @throws FormatException if a MaxiCode cannot be decoded
* @throws ChecksumException if error correction fails
*/ | Locates and decodes a MaxiCode in an image | decode | {
"repo_name": "ostap0207/remotify.me",
"path": "remotify.android/Remotify/src/main/java/com/google/zxing/maxicode/MaxiCodeReader.java",
"license": "apache-2.0",
"size": 4261
} | [
"com.google.zxing.BinaryBitmap",
"com.google.zxing.ChecksumException",
"com.google.zxing.FormatException",
"com.google.zxing.NotFoundException",
"com.google.zxing.Result"
] | import com.google.zxing.BinaryBitmap; import com.google.zxing.ChecksumException; import com.google.zxing.FormatException; import com.google.zxing.NotFoundException; import com.google.zxing.Result; | import com.google.zxing.*; | [
"com.google.zxing"
] | com.google.zxing; | 1,365,824 |
@Test
public void testCommandOptions() throws Exception {
final String className = this.getClass().getName();
assertFalse(validateCommand(new String[] {"-foo" }));
// fail due to insufficient number of arguments
assertFalse(validateCommand(new String[] {}));
assertFalse(validateCommand(new String[] {"-getlevel" }));
assertFalse(validateCommand(new String[] {"-setlevel" }));
assertFalse(validateCommand(new String[] {"-getlevel", "foo.bar:8080" }));
// valid command arguments
assertTrue(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className }));
assertTrue(validateCommand(
new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
assertTrue(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className }));
assertTrue(validateCommand(
new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG" }));
// fail due to the extra argument
assertFalse(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className, "blah" }));
assertFalse(validateCommand(
new String[] {"-setlevel", "foo.bar:8080", className, "DEBUG", "blah" }));
assertFalse(validateCommand(
new String[] {"-getlevel", "foo.bar:8080", className, "-setlevel", "foo.bar:8080",
className }));
} | void function() throws Exception { final String className = this.getClass().getName(); assertFalse(validateCommand(new String[] {"-foo" })); assertFalse(validateCommand(new String[] {})); assertFalse(validateCommand(new String[] {STR })); assertFalse(validateCommand(new String[] {STR })); assertFalse(validateCommand(new String[] {STR, STR })); assertTrue(validateCommand( new String[] {STR, STR, className })); assertTrue(validateCommand( new String[] {STR, STR, className, "DEBUG" })); assertTrue(validateCommand( new String[] {STR, STR, className })); assertTrue(validateCommand( new String[] {STR, STR, className, "DEBUG" })); assertFalse(validateCommand( new String[] {STR, STR, className, "blah" })); assertFalse(validateCommand( new String[] {STR, STR, className, "DEBUG", "blah" })); assertFalse(validateCommand( new String[] {STR, STR, className, STR, STR, className })); } | /**
* Test client command line options. Does not validate server behavior.
* @throws Exception if commands return unexpected results.
*/ | Test client command line options. Does not validate server behavior | testCommandOptions | {
"repo_name": "francisliu/hbase",
"path": "hbase-http/src/test/java/org/apache/hadoop/hbase/http/log/TestLogLevel.java",
"license": "apache-2.0",
"size": 18554
} | [
"org.junit.Assert"
] | import org.junit.Assert; | import org.junit.*; | [
"org.junit"
] | org.junit; | 940,840 |
Observable<Flashcard> addFlashcard(Flashcard flashcard); | Observable<Flashcard> addFlashcard(Flashcard flashcard); | /**
* Get an {@link Observable} which will notify the addition of a {@link Flashcard} object
*/ | Get an <code>Observable</code> which will notify the addition of a <code>Flashcard</code> object | addFlashcard | {
"repo_name": "zwang1/words",
"path": "domain/src/main/java/com/dbychkov/domain/repository/FlashcardRepository.java",
"license": "apache-2.0",
"size": 2492
} | [
"com.dbychkov.domain.Flashcard"
] | import com.dbychkov.domain.Flashcard; | import com.dbychkov.domain.*; | [
"com.dbychkov.domain"
] | com.dbychkov.domain; | 1,054,585 |
public List<Contentlet> findContentletsByFolder(Folder parentFolder, User user, boolean respectFrontendRoles) throws DotDataException, DotSecurityException; | List<Contentlet> function(Folder parentFolder, User user, boolean respectFrontendRoles) throws DotDataException, DotSecurityException; | /**
* Gets a list of Contentlets from a given parent folder
* @param parentFolder
* @return
* @throws DotSecurityException
*/ | Gets a list of Contentlets from a given parent folder | findContentletsByFolder | {
"repo_name": "zhiqinghuang/core",
"path": "src/com/dotmarketing/portlets/contentlet/business/ContentletAPI.java",
"license": "gpl-3.0",
"size": 64036
} | [
"com.dotmarketing.exception.DotDataException",
"com.dotmarketing.exception.DotSecurityException",
"com.dotmarketing.portlets.contentlet.model.Contentlet",
"com.dotmarketing.portlets.folders.model.Folder",
"com.liferay.portal.model.User",
"java.util.List"
] | import com.dotmarketing.exception.DotDataException; import com.dotmarketing.exception.DotSecurityException; import com.dotmarketing.portlets.contentlet.model.Contentlet; import com.dotmarketing.portlets.folders.model.Folder; import com.liferay.portal.model.User; import java.util.List; | import com.dotmarketing.exception.*; import com.dotmarketing.portlets.contentlet.model.*; import com.dotmarketing.portlets.folders.model.*; import com.liferay.portal.model.*; import java.util.*; | [
"com.dotmarketing.exception",
"com.dotmarketing.portlets",
"com.liferay.portal",
"java.util"
] | com.dotmarketing.exception; com.dotmarketing.portlets; com.liferay.portal; java.util; | 1,733,246 |
public String getWebdavUrl(NodeRef nodeRef)
{
String url = "";
if (!enabled)
{
return url;
}
try
{
QName typeName = nodeService.getType(nodeRef);
if (getIsContainer(typeName) || getIsDocument(typeName))
{
List<String> paths = fileFolderService.getNameOnlyPath(getRootNode().getNodeForCurrentTenant(), nodeRef);
// build up the webdav url
StringBuilder path = new StringBuilder(128);
path.append("/" + WEBDAV_PREFIX);
for (int i=0; i<paths.size(); i++)
{
path.append("/")
.append(URLEncoder.encode(paths.get(i)));
}
url = path.toString();
}
}
catch (InvalidTypeException typeErr)
{
// cannot build path if file is a type such as a rendition
}
catch (FileNotFoundException nodeErr)
{
// cannot build path if file no longer exists, return default
}
return url;
}
| String function(NodeRef nodeRef) { String url = STR/STR/") .append(URLEncoder.encode(paths.get(i))); } url = path.toString(); } } catch (InvalidTypeException typeErr) { } catch (FileNotFoundException nodeErr) { } return url; } | /**
* Get the WebDavUrl for the specified nodeRef
*
* @param nodeRef the node that the webdav URL (or null)
* @return the URL of the node in webdav or "" if a URL cannot be built.
*/ | Get the WebDavUrl for the specified nodeRef | getWebdavUrl | {
"repo_name": "Alfresco/alfresco-repository",
"path": "src/main/java/org/alfresco/repo/webdav/WebDavServiceImpl.java",
"license": "lgpl-3.0",
"size": 6280
} | [
"org.alfresco.repo.model.filefolder.FileFolderServiceImpl",
"org.alfresco.service.cmr.model.FileNotFoundException",
"org.alfresco.service.cmr.repository.NodeRef",
"org.springframework.extensions.surf.util.URLEncoder"
] | import org.alfresco.repo.model.filefolder.FileFolderServiceImpl; import org.alfresco.service.cmr.model.FileNotFoundException; import org.alfresco.service.cmr.repository.NodeRef; import org.springframework.extensions.surf.util.URLEncoder; | import org.alfresco.repo.model.filefolder.*; import org.alfresco.service.cmr.model.*; import org.alfresco.service.cmr.repository.*; import org.springframework.extensions.surf.util.*; | [
"org.alfresco.repo",
"org.alfresco.service",
"org.springframework.extensions"
] | org.alfresco.repo; org.alfresco.service; org.springframework.extensions; | 519,828 |
@Classpath
public FileCollection getClasspath() {
return classpath;
} | FileCollection function() { return classpath; } | /**
* Returns the classpath to use to resolve type references in the source code.
*
* @return The classpath.
*/ | Returns the classpath to use to resolve type references in the source code | getClasspath | {
"repo_name": "lsmaira/gradle",
"path": "subprojects/language-java/src/main/java/org/gradle/api/tasks/javadoc/Javadoc.java",
"license": "apache-2.0",
"size": 10566
} | [
"org.gradle.api.file.FileCollection"
] | import org.gradle.api.file.FileCollection; | import org.gradle.api.file.*; | [
"org.gradle.api"
] | org.gradle.api; | 157,608 |
@ServiceMethod(returns = ReturnType.SINGLE)
public DataMaskingPolicyInner get(String resourceGroupName, String serverName, String databaseName) {
return getAsync(resourceGroupName, serverName, databaseName).block();
} | @ServiceMethod(returns = ReturnType.SINGLE) DataMaskingPolicyInner function(String resourceGroupName, String serverName, String databaseName) { return getAsync(resourceGroupName, serverName, databaseName).block(); } | /**
* Gets a database data masking policy.
*
* @param resourceGroupName The name of the resource group that contains the resource. You can obtain this value
* from the Azure Resource Manager API or the portal.
* @param serverName The name of the server.
* @param databaseName The name of the database.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return a database data masking policy.
*/ | Gets a database data masking policy | get | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-sql/src/main/java/com/azure/resourcemanager/sql/implementation/DataMaskingPoliciesClientImpl.java",
"license": "mit",
"size": 22311
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.sql.fluent.models.DataMaskingPolicyInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.sql.fluent.models.DataMaskingPolicyInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.sql.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 2,734,747 |
private void renderAnimations() {
Keyframe kf0, kf1, kf2, kf3;
float midwayPoint = 0.2f;
int duration = 500;
// Set up animator for disappearing.
kf0 = Keyframe.ofFloat(0f, 1);
kf1 = Keyframe.ofFloat(midwayPoint, mTransitionMidRadiusMultiplier);
kf2 = Keyframe.ofFloat(1f, mTransitionEndRadiusMultiplier);
PropertyValuesHolder radiusDisappear = PropertyValuesHolder.ofKeyframe(
"animationRadiusMultiplier", kf0, kf1, kf2);
kf0 = Keyframe.ofFloat(0f, 1f);
kf1 = Keyframe.ofFloat(1f, 0f);
PropertyValuesHolder fadeOut = PropertyValuesHolder.ofKeyframe("alpha", kf0, kf1);
mDisappearAnimator = ObjectAnimator.ofPropertyValuesHolder(
this, radiusDisappear, fadeOut).setDuration(duration);
mDisappearAnimator.addUpdateListener(mInvalidateUpdateListener);
// Set up animator for reappearing.
float delayMultiplier = 0.25f;
float transitionDurationMultiplier = 1f;
float totalDurationMultiplier = transitionDurationMultiplier + delayMultiplier;
int totalDuration = (int) (duration * totalDurationMultiplier);
float delayPoint = (delayMultiplier * duration) / totalDuration;
midwayPoint = 1 - (midwayPoint * (1 - delayPoint));
kf0 = Keyframe.ofFloat(0f, mTransitionEndRadiusMultiplier);
kf1 = Keyframe.ofFloat(delayPoint, mTransitionEndRadiusMultiplier);
kf2 = Keyframe.ofFloat(midwayPoint, mTransitionMidRadiusMultiplier);
kf3 = Keyframe.ofFloat(1f, 1);
PropertyValuesHolder radiusReappear = PropertyValuesHolder.ofKeyframe(
"animationRadiusMultiplier", kf0, kf1, kf2, kf3);
kf0 = Keyframe.ofFloat(0f, 0f);
kf1 = Keyframe.ofFloat(delayPoint, 0f);
kf2 = Keyframe.ofFloat(1f, 1f);
PropertyValuesHolder fadeIn = PropertyValuesHolder.ofKeyframe("alpha", kf0, kf1, kf2);
mReappearAnimator = ObjectAnimator.ofPropertyValuesHolder(
this, radiusReappear, fadeIn).setDuration(totalDuration);
mReappearAnimator.addUpdateListener(mInvalidateUpdateListener);
} | void function() { Keyframe kf0, kf1, kf2, kf3; float midwayPoint = 0.2f; int duration = 500; kf0 = Keyframe.ofFloat(0f, 1); kf1 = Keyframe.ofFloat(midwayPoint, mTransitionMidRadiusMultiplier); kf2 = Keyframe.ofFloat(1f, mTransitionEndRadiusMultiplier); PropertyValuesHolder radiusDisappear = PropertyValuesHolder.ofKeyframe( STR, kf0, kf1, kf2); kf0 = Keyframe.ofFloat(0f, 1f); kf1 = Keyframe.ofFloat(1f, 0f); PropertyValuesHolder fadeOut = PropertyValuesHolder.ofKeyframe("alpha", kf0, kf1); mDisappearAnimator = ObjectAnimator.ofPropertyValuesHolder( this, radiusDisappear, fadeOut).setDuration(duration); mDisappearAnimator.addUpdateListener(mInvalidateUpdateListener); float delayMultiplier = 0.25f; float transitionDurationMultiplier = 1f; float totalDurationMultiplier = transitionDurationMultiplier + delayMultiplier; int totalDuration = (int) (duration * totalDurationMultiplier); float delayPoint = (delayMultiplier * duration) / totalDuration; midwayPoint = 1 - (midwayPoint * (1 - delayPoint)); kf0 = Keyframe.ofFloat(0f, mTransitionEndRadiusMultiplier); kf1 = Keyframe.ofFloat(delayPoint, mTransitionEndRadiusMultiplier); kf2 = Keyframe.ofFloat(midwayPoint, mTransitionMidRadiusMultiplier); kf3 = Keyframe.ofFloat(1f, 1); PropertyValuesHolder radiusReappear = PropertyValuesHolder.ofKeyframe( STR, kf0, kf1, kf2, kf3); kf0 = Keyframe.ofFloat(0f, 0f); kf1 = Keyframe.ofFloat(delayPoint, 0f); kf2 = Keyframe.ofFloat(1f, 1f); PropertyValuesHolder fadeIn = PropertyValuesHolder.ofKeyframe("alpha", kf0, kf1, kf2); mReappearAnimator = ObjectAnimator.ofPropertyValuesHolder( this, radiusReappear, fadeIn).setDuration(totalDuration); mReappearAnimator.addUpdateListener(mInvalidateUpdateListener); } | /**
* Render the animations for appearing and disappearing.
*/ | Render the animations for appearing and disappearing | renderAnimations | {
"repo_name": "freezy/MaterialDateTimePicker",
"path": "library/src/main/java/com/wdullaer/materialdatetimepicker/time/RadialTextsView.java",
"license": "apache-2.0",
"size": 16121
} | [
"android.animation.Keyframe",
"android.animation.ObjectAnimator",
"android.animation.PropertyValuesHolder"
] | import android.animation.Keyframe; import android.animation.ObjectAnimator; import android.animation.PropertyValuesHolder; | import android.animation.*; | [
"android.animation"
] | android.animation; | 2,530,256 |
@Override
public String toSource(Node n) {
initCompilerOptionsIfTesting();
return toSource(n, null, true);
} | String function(Node n) { initCompilerOptionsIfTesting(); return toSource(n, null, true); } | /**
* Generates JavaScript source code for an AST, doesn't generate source
* map info.
*/ | Generates JavaScript source code for an AST, doesn't generate source map info | toSource | {
"repo_name": "pauldraper/closure-compiler",
"path": "src/com/google/javascript/jscomp/Compiler.java",
"license": "apache-2.0",
"size": 80912
} | [
"com.google.javascript.rhino.Node"
] | import com.google.javascript.rhino.Node; | import com.google.javascript.rhino.*; | [
"com.google.javascript"
] | com.google.javascript; | 2,471,359 |
public List<String> getEventSourceURIs() {
return Collections.emptyList();
} | List<String> function() { return Collections.emptyList(); } | /**
* Gets the Event Source URI on which the event is handled.<br/>
* If empty, then the event must be sent on all Event Sources.
* @return an URI as string.
*/ | Gets the Event Source URI on which the event is handled. If empty, then the event must be sent on all Event Sources | getEventSourceURIs | {
"repo_name": "auroreallibe/Silverpeas-Core",
"path": "core-api/src/main/java/org/silverpeas/core/notification/sse/AbstractServerEvent.java",
"license": "agpl-3.0",
"size": 5071
} | [
"java.util.Collections",
"java.util.List"
] | import java.util.Collections; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 852,249 |
@Override
public void buildEnvironment(Run<?,?> build, EnvVars env) {
env.put(name,Boolean.toString(value));
env.put(name.toUpperCase(Locale.ENGLISH),Boolean.toString(value)); // backward compatibility pre 1.345
} | void function(Run<?,?> build, EnvVars env) { env.put(name,Boolean.toString(value)); env.put(name.toUpperCase(Locale.ENGLISH),Boolean.toString(value)); } | /**
* Exposes the name/value as an environment variable.
*/ | Exposes the name/value as an environment variable | buildEnvironment | {
"repo_name": "oleg-nenashev/jenkins",
"path": "core/src/main/java/hudson/model/BooleanParameterValue.java",
"license": "mit",
"size": 3321
} | [
"java.util.Locale"
] | import java.util.Locale; | import java.util.*; | [
"java.util"
] | java.util; | 921,067 |
protected void init(MapOwner owner, Channel channel, String mapContextName,
long timeout, int channelSendOptions,ClassLoader[] cls, boolean terminate) {
long start = System.currentTimeMillis();
if (log.isInfoEnabled()) log.info("Initializing AbstractReplicatedMap with context name:"+mapContextName);
this.mapOwner = owner;
this.externalLoaders = cls;
this.channelSendOptions = channelSendOptions;
this.channel = channel;
this.rpcTimeout = timeout;
this.mapname = mapContextName;
//unique context is more efficient if it is stored as bytes
this.mapContextName = mapContextName.getBytes(StandardCharsets.ISO_8859_1);
if ( log.isTraceEnabled() ) log.trace("Created Lazy Map with name:"+mapContextName+", bytes:"+Arrays.toString(this.mapContextName));
//create an rpc channel and add the map as a listener
this.rpcChannel = new RpcChannel(this.mapContextName, channel, this);
//add this map as a message listener
this.channel.addChannelListener(this);
//listen for membership notifications
this.channel.addMembershipListener(this);
try {
//broadcast our map, this just notifies other members of our existence
broadcast(MapMessage.MSG_INIT, true);
//transfer state from another map
transferState();
//state is transferred, we are ready for messaging
broadcast(MapMessage.MSG_START, true);
} catch (ChannelException x) {
log.warn("Unable to send map start message.");
if (terminate) {
breakdown();
throw new RuntimeException("Unable to start replicated map.",x);
}
}
long complete = System.currentTimeMillis() - start;
if (log.isInfoEnabled())
log.info("AbstractReplicatedMap[" +mapContextName + "] initialization was completed in " + complete + " ms.");
} | void function(MapOwner owner, Channel channel, String mapContextName, long timeout, int channelSendOptions,ClassLoader[] cls, boolean terminate) { long start = System.currentTimeMillis(); if (log.isInfoEnabled()) log.info(STR+mapContextName); this.mapOwner = owner; this.externalLoaders = cls; this.channelSendOptions = channelSendOptions; this.channel = channel; this.rpcTimeout = timeout; this.mapname = mapContextName; this.mapContextName = mapContextName.getBytes(StandardCharsets.ISO_8859_1); if ( log.isTraceEnabled() ) log.trace(STR+mapContextName+STR+Arrays.toString(this.mapContextName)); this.rpcChannel = new RpcChannel(this.mapContextName, channel, this); this.channel.addChannelListener(this); this.channel.addMembershipListener(this); try { broadcast(MapMessage.MSG_INIT, true); transferState(); broadcast(MapMessage.MSG_START, true); } catch (ChannelException x) { log.warn(STR); if (terminate) { breakdown(); throw new RuntimeException(STR,x); } } long complete = System.currentTimeMillis() - start; if (log.isInfoEnabled()) log.info(STR +mapContextName + STR + complete + STR); } | /**
* Initializes the map by creating the RPC channel, registering itself as a channel listener
* This method is also responsible for initiating the state transfer
* @param owner Object
* @param channel Channel
* @param mapContextName String
* @param timeout long
* @param channelSendOptions int
* @param cls ClassLoader[]
* @param terminate - Flag for whether to terminate this map that failed to start.
*/ | Initializes the map by creating the RPC channel, registering itself as a channel listener This method is also responsible for initiating the state transfer | init | {
"repo_name": "plumer/codana",
"path": "tomcat_files/8.0.21/AbstractReplicatedMap.java",
"license": "mit",
"size": 58192
} | [
"java.nio.charset.StandardCharsets",
"org.apache.catalina.tribes.Channel",
"org.apache.catalina.tribes.ChannelException",
"org.apache.catalina.tribes.group.RpcChannel",
"org.apache.catalina.tribes.util.Arrays"
] | import java.nio.charset.StandardCharsets; import org.apache.catalina.tribes.Channel; import org.apache.catalina.tribes.ChannelException; import org.apache.catalina.tribes.group.RpcChannel; import org.apache.catalina.tribes.util.Arrays; | import java.nio.charset.*; import org.apache.catalina.tribes.*; import org.apache.catalina.tribes.group.*; import org.apache.catalina.tribes.util.*; | [
"java.nio",
"org.apache.catalina"
] | java.nio; org.apache.catalina; | 29,292 |
public static long setDownload(
final File file,
final String contentType) throws DownloadException {
return setDownload(file, contentType, null);
}
| static long function( final File file, final String contentType) throws DownloadException { return setDownload(file, contentType, null); } | /**
* Set download data.
* @param file
* @param contentType
* @return the download id
* @throws DownloadException
*/ | Set download data | setDownload | {
"repo_name": "GIP-RECIA/esco-grouper-ui",
"path": "ext/esup-commons/src/main/java/org/esupportail/commons/utils/DownloadUtils.java",
"license": "apache-2.0",
"size": 7952
} | [
"java.io.File",
"org.esupportail.commons.exceptions.DownloadException"
] | import java.io.File; import org.esupportail.commons.exceptions.DownloadException; | import java.io.*; import org.esupportail.commons.exceptions.*; | [
"java.io",
"org.esupportail.commons"
] | java.io; org.esupportail.commons; | 37,924 |
int readSmallInt() throws EOFException, IOException; | int readSmallInt() throws EOFException, IOException; | /**
* Reads a signed 32 bit int value. Can read any value that was written using {@link Encoder#writeSmallInt(int)}.
*
* @throws EOFException when the end of the byte stream is reached before the int value can be fully read.
*/ | Reads a signed 32 bit int value. Can read any value that was written using <code>Encoder#writeSmallInt(int)</code> | readSmallInt | {
"repo_name": "gstevey/gradle",
"path": "subprojects/messaging/src/main/java/org/gradle/internal/serialize/Decoder.java",
"license": "apache-2.0",
"size": 5128
} | [
"java.io.EOFException",
"java.io.IOException"
] | import java.io.EOFException; import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,100,270 |
public Map<EWPOntSubModel, Resource> getRdfResourcesForSourceObjectAsMap_expensive(Object o) {
final Set<Entry<EWPOntSubModel, OntModelAdp>> moSet = ontAdpMap.entrySet();
final Map<EWPOntSubModel, Resource> rez = new LinkedHashMap<EWPOntSubModel, Resource>(15);
final Iterator<Resource> iter = domNodeResourceMap1N.getMappedObjects(o).iterator();
while (iter.hasNext()) {
final Resource res = iter.next();
final Iterator<Entry<EWPOntSubModel, OntModelAdp>> iter2 = moSet.iterator();
while (iter2.hasNext()) {
final Entry<EWPOntSubModel, OntModelAdp> e = iter2.next();
if ( res.getModel().equals(e.getValue().getTopRdfModel())
|| e.getValue().getTopRdfModel().containsResource(res)
) {
if (log.isDebugEnabled() && (rez.get(e.getKey()) != null)) {
log.error("This must never happen: 2 resources (individuals), which correspond to the same source object" +
" are under the same sub-model! Submodel:"+e.getKey()+", source object:"+o+", res:"+res);
}
rez.put(e.getKey(), res);
}
}
}
return rez;
}
| Map<EWPOntSubModel, Resource> function(Object o) { final Set<Entry<EWPOntSubModel, OntModelAdp>> moSet = ontAdpMap.entrySet(); final Map<EWPOntSubModel, Resource> rez = new LinkedHashMap<EWPOntSubModel, Resource>(15); final Iterator<Resource> iter = domNodeResourceMap1N.getMappedObjects(o).iterator(); while (iter.hasNext()) { final Resource res = iter.next(); final Iterator<Entry<EWPOntSubModel, OntModelAdp>> iter2 = moSet.iterator(); while (iter2.hasNext()) { final Entry<EWPOntSubModel, OntModelAdp> e = iter2.next(); if ( res.getModel().equals(e.getValue().getTopRdfModel()) e.getValue().getTopRdfModel().containsResource(res) ) { if (log.isDebugEnabled() && (rez.get(e.getKey()) != null)) { log.error(STR + STR+e.getKey()+STR+o+STR+res); } rez.put(e.getKey(), res); } } } return rez; } | /**
* Function returns mapping {@linkplain EWPOntSubModel} -> Resource.
* It checks the real containment of the resource in a model.
* @param o
* @return
*/ | Function returns mapping EWPOntSubModel -> Resource. It checks the real containment of the resource in a model | getRdfResourcesForSourceObjectAsMap_expensive | {
"repo_name": "ruslanrf/wpps",
"path": "wpps_plugins/tuwien.dbai.wpps.core/src/tuwien/dbai/wpps/core/wpmodel/ontology/WPOntSubModels.java",
"license": "gpl-2.0",
"size": 6206
} | [
"com.hp.hpl.jena.rdf.model.Resource",
"java.util.Iterator",
"java.util.LinkedHashMap",
"java.util.Map",
"java.util.Set"
] | import com.hp.hpl.jena.rdf.model.Resource; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; | import com.hp.hpl.jena.rdf.model.*; import java.util.*; | [
"com.hp.hpl",
"java.util"
] | com.hp.hpl; java.util; | 107,449 |
boolean isPatternMatchingAllIndices(MetaData metaData, String[] indicesOrAliases, String[] concreteIndices) {
// if we end up matching on all indices, check, if its a wildcard parameter, or a "-something" structure
if (concreteIndices.length == metaData.getConcreteAllIndices().length && indicesOrAliases.length > 0) {
//we might have something like /-test1,+test1 that would identify all indices
//or something like /-test1 with test1 index missing and IndicesOptions.lenient()
if (indicesOrAliases[0].charAt(0) == '-') {
return true;
}
//otherwise we check if there's any simple regex
for (String indexOrAlias : indicesOrAliases) {
if (Regex.isSimpleMatchPattern(indexOrAlias)) {
return true;
}
}
}
return false;
}
static final class Context {
private final ClusterState state;
private final IndicesOptions options;
private final long startTime;
private final boolean preserveAliases;
Context(ClusterState state, IndicesOptions options) {
this(state, options, System.currentTimeMillis());
}
Context(ClusterState state, IndicesOptions options, boolean preserveAliases) {
this(state, options, System.currentTimeMillis(), preserveAliases);
}
Context(ClusterState state, IndicesOptions options, long startTime) {
this(state, options, startTime, false);
}
Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) {
this.state = state;
this.options = options;
this.startTime = startTime;
this.preserveAliases = preserveAliases;
} | boolean isPatternMatchingAllIndices(MetaData metaData, String[] indicesOrAliases, String[] concreteIndices) { if (concreteIndices.length == metaData.getConcreteAllIndices().length && indicesOrAliases.length > 0) { if (indicesOrAliases[0].charAt(0) == '-') { return true; } for (String indexOrAlias : indicesOrAliases) { if (Regex.isSimpleMatchPattern(indexOrAlias)) { return true; } } } return false; } static final class Context { private final ClusterState state; private final IndicesOptions options; private final long startTime; private final boolean preserveAliases; Context(ClusterState state, IndicesOptions options) { this(state, options, System.currentTimeMillis()); } Context(ClusterState state, IndicesOptions options, boolean preserveAliases) { this(state, options, System.currentTimeMillis(), preserveAliases); } Context(ClusterState state, IndicesOptions options, long startTime) { this(state, options, startTime, false); } Context(ClusterState state, IndicesOptions options, long startTime, boolean preserveAliases) { this.state = state; this.options = options; this.startTime = startTime; this.preserveAliases = preserveAliases; } | /**
* Identifies whether the first argument (an array containing index names) is a pattern that matches all indices
*
* @param indicesOrAliases the array containing index names
* @param concreteIndices array containing the concrete indices that the first argument refers to
* @return true if the first argument is a pattern that maps to all available indices, false otherwise
*/ | Identifies whether the first argument (an array containing index names) is a pattern that matches all indices | isPatternMatchingAllIndices | {
"repo_name": "jimczi/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java",
"license": "apache-2.0",
"size": 46217
} | [
"org.elasticsearch.action.support.IndicesOptions",
"org.elasticsearch.cluster.ClusterState",
"org.elasticsearch.common.regex.Regex"
] | import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.regex.Regex; | import org.elasticsearch.action.support.*; import org.elasticsearch.cluster.*; import org.elasticsearch.common.regex.*; | [
"org.elasticsearch.action",
"org.elasticsearch.cluster",
"org.elasticsearch.common"
] | org.elasticsearch.action; org.elasticsearch.cluster; org.elasticsearch.common; | 721,721 |
@Override
public RegEx plan() {
return regEx;
} | RegEx function() { return regEx; } | /**
* Generates a plan for regular expression execution.
* The naive planner simply returns the original regular expression.
*
* @return The regex execution plan.
*/ | Generates a plan for regular expression execution. The naive planner simply returns the original regular expression | plan | {
"repo_name": "SwathiMystery/succinct",
"path": "core/src/main/java/edu/berkeley/cs/succinct/regex/planner/NaiveRegExPlanner.java",
"license": "apache-2.0",
"size": 803
} | [
"edu.berkeley.cs.succinct.regex.parser.RegEx"
] | import edu.berkeley.cs.succinct.regex.parser.RegEx; | import edu.berkeley.cs.succinct.regex.parser.*; | [
"edu.berkeley.cs"
] | edu.berkeley.cs; | 1,940,651 |
public static boolean isImageType(final String urlString) {
if ((urlString == null) || StringUtils.isBlank(urlString)) {
return false;
}
final String extension = StringUtils.substringAfterLast(urlString.toLowerCase(), ".");
final String[] imageExtensions = {
"png", "gif", "jpg", "jpeg", "bmp", "tif", "tiff"
};
for (String ext : imageExtensions) {
if (ext.equals(extension)) {
return true;
}
}
return false;
} | static boolean function(final String urlString) { if ((urlString == null) StringUtils.isBlank(urlString)) { return false; } final String extension = StringUtils.substringAfterLast(urlString.toLowerCase(), "."); final String[] imageExtensions = { "png", "gif", "jpg", "jpeg", "bmp", "tif", "tiff" }; for (String ext : imageExtensions) { if (ext.equals(extension)) { return true; } } return false; } | /**
* Check if url points to an image by extension
*
* TODO: Improve method to check file type by peeping at the
* content
*
* @param urlString
* @return true if is of image type
*/ | Check if url points to an image by extension content | isImageType | {
"repo_name": "structr/structr",
"path": "structr-ui/src/main/java/org/structr/web/common/ImageHelper.java",
"license": "gpl-3.0",
"size": 32324
} | [
"org.apache.commons.lang3.StringUtils"
] | import org.apache.commons.lang3.StringUtils; | import org.apache.commons.lang3.*; | [
"org.apache.commons"
] | org.apache.commons; | 329,190 |
@Test
public void testDestroyCachesAbruptly() throws Exception {
Ignite ignite = startGrids(NODES);
ignite.cluster().active(true);
startCachesDynamically(ignite);
checkDestroyCachesAbruptly(ignite);
} | void function() throws Exception { Ignite ignite = startGrids(NODES); ignite.cluster().active(true); startCachesDynamically(ignite); checkDestroyCachesAbruptly(ignite); } | /**
* Test destroy caches abruptly with checkpoints.
*
* @throws Exception If failed.
*/ | Test destroy caches abruptly with checkpoints | testDestroyCachesAbruptly | {
"repo_name": "shroman/ignite",
"path": "modules/core/src/test/java/org/apache/ignite/internal/processors/cache/persistence/IgnitePdsDestroyCacheTest.java",
"license": "apache-2.0",
"size": 2507
} | [
"org.apache.ignite.Ignite"
] | import org.apache.ignite.Ignite; | import org.apache.ignite.*; | [
"org.apache.ignite"
] | org.apache.ignite; | 1,913,870 |
@Override
public void discard(SharedPreferences.Editor editor) {
DateTime discardedTill = DateTime.now().plusMinutes(1440); // in 24 hours
String discardTimeString = discardedTill.toString(fmt);
editor.putString(SURVEY_CARD_DISCARDED_TILL, discardTimeString);
} | void function(SharedPreferences.Editor editor) { DateTime discardedTill = DateTime.now().plusMinutes(1440); String discardTimeString = discardedTill.toString(fmt); editor.putString(SURVEY_CARD_DISCARDED_TILL, discardTimeString); } | /**
* Handles discarding the survey card. Grace period of 24 hours
* Card should be shown again depending on the next function
*
* @param editor Editor to be used for saving values
*/ | Handles discarding the survey card. Grace period of 24 hours Card should be shown again depending on the next function | discard | {
"repo_name": "kordianbruck/TumCampusApp",
"path": "app/src/main/java/de/tum/in/tumcampusapp/cards/SurveyCard.java",
"license": "gpl-2.0",
"size": 7506
} | [
"android.content.SharedPreferences",
"org.joda.time.DateTime"
] | import android.content.SharedPreferences; import org.joda.time.DateTime; | import android.content.*; import org.joda.time.*; | [
"android.content",
"org.joda.time"
] | android.content; org.joda.time; | 1,046,635 |
console.info("Asynchronous scan: namespace=" + params.namespace + " set=" + params.set);
recordCount = 0;
final long begin = System.currentTimeMillis();
ScanPolicy policy = new ScanPolicy();
client.scanAll(policy, new RecordSequenceListener() { | console.info(STR + params.namespace + STR + params.set); recordCount = 0; final long begin = System.currentTimeMillis(); ScanPolicy policy = new ScanPolicy(); client.scanAll(policy, new RecordSequenceListener() { | /**
* Asynchronous scan example.
*/ | Asynchronous scan example | runExample | {
"repo_name": "wgpshashank/aerospike-client-java",
"path": "examples/src/com/aerospike/examples/AsyncScan.java",
"license": "apache-2.0",
"size": 2737
} | [
"com.aerospike.client.listener.RecordSequenceListener",
"com.aerospike.client.policy.ScanPolicy"
] | import com.aerospike.client.listener.RecordSequenceListener; import com.aerospike.client.policy.ScanPolicy; | import com.aerospike.client.listener.*; import com.aerospike.client.policy.*; | [
"com.aerospike.client"
] | com.aerospike.client; | 801,831 |
void getBlockInfo(IBlockAccess world, int x, int y, int z, ForgeDirection side, EntityPlayer player, List<IChatComponent> info,
boolean debug); | void getBlockInfo(IBlockAccess world, int x, int y, int z, ForgeDirection side, EntityPlayer player, List<IChatComponent> info, boolean debug); | /**
* This function appends information to a list provided to it.
*
* @param world
* Reference to the world.
* @param x
* X coordinate of the block.
* @param y
* Y coordinate of the block.
* @param z
* Z coordinate of the block.
* @param side
* The side of the block that is being queried.
* @param player
* Player doing the querying - this can be NULL.
* @param info
* The list that the information should be appended to.
* @param debug
* If true, the block should return "debug" information.
*/ | This function appends information to a list provided to it | getBlockInfo | {
"repo_name": "TheJulianJES/UtilsPlus",
"path": "src/api/java/cofh/api/block/IBlockInfo.java",
"license": "gpl-3.0",
"size": 1336
} | [
"java.util.List",
"net.minecraft.entity.player.EntityPlayer",
"net.minecraft.util.IChatComponent",
"net.minecraft.world.IBlockAccess",
"net.minecraftforge.common.util.ForgeDirection"
] | import java.util.List; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.util.IChatComponent; import net.minecraft.world.IBlockAccess; import net.minecraftforge.common.util.ForgeDirection; | import java.util.*; import net.minecraft.entity.player.*; import net.minecraft.util.*; import net.minecraft.world.*; import net.minecraftforge.common.util.*; | [
"java.util",
"net.minecraft.entity",
"net.minecraft.util",
"net.minecraft.world",
"net.minecraftforge.common"
] | java.util; net.minecraft.entity; net.minecraft.util; net.minecraft.world; net.minecraftforge.common; | 1,588,171 |
@Test
public void testDFSClose() throws Exception {
Configuration conf = getTestConfiguration();
MiniDFSCluster cluster = null;
try {
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
FileSystem fileSys = cluster.getFileSystem();
// create two files, leaving them open
fileSys.create(new Path("/test/dfsclose/file-0"));
fileSys.create(new Path("/test/dfsclose/file-1"));
// create another file, close it, and read it, so
// the client gets a socket in its SocketCache
Path p = new Path("/non-empty-file");
DFSTestUtil.createFile(fileSys, p, 1L, (short)1, 0L);
DFSTestUtil.readFile(fileSys, p);
fileSys.close();
} finally {
if (cluster != null) {cluster.shutdown();}
}
} | void function() throws Exception { Configuration conf = getTestConfiguration(); MiniDFSCluster cluster = null; try { cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); FileSystem fileSys = cluster.getFileSystem(); fileSys.create(new Path(STR)); fileSys.create(new Path(STR)); Path p = new Path(STR); DFSTestUtil.createFile(fileSys, p, 1L, (short)1, 0L); DFSTestUtil.readFile(fileSys, p); fileSys.close(); } finally { if (cluster != null) {cluster.shutdown();} } } | /**
* Tests DFSClient.close throws no ConcurrentModificationException if
* multiple files are open.
* Also tests that any cached sockets are closed. (HDFS-3359)
*/ | Tests DFSClient.close throws no ConcurrentModificationException if multiple files are open. Also tests that any cached sockets are closed. (HDFS-3359) | testDFSClose | {
"repo_name": "tomatoKiller/Hadoop_Source_Learn",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java",
"license": "apache-2.0",
"size": 28874
} | [
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.fs.Path"
] | import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; | import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 205,761 |
public List<GpodnetPodcast> getPodcastsForTag(GpodnetTag tag, int count)
throws GpodnetServiceException {
Validate.notNull(tag);
try {
URI uri = new URI(BASE_SCHEME, BASE_HOST, String.format(
"/api/2/tag/%s/%d.json", tag.getName(), count), null);
HttpGet request = new HttpGet(uri);
String response = executeRequest(request);
JSONArray jsonArray = new JSONArray(response);
return readPodcastListFromJSONArray(jsonArray);
} catch (JSONException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
} catch (URISyntaxException e) {
e.printStackTrace();
throw new GpodnetServiceException(e);
}
} | List<GpodnetPodcast> function(GpodnetTag tag, int count) throws GpodnetServiceException { Validate.notNull(tag); try { URI uri = new URI(BASE_SCHEME, BASE_HOST, String.format( STR, tag.getName(), count), null); HttpGet request = new HttpGet(uri); String response = executeRequest(request); JSONArray jsonArray = new JSONArray(response); return readPodcastListFromJSONArray(jsonArray); } catch (JSONException e) { e.printStackTrace(); throw new GpodnetServiceException(e); } catch (URISyntaxException e) { e.printStackTrace(); throw new GpodnetServiceException(e); } } | /**
* Returns the [count] most subscribed podcasts for the given tag.
*
* @throws IllegalArgumentException if tag is null
*/ | Returns the [count] most subscribed podcasts for the given tag | getPodcastsForTag | {
"repo_name": "eric-stanley/AntennaPod",
"path": "src/de/danoeh/antennapod/gpoddernet/GpodnetService.java",
"license": "mit",
"size": 28085
} | [
"de.danoeh.antennapod.gpoddernet.model.GpodnetPodcast",
"de.danoeh.antennapod.gpoddernet.model.GpodnetTag",
"java.net.URISyntaxException",
"java.util.List",
"org.apache.commons.lang3.Validate",
"org.apache.http.client.methods.HttpGet",
"org.json.JSONArray",
"org.json.JSONException"
] | import de.danoeh.antennapod.gpoddernet.model.GpodnetPodcast; import de.danoeh.antennapod.gpoddernet.model.GpodnetTag; import java.net.URISyntaxException; import java.util.List; import org.apache.commons.lang3.Validate; import org.apache.http.client.methods.HttpGet; import org.json.JSONArray; import org.json.JSONException; | import de.danoeh.antennapod.gpoddernet.model.*; import java.net.*; import java.util.*; import org.apache.commons.lang3.*; import org.apache.http.client.methods.*; import org.json.*; | [
"de.danoeh.antennapod",
"java.net",
"java.util",
"org.apache.commons",
"org.apache.http",
"org.json"
] | de.danoeh.antennapod; java.net; java.util; org.apache.commons; org.apache.http; org.json; | 2,816,437 |
public static void createDataNodeVersionFile(File[] parent,
StorageInfo version, String bpid, String bpidToWrite) throws IOException {
DataStorage storage = new DataStorage(version);
storage.setDatanodeUuid("FixedDatanodeUuid");
File[] versionFiles = new File[parent.length];
for (int i = 0; i < parent.length; i++) {
File versionFile = new File(parent[i], "VERSION");
StorageDirectory sd = new StorageDirectory(parent[i].getParentFile());
storage.createStorageID(sd);
storage.writeProperties(versionFile, sd);
versionFiles[i] = versionFile;
File bpDir = BlockPoolSliceStorage.getBpRoot(bpid, parent[i]);
createBlockPoolVersionFile(bpDir, version, bpidToWrite);
}
} | static void function(File[] parent, StorageInfo version, String bpid, String bpidToWrite) throws IOException { DataStorage storage = new DataStorage(version); storage.setDatanodeUuid(STR); File[] versionFiles = new File[parent.length]; for (int i = 0; i < parent.length; i++) { File versionFile = new File(parent[i], STR); StorageDirectory sd = new StorageDirectory(parent[i].getParentFile()); storage.createStorageID(sd); storage.writeProperties(versionFile, sd); versionFiles[i] = versionFile; File bpDir = BlockPoolSliceStorage.getBpRoot(bpid, parent[i]); createBlockPoolVersionFile(bpDir, version, bpidToWrite); } } | /**
* Create a <code>version</code> file for datanode inside the specified parent
* directory. If such a file already exists, it will be overwritten.
* The given version string will be written to the file as the layout
* version. None of the parameters may be null.
*
* @param parent directory where namenode VERSION file is stored
* @param version StorageInfo to create VERSION file from
* @param bpid Block pool Id
* @param bpidToWrite Block pool Id to write into the version file
*/ | Create a <code>version</code> file for datanode inside the specified parent directory. If such a file already exists, it will be overwritten. The given version string will be written to the file as the layout version. None of the parameters may be null | createDataNodeVersionFile | {
"repo_name": "gnudeep/hadoop-2.6",
"path": "hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/UpgradeUtilities.java",
"license": "apache-2.0",
"size": 24391
} | [
"java.io.File",
"java.io.IOException",
"org.apache.hadoop.hdfs.server.common.Storage",
"org.apache.hadoop.hdfs.server.common.StorageInfo",
"org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceStorage",
"org.apache.hadoop.hdfs.server.datanode.DataStorage"
] | import java.io.File; import java.io.IOException; import org.apache.hadoop.hdfs.server.common.Storage; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.datanode.BlockPoolSliceStorage; import org.apache.hadoop.hdfs.server.datanode.DataStorage; | import java.io.*; import org.apache.hadoop.hdfs.server.common.*; import org.apache.hadoop.hdfs.server.datanode.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 217,266 |
public PrizeEntity getPrize(Long prizesId) {
LOGGER.log(Level.INFO, "Inicia proceso de consultar premio con id = {0}", prizesId);
PrizeEntity prize = prizePersistence.find(prizesId);
if (prize == null) {
LOGGER.log(Level.SEVERE, "El premio con el id = {0} no existe", prizesId);
}
LOGGER.log(Level.INFO, "Termina proceso de consultar premio con id = {0}", prizesId);
return prize;
}
| PrizeEntity function(Long prizesId) { LOGGER.log(Level.INFO, STR, prizesId); PrizeEntity prize = prizePersistence.find(prizesId); if (prize == null) { LOGGER.log(Level.SEVERE, STR, prizesId); } LOGGER.log(Level.INFO, STR, prizesId); return prize; } | /**
* Busca un premio por ID
*
* @param prizesId El id del premio a buscar
* @return El premio encontrado, null si no lo encuentra.
*/ | Busca un premio por ID | getPrize | {
"repo_name": "Uniandes-isis2603/backstepbystep",
"path": "backstepbystep-back/src/main/java/co/edu/uniandes/csw/bookstore/ejb/PrizeLogic.java",
"license": "mit",
"size": 5730
} | [
"co.edu.uniandes.csw.bookstore.entities.PrizeEntity",
"java.util.logging.Level"
] | import co.edu.uniandes.csw.bookstore.entities.PrizeEntity; import java.util.logging.Level; | import co.edu.uniandes.csw.bookstore.entities.*; import java.util.logging.*; | [
"co.edu.uniandes",
"java.util"
] | co.edu.uniandes; java.util; | 1,365,142 |
boolean subqueryReferencesTarget(String name, boolean baseTable)
throws StandardException
{
return false;
} | boolean subqueryReferencesTarget(String name, boolean baseTable) throws StandardException { return false; } | /**
* Return whether or not this ResultSetNode contains a subquery with a
* reference to the specified target.
*
* @param name The table name.
*
* @return boolean Whether or not a reference to the table was found.
*
* @exception StandardException Thrown on error
*/ | Return whether or not this ResultSetNode contains a subquery with a reference to the specified target | subqueryReferencesTarget | {
"repo_name": "trejkaz/derby",
"path": "java/engine/org/apache/derby/impl/sql/compile/ResultSetNode.java",
"license": "apache-2.0",
"size": 65254
} | [
"org.apache.derby.iapi.error.StandardException"
] | import org.apache.derby.iapi.error.StandardException; | import org.apache.derby.iapi.error.*; | [
"org.apache.derby"
] | org.apache.derby; | 1,267,834 |
public KualiDecimal getUnpaidBalance91toSYSPR() {
return unpaidBalance91toSYSPR;
} | KualiDecimal function() { return unpaidBalance91toSYSPR; } | /**
* Gets the unpaidBalance91toSYSPR attribute.
*
* @return Returns the unpaidBalance91toSYSPR.
*/ | Gets the unpaidBalance91toSYSPR attribute | getUnpaidBalance91toSYSPR | {
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-ar/src/main/java/org/kuali/kfs/module/ar/businessobject/ContractsAndGrantsAgingReport.java",
"license": "agpl-3.0",
"size": 20464
} | [
"org.kuali.rice.core.api.util.type.KualiDecimal"
] | import org.kuali.rice.core.api.util.type.KualiDecimal; | import org.kuali.rice.core.api.util.type.*; | [
"org.kuali.rice"
] | org.kuali.rice; | 33,253 |
public static final int getInt(InputStream is) throws IOException {
return (is.read() << 24) + (is.read() << 16) + (is.read() << 8) + is.read();
}
| static final int function(InputStream is) throws IOException { return (is.read() << 24) + (is.read() << 16) + (is.read() << 8) + is.read(); } | /**
* Gets an <CODE>int</CODE> from an <CODE>InputStream</CODE>.
*
* @param is an <CODE>InputStream</CODE>
* @return the value of an <CODE>int</CODE>
*/ | Gets an <code>int</code> from an <code>InputStream</code> | getInt | {
"repo_name": "MesquiteProject/MesquiteArchive",
"path": "releases/Mesquite1.12/Mesquite Project/LibrarySource/com/lowagie/text/pdf/codec/PngImage.java",
"license": "lgpl-3.0",
"size": 35528
} | [
"java.io.IOException",
"java.io.InputStream"
] | import java.io.IOException; import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 708,219 |
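A minimal, runnable sketch of the same big-endian shift-and-add composition on a known byte sequence; the class and helper names are made up for illustration and are not part of the PngImage codec.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

public class GetIntDemo {
    // Same composition as the codec helper above: four bytes, most significant first.
    static int readBigEndianInt(InputStream is) throws IOException {
        return (is.read() << 24) + (is.read() << 16) + (is.read() << 8) + is.read();
    }

    public static void main(String[] args) throws IOException {
        // 0x00 0x00 0x01 0x2C is 300 when read most-significant-byte first.
        InputStream is = new ByteArrayInputStream(new byte[] {0x00, 0x00, 0x01, 0x2C});
        System.out.println(readBigEndianInt(is)); // prints 300
    }
}

Note that, like the original, this sketch does not distinguish end-of-stream (read() returning -1) from real data.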
protected static String buildValidatorKey(Class clazz, String context) {
ActionInvocation invocation = ActionContext.getContext().getActionInvocation();
ActionProxy proxy = invocation.getProxy();
ActionConfig config = proxy.getConfig();
StringBuilder sb = new StringBuilder(clazz.getName());
sb.append("/");
if (StringUtils.isNotBlank(config.getPackageName())) {
sb.append(config.getPackageName());
sb.append("/");
}
// the key needs to use the name of the action from the config file,
// instead of the url, so wild card actions will have the same validator
// see WW-2996
// UPDATE:
// WW-3753 Using the config name instead of the context only for
// wild card actions to keep the flexibility provided
// by the original design (such as mapping different contexts
// to the same action and method if desired)
String configName = config.getName();
if (configName.contains(ActionConfig.WILDCARD)) {
sb.append(configName);
sb.append("|");
sb.append(proxy.getMethod());
} else {
sb.append(context);
}
return sb.toString();
} | static String function(Class clazz, String context) { ActionInvocation invocation = ActionContext.getContext().getActionInvocation(); ActionProxy proxy = invocation.getProxy(); ActionConfig config = proxy.getConfig(); StringBuilder sb = new StringBuilder(clazz.getName()); sb.append("/"); if (StringUtils.isNotBlank(config.getPackageName())) { sb.append(config.getPackageName()); sb.append("/"); } String configName = config.getName(); if (configName.contains(ActionConfig.WILDCARD)) { sb.append(configName); sb.append(" "); sb.append(proxy.getMethod()); } else { sb.append(context); } return sb.toString(); } | /**
* Builds a key for validators - used when caching validators.
*
* @param clazz the action.
* @return a validator key which is the class name plus context.
*/ | Builds a key for validators - used when caching validators | buildValidatorKey | {
"repo_name": "WillJiang/WillJiang",
"path": "src/xwork-core/src/main/java/com/opensymphony/xwork2/validator/AnnotationActionValidatorManager.java",
"license": "apache-2.0",
"size": 18330
} | [
"com.opensymphony.xwork2.ActionContext",
"com.opensymphony.xwork2.ActionInvocation",
"com.opensymphony.xwork2.ActionProxy",
"com.opensymphony.xwork2.config.entities.ActionConfig",
"org.apache.commons.lang3.StringUtils"
] | import com.opensymphony.xwork2.ActionContext; import com.opensymphony.xwork2.ActionInvocation; import com.opensymphony.xwork2.ActionProxy; import com.opensymphony.xwork2.config.entities.ActionConfig; import org.apache.commons.lang3.StringUtils; | import com.opensymphony.xwork2.*; import com.opensymphony.xwork2.config.entities.*; import org.apache.commons.lang3.*; | [
"com.opensymphony.xwork2",
"org.apache.commons"
] | com.opensymphony.xwork2; org.apache.commons; | 2,025,202 |
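A simplified, self-contained sketch of the same cache-key construction with the XWork objects replaced by plain strings. It assumes ActionConfig.WILDCARD is "*", drops the StringUtils blank check, and every class, package and action name below is invented for illustration.

public class ValidatorKeyDemo {
    static String buildKey(String className, String packageName, String configName,
                           String method, String context) {
        StringBuilder sb = new StringBuilder(className).append("/");
        if (packageName != null && !packageName.isEmpty()) {
            sb.append(packageName).append("/");
        }
        if (configName.contains("*")) {
            // Wildcard mapping: key on the config name plus the resolved method (see WW-3753).
            sb.append(configName).append("|").append(method);
        } else {
            // Plain mapping: key on the request context, as before WW-2996.
            sb.append(context);
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        System.out.println(buildKey("com.example.UserAction", "default", "edit-*", "save", "edit-user"));
        System.out.println(buildKey("com.example.UserAction", "default", "edit-user", "save", "edit-user"));
    }
}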
@Override
public ObjectId call() {
checkNotNull(tree, "child tree not set");
checkNotNull(childPath, "child tree path not set");
String ancestorPath = resolveAncestorPath();
checkArgument(NodeRef.isChild(ancestorPath, childPath), String.format(
"child path '%s' is not a child of ancestor path '%s'", childPath, ancestorPath));
RevTree tree = this.tree.get();
checkState(null != tree, "child tree supplier returned null");
ObjectDatabase targetDb = indexDb ? index : odb;
RevTreeBuilder root = resolveAncestor();
return writeBack(root, ancestorPath, tree, childPath, targetDb,
metadataId.or(ObjectId.NULL));
} | ObjectId function() { checkNotNull(tree, STR); checkNotNull(childPath, STR); String ancestorPath = resolveAncestorPath(); checkArgument(NodeRef.isChild(ancestorPath, childPath), String.format( STR, childPath, ancestorPath)); RevTree tree = this.tree.get(); checkState(null != tree, STR); ObjectDatabase targetDb = indexDb ? index : odb; RevTreeBuilder root = resolveAncestor(); return writeBack(root, ancestorPath, tree, childPath, targetDb, metadataId.or(ObjectId.NULL)); } | /**
* Executes the write back operation.
*
* @return the {@link ObjectId id} of the resulting new ancestor tree.
*/ | Executes the write back operation | call | {
"repo_name": "rouault/GeoGit",
"path": "src/core/src/main/java/org/geogit/api/plumbing/WriteBack.java",
"license": "bsd-3-clause",
"size": 8938
} | [
"com.google.common.base.Preconditions",
"org.geogit.api.NodeRef",
"org.geogit.api.ObjectId",
"org.geogit.api.RevTree",
"org.geogit.api.RevTreeBuilder",
"org.geogit.storage.ObjectDatabase"
] | import com.google.common.base.Preconditions; import org.geogit.api.NodeRef; import org.geogit.api.ObjectId; import org.geogit.api.RevTree; import org.geogit.api.RevTreeBuilder; import org.geogit.storage.ObjectDatabase; | import com.google.common.base.*; import org.geogit.api.*; import org.geogit.storage.*; | [
"com.google.common",
"org.geogit.api",
"org.geogit.storage"
] | com.google.common; org.geogit.api; org.geogit.storage; | 1,718,773 |
@Test
public void shouldNotHaveOddNumbers()
{
final List<Integer> numbers = Arrays.asList(1, 2, 4, 6, 8, 10);
assertThat(numbers, not(AreEvenNumbers.evenNumbers()));
} | void function() { final List<Integer> numbers = Arrays.asList(1, 2, 4, 6, 8, 10); assertThat(numbers, not(AreEvenNumbers.evenNumbers())); } | /**
* {@link AreEvenNumbers} inspired by <a
* href="http://java.dzone.com/articles/using-hamcrest-and-junit">this
* blog</a>
*/ | <code>AreEvenNumbers</code> inspired by this blog | shouldNotHaveOddNumbers | {
"repo_name": "krevelen/coala",
"path": "coala-core/src/test/java/io/coala/reason/MatchTest.java",
"license": "apache-2.0",
"size": 3946
} | [
"java.util.Arrays",
"java.util.List",
"org.hamcrest.MatcherAssert",
"org.hamcrest.Matchers"
] | import java.util.Arrays; import java.util.List; import org.hamcrest.MatcherAssert; import org.hamcrest.Matchers; | import java.util.*; import org.hamcrest.*; | [
"java.util",
"org.hamcrest"
] | java.util; org.hamcrest; | 579,072 |
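The AreEvenNumbers matcher itself is not part of this record; a plausible sketch of such a custom Hamcrest matcher, with the generics and factory name assumed, might look like this:

import org.hamcrest.Description;
import org.hamcrest.Matcher;
import org.hamcrest.TypeSafeMatcher;

// Assumed implementation; the class from the cited blog post may differ in detail.
public class AreEvenNumbers extends TypeSafeMatcher<Iterable<Integer>> {

    @Override
    protected boolean matchesSafely(Iterable<Integer> numbers) {
        for (int n : numbers) {
            if (n % 2 != 0) {
                return false; // a single odd value fails the whole match
            }
        }
        return true;
    }

    @Override
    public void describeTo(Description description) {
        description.appendText("a collection of even numbers");
    }

    public static Matcher<Iterable<Integer>> evenNumbers() {
        return new AreEvenNumbers();
    }
}

With a matcher of this shape, the test above passes because 1 is odd, so not(evenNumbers()) matches the list.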
private static VarPattern map(Resource resource){
if(isHasResourceResource(resource)){
return var().pattern();
}
VarPattern var = base(resource);
var = var.val(resource.getValue());
return var;
} | static VarPattern function(Resource resource){ if(isHasResourceResource(resource)){ return var().pattern(); } VarPattern var = base(resource); var = var.val(resource.getValue()); return var; } | /**
* Map a Resource to a var IF it is not attached in a has relation to another instance
* @param resource resource to be mapped
* @return var patterns representing the given instance
*/ | Map a Resource to a var IF it is not attached in a has relation to another instance | map | {
"repo_name": "alexandraorth/grakn",
"path": "grakn-migration/export/src/main/java/ai/grakn/migration/export/InstanceMapper.java",
"license": "gpl-3.0",
"size": 5895
} | [
"ai.grakn.concept.Resource",
"ai.grakn.graql.Graql",
"ai.grakn.graql.VarPattern"
] | import ai.grakn.concept.Resource; import ai.grakn.graql.Graql; import ai.grakn.graql.VarPattern; | import ai.grakn.concept.*; import ai.grakn.graql.*; | [
"ai.grakn.concept",
"ai.grakn.graql"
] | ai.grakn.concept; ai.grakn.graql; | 2,903,682 |
static void clearLoaders(@NotNull final LoaderContextCompat context, final int loaderId,
@NotNull final ContextInvocationFactory<?, ?> factory) {
sMainRunner.run(new ClearFactoryExecution(context, factory, loaderId), 0,
TimeUnit.MILLISECONDS);
} | static void clearLoaders(@NotNull final LoaderContextCompat context, final int loaderId, @NotNull final ContextInvocationFactory<?, ?> factory) { sMainRunner.run(new ClearFactoryExecution(context, factory, loaderId), 0, TimeUnit.MILLISECONDS); } | /**
* Destroys all the loaders with the specified invocation factory.
*
* @param context the context instance.
* @param loaderId the Loader ID.
* @param factory the invocation factory.
*/ | Destroys all the loaders with the specified invocation factory | clearLoaders | {
"repo_name": "davide-maestroni/jroutine",
"path": "android-core/src/main/java/com/github/dm/jrt/android/v4/core/LoaderInvocation.java",
"license": "apache-2.0",
"size": 29475
} | [
"com.github.dm.jrt.android.core.invocation.ContextInvocationFactory",
"java.util.concurrent.TimeUnit",
"org.jetbrains.annotations.NotNull"
] | import com.github.dm.jrt.android.core.invocation.ContextInvocationFactory; import java.util.concurrent.TimeUnit; import org.jetbrains.annotations.NotNull; | import com.github.dm.jrt.android.core.invocation.*; import java.util.concurrent.*; import org.jetbrains.annotations.*; | [
"com.github.dm",
"java.util",
"org.jetbrains.annotations"
] | com.github.dm; java.util; org.jetbrains.annotations; | 275,999 |
public void setDmxService(DmxService dmxService) {
this.dmxService = dmxService;
} | void function(DmxService dmxService) { this.dmxService = dmxService; } | /**
* DmxService loaded via DS.
*/ | DmxService loaded via DS | setDmxService | {
"repo_name": "vgoldman/openhab",
"path": "bundles/binding/org.openhab.binding.dmx/src/main/java/org/openhab/binding/dmx/internal/DmxGenericBindingProvider.java",
"license": "epl-1.0",
"size": 7695
} | [
"org.openhab.binding.dmx.DmxService"
] | import org.openhab.binding.dmx.DmxService; | import org.openhab.binding.dmx.*; | [
"org.openhab.binding"
] | org.openhab.binding; | 2,540,510 |
public void setAllowedThreadGroupings(ThreadGrouping... groupings)
{
threadGrouping.getItems().addAll(groupings);
// Don't show choice if there actually is no choice :)
setVisibility(groupings.length > 1, threadGroupingLabel, threadGrouping);
bindGroupings();
} | void function(ThreadGrouping... groupings) { threadGrouping.getItems().addAll(groupings); setVisibility(groupings.length > 1, threadGroupingLabel, threadGrouping); bindGroupings(); } | /**
* Specify which {@link ThreadGrouping}s are allowed for the View. This method should be called by the controller
* which configures the View, if the View supports groupings.
* <p>
*
* @param groupings the {@link ThreadGrouping}s allowed for the View
*/ | Specify which <code>ThreadGrouping</code>s are allowed for the View. This method should be called by the controller which configures the View, if the View supports groupings. | setAllowedThreadGroupings | {
"repo_name": "RichardWarburton/honest-profiler",
"path": "src/main/java/com/insightfullogic/honest_profiler/ports/javafx/controller/AbstractViewController.java",
"license": "mit",
"size": 30814
} | [
"com.insightfullogic.honest_profiler.core.aggregation.grouping.ThreadGrouping"
] | import com.insightfullogic.honest_profiler.core.aggregation.grouping.ThreadGrouping; | import com.insightfullogic.honest_profiler.core.aggregation.grouping.*; | [
"com.insightfullogic.honest_profiler"
] | com.insightfullogic.honest_profiler; | 2,365,061 |
public static Point metersToPixels(Point meters) {
return new Point(metersToPixels(meters.x) + 200, - metersToPixels(meters.y) + 500);
} | static Point function(Point meters) { return new Point(metersToPixels(meters.x) + 200, - metersToPixels(meters.y) + 500); } | /**
* Converts values in meters to pixels.
* @param meters A point whose position is in meters.
* @return This position in pixels.
*/ | Converts values in meters to pixels | metersToPixels | {
"repo_name": "pmenage/bumple-game",
"path": "src/com/paulinemenage/bumple/game/Bumple.java",
"license": "mit",
"size": 4991
} | [
"com.paulinemenage.bumple.physics.Point"
] | import com.paulinemenage.bumple.physics.Point; | import com.paulinemenage.bumple.physics.*; | [
"com.paulinemenage.bumple"
] | com.paulinemenage.bumple; | 1,762,264 |
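A runnable sketch of the same translation, assuming a hypothetical scale of 100 pixels per metre; the scalar metersToPixels(float) overload is not shown in this record, so the factor is purely illustrative.

public class MetersToPixelsDemo {
    static final int PIXELS_PER_METER = 100; // assumed scale, for illustration only

    static int metersToPixels(float meters) {
        return Math.round(meters * PIXELS_PER_METER);
    }

    public static void main(String[] args) {
        float xMeters = 1f, yMeters = 2f;
        // Mirrors the conversion above: shift x by +200, negate y (screen y grows downwards) and shift by +500.
        int xPixels = metersToPixels(xMeters) + 200;   // 300
        int yPixels = -metersToPixels(yMeters) + 500;  // 300
        System.out.println(xPixels + ", " + yPixels);
    }
}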
protected String getStringProperty(JSONObject jsonObject, String propertyName) throws JSONException {
if (jsonObject.has(propertyName)) {
return jsonObject.getString(propertyName);
}
return null;
} | String function(JSONObject jsonObject, String propertyName) throws JSONException { if (jsonObject.has(propertyName)) { return jsonObject.getString(propertyName); } return null; } | /**
* Gets the string property.
*
* @param jsonObject
* the json object
* @param propertyName
* the property name
* @return the property value
* @throws JSONException
* the jSON exception
*/ | Gets the string property | getStringProperty | {
"repo_name": "SirmaITT/conservation-space-1.7.0",
"path": "docker/sep-alfresco/alfresco-emf-integration/alfresco-cmf/src/main/java/com/sirma/itt/cmf/integration/webscript/BaseAlfrescoScript.java",
"license": "lgpl-3.0",
"size": 26195
} | [
"org.json.JSONException",
"org.json.JSONObject"
] | import org.json.JSONException; import org.json.JSONObject; | import org.json.*; | [
"org.json"
] | org.json; | 1,561,776 |
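A small, runnable usage sketch of the same null-safe lookup against org.json, with an invented document; only has(String) and getString(String) from the record are relied on.

import org.json.JSONException;
import org.json.JSONObject;

public class StringPropertyDemo {
    static String getStringProperty(JSONObject jsonObject, String propertyName) throws JSONException {
        return jsonObject.has(propertyName) ? jsonObject.getString(propertyName) : null;
    }

    public static void main(String[] args) throws JSONException {
        JSONObject document = new JSONObject("{\"title\":\"report\"}");
        System.out.println(getStringProperty(document, "title"));    // report
        System.out.println(getStringProperty(document, "missing"));  // null instead of a JSONException
    }
}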
public void setHWDecoderEnabled(boolean enabled, boolean force) {
final HWDecoderUtil.Decoder decoder = enabled ?
HWDecoderUtil.getDecoderFromDevice() :
HWDecoderUtil.Decoder.NONE;
if (decoder == HWDecoderUtil.Decoder.NONE ||
(decoder == HWDecoderUtil.Decoder.UNKNOWN && !force)) {
addOption(":codec=all");
return;
}
if (!AndroidUtil.isJellyBeanOrLater()) {
addOption(":file-caching=1500");
addOption(":network-caching=1500");
Log.i(TAG, "file/network caching for hw decoders");
}
final StringBuilder sb = new StringBuilder(":codec=");
if (decoder == HWDecoderUtil.Decoder.MEDIACODEC)
sb.append(getMediaCodecModule()).append(",");
else if (decoder == HWDecoderUtil.Decoder.OMX)
sb.append("iomx,");
else
sb.append(getMediaCodecModule()).append(",iomx,");
sb.append("all");
addOption(sb.toString());
} | void function(boolean enabled, boolean force) { final HWDecoderUtil.Decoder decoder = enabled ? HWDecoderUtil.getDecoderFromDevice() : HWDecoderUtil.Decoder.NONE; if (decoder == HWDecoderUtil.Decoder.NONE (decoder == HWDecoderUtil.Decoder.UNKNOWN && !force)) { addOption(STR); return; } if (!AndroidUtil.isJellyBeanOrLater()) { addOption(STR); addOption(STR); Log.i(TAG, STR); } final StringBuilder sb = new StringBuilder(STR); if (decoder == HWDecoderUtil.Decoder.MEDIACODEC) sb.append(getMediaCodecModule()).append(","); else if (decoder == HWDecoderUtil.Decoder.OMX) sb.append("iomx,"); else sb.append(getMediaCodecModule()).append(STR); sb.append("all"); addOption(sb.toString()); } | /**
* Add or remove hw acceleration media options
*
* @param enabled if true, hw decoder will be used
* @param force force hw acceleration even for unknown devices
*/ | Add or remove hw acceleration media options | setHWDecoderEnabled | {
"repo_name": "hanhailong/VCL-Official",
"path": "libvlc/src/org/videolan/libvlc/Media.java",
"license": "gpl-2.0",
"size": 23080
} | [
"android.util.Log",
"org.videolan.libvlc.util.AndroidUtil",
"org.videolan.libvlc.util.HWDecoderUtil"
] | import android.util.Log; import org.videolan.libvlc.util.AndroidUtil; import org.videolan.libvlc.util.HWDecoderUtil; | import android.util.*; import org.videolan.libvlc.util.*; | [
"android.util",
"org.videolan.libvlc"
] | android.util; org.videolan.libvlc; | 1,337,847 |
private void setRealizationEpoch(final Date value) {
if (realizationEpoch == Long.MIN_VALUE) {
realizationEpoch = value.getTime();
} else {
MetadataUtilities.propertyAlreadySet(AbstractDatum.class, "setRealizationEpoch", "realizationEpoch");
}
} | void function(final Date value) { if (realizationEpoch == Long.MIN_VALUE) { realizationEpoch = value.getTime(); } else { MetadataUtilities.propertyAlreadySet(AbstractDatum.class, STR, STR); } } | /**
* Invoked by JAXB only at unmarshalling time.
*
* @see #getRealizationEpoch()
*/ | Invoked by JAXB only at unmarshalling time | setRealizationEpoch | {
"repo_name": "desruisseaux/sis",
"path": "core/sis-referencing/src/main/java/org/apache/sis/referencing/datum/AbstractDatum.java",
"license": "apache-2.0",
"size": 24228
} | [
"java.util.Date",
"org.apache.sis.internal.metadata.MetadataUtilities"
] | import java.util.Date; import org.apache.sis.internal.metadata.MetadataUtilities; | import java.util.*; import org.apache.sis.internal.metadata.*; | [
"java.util",
"org.apache.sis"
] | java.util; org.apache.sis; | 414,440 |
private int testConnectionFlowCommitRollback(
Object ds, boolean invokeExtra, boolean isCommit)
throws IOException, SQLException {
final int extraInvokations = invokeExtra ? 25 : 0;
final int rowCount = 10;
final boolean isXA = ds instanceof XADataSource;
final boolean isCP = ds instanceof ConnectionPoolDataSource;
// Generate trace file name and define trace behavior.
String dsType = (isXA ? "xa_" : (isCP ? "cp_" : ""));
String tbl = "ds_" + dsType +
(invokeExtra ? "base_" : "extra_") +
(isCommit ? "commit" : "rollback");
File traceFile = SupportFilesSetup.getReadWrite(tbl + ".trace");
J2EEDataSource.setBeanProperty(ds, "traceFile",
PrivilegedFileOpsForTests.getAbsolutePath(traceFile));
J2EEDataSource.setBeanProperty(ds, "traceFileAppend", Boolean.FALSE);
J2EEDataSource.setBeanProperty( ds, "traceLevel",
new Integer(ClientDataSourceInterface.TRACE_ALL));
// Obtain connection.
PooledConnection physicalCon = null;
Connection con;
if (isXA) {
physicalCon = ((XADataSource)ds).getXAConnection();
con = physicalCon.getConnection();
} else if (isCP) {
physicalCon = ((ClientConnectionPoolDataSourceInterface)ds).
getPooledConnection();
con = physicalCon.getConnection();
} else {
con = ((DataSource)ds).getConnection();
}
con.setAutoCommit(false);
// Run test sequence.
// step 0: create table
Statement stmt = con.createStatement();
stmt.executeUpdate("create table " + tbl + " (id int)");
con.commit(); // Unconditional commit to persist table
endTranscation(con, isCommit, extraInvokations);
// step 1: insert data
PreparedStatement ps =
con.prepareStatement("insert into " + tbl + " values (?)");
for (int i=0; i < rowCount; i++) {
ps.setInt(1, i);
ps.executeUpdate();
endTranscation(con, isCommit, extraInvokations);
}
ps.close();
// Unconditional commit, should catch "missed" rollbacks above when we
// do a select with another connection at the end.
con.commit();
// step 2: select data
ResultSet rs = stmt.executeQuery("select count(*) from " + tbl);
rs.next();
rs.getInt(1);
rs.close();
endTranscation(con, isCommit, extraInvokations);
// step 3: values clause
rs = stmt.executeQuery("values 7");
assertTrue(rs.next());
assertEquals(7, rs.getInt(1));
rs.close();
stmt.close();
endTranscation(con, isCommit, extraInvokations);
con.close();
if (physicalCon != null) {
physicalCon.close();
}
// step 4: table content validation
stmt = createStatement();
rs = stmt.executeQuery("select count(*) from " + tbl);
rs.next();
assertEquals("Potential COMMIT/ROLLBACK protocol error",
isCommit ? rowCount : 0, rs.getInt(1));
// Parse the trace file for commits or rollbacks.
String token = "SEND BUFFER: " + (isXA ? "SYNCCTL" :
(isCommit ? "RDBCMM" : "RDBRLLBCK"));
int tokenCount = 0;
BufferedReader r = new BufferedReader(
PrivilegedFileOpsForTests.getFileReader(traceFile));
String line;
while ((line = r.readLine()) != null) {
if (line.startsWith("[derby]") && line.indexOf(token) != -1) {
println((isCommit ? "COMMIT: " : "ROLLBACK: ") + line);
tokenCount++;
}
}
r.close();
assertTrue("Parsing failed, no COMMITS/ROLLBACKS detected",
tokenCount > 0);
println(ds.getClass().getName() + ", invokeExtra=" + invokeExtra +
", isCommit=" + isCommit + ", tokenCount=" + tokenCount);
return tokenCount;
} | int function( Object ds, boolean invokeExtra, boolean isCommit) throws IOException, SQLException { final int extraInvokations = invokeExtra ? 25 : 0; final int rowCount = 10; final boolean isXA = ds instanceof XADataSource; final boolean isCP = ds instanceof ConnectionPoolDataSource; String dsType = (isXA ? "xa_" : (isCP ? "cp_" : STRds_STRbase_STRextra_STRcommitSTRrollbackSTR.traceSTRtraceFileSTRtraceFileAppendSTRtraceLevelSTRcreate table STR (id int)STRinsert into STR values (?)STRselect count(*) from STRvalues 7STRselect count(*) from STRPotential COMMIT/ROLLBACK protocol errorSTRSEND BUFFER: STRSYNCCTLSTRRDBCMMSTRRDBRLLBCKSTR[derby]STRCOMMIT: STRROLLBACK: STRParsing failed, no COMMITS/ROLLBACKS detectedSTR, invokeExtra=STR, isCommit=STR, tokenCount=" + tokenCount); return tokenCount; } | /**
* Performs a test sequence accessing the server, then parses the client
* connection trace file to obtain the number of commit or rollback
* commands flowed from the client to the server.
*
* @param ds data source used to obtain a connection to the database
* (must be using the test framework defaults)
* @param invokeExtra if {@code true} extra invocations of either commit or
* rollback are performed (depending on value of {@code isCommit})
* @param isCommit if {@code true}, commits are invoked, otherwise
* rollbacks are invoked
* @return The number of wire flows detected (depending on value of
* {@code isCommit}).
* @throws IOException if reading/parsing the trace file fails
* @throws SQLException if something goes wrong
*/ | Performs a test sequence accessing the server, then parses the client connection trace file to obtain the number of commit or rollback commands flowed from the client to the server | testConnectionFlowCommitRollback | {
"repo_name": "viaper/DBPlus",
"path": "DerbyHodgepodge/java/testing/org/apache/derbyTesting/functionTests/tests/jdbcapi/J2EEDataSourceTest.java",
"license": "apache-2.0",
"size": 186489
} | [
"java.io.IOException",
"java.sql.SQLException",
"javax.sql.ConnectionPoolDataSource",
"javax.sql.XADataSource"
] | import java.io.IOException; import java.sql.SQLException; import javax.sql.ConnectionPoolDataSource; import javax.sql.XADataSource; | import java.io.*; import java.sql.*; import javax.sql.*; | [
"java.io",
"java.sql",
"javax.sql"
] | java.io; java.sql; javax.sql; | 661,654 |
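The trace-parsing step is the part that generalises well; below is a self-contained sketch of the same token counting over an in-memory trace. The "[derby] SEND BUFFER" line format is simplified from what a real client trace contains.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class TraceTokenCountDemo {
    public static void main(String[] args) throws IOException {
        String trace = "[derby] SEND BUFFER: RDBCMM\n"
                     + "[derby] RECEIVE BUFFER: OK\n"
                     + "[derby] SEND BUFFER: RDBCMM\n";
        String token = "SEND BUFFER: RDBCMM";
        int tokenCount = 0;
        // Same loop shape as the test: count trace lines that carry the commit token.
        try (BufferedReader r = new BufferedReader(new StringReader(trace))) {
            String line;
            while ((line = r.readLine()) != null) {
                if (line.startsWith("[derby]") && line.indexOf(token) != -1) {
                    tokenCount++;
                }
            }
        }
        System.out.println(tokenCount); // prints 2
    }
}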
@FIXVersion(introduced="4.3")
@TagNumRef(tagNum=TagNum.TradeOriginationDate)
public void setTradeOriginationDate(Date tradeOriginationDate) {
this.tradeOriginationDate = tradeOriginationDate;
} | @FIXVersion(introduced="4.3") @TagNumRef(tagNum=TagNum.TradeOriginationDate) void function(Date tradeOriginationDate) { this.tradeOriginationDate = tradeOriginationDate; } | /**
* Message field setter.
* @param tradeOriginationDate field value
*/ | Message field setter | setTradeOriginationDate | {
"repo_name": "marvisan/HadesFIX",
"path": "Model/src/main/java/net/hades/fix/message/group/SideCrossOrdCxlGroup.java",
"license": "gpl-3.0",
"size": 18915
} | [
"java.util.Date",
"net.hades.fix.message.anno.FIXVersion",
"net.hades.fix.message.anno.TagNumRef",
"net.hades.fix.message.type.TagNum"
] | import java.util.Date; import net.hades.fix.message.anno.FIXVersion; import net.hades.fix.message.anno.TagNumRef; import net.hades.fix.message.type.TagNum; | import java.util.*; import net.hades.fix.message.anno.*; import net.hades.fix.message.type.*; | [
"java.util",
"net.hades.fix"
] | java.util; net.hades.fix; | 2,568,384 |
@Override public void exitValueType(@NotNull InfixParser.ValueTypeContext ctx) { } | @Override public void exitValueType(@NotNull InfixParser.ValueTypeContext ctx) { } | /**
* {@inheritDoc}
* <p/>
* The default implementation does nothing.
*/ | The default implementation does nothing | exitValueType | {
"repo_name": "PulfordJ/small-compiler",
"path": "src/generated/java/InfixBaseListener.java",
"license": "gpl-2.0",
"size": 11441
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 2,286,889 |
@Deprecated
@Override
@DoNotCall("Always throws UnsupportedOperationException")
public void remove(Range<C> range) {
throw new UnsupportedOperationException();
} | @DoNotCall(STR) void function(Range<C> range) { throw new UnsupportedOperationException(); } | /**
* Guaranteed to throw an exception and leave the {@code RangeSet} unmodified.
*
* @throws UnsupportedOperationException always
* @deprecated Unsupported operation.
*/ | Guaranteed to throw an exception and leave the RangeSet unmodified | remove | {
"repo_name": "typetools/guava",
"path": "guava/src/com/google/common/collect/ImmutableRangeSet.java",
"license": "apache-2.0",
"size": 26195
} | [
"com.google.errorprone.annotations.DoNotCall"
] | import com.google.errorprone.annotations.DoNotCall; | import com.google.errorprone.annotations.*; | [
"com.google.errorprone"
] | com.google.errorprone; | 645,120 |
public static BooleanVerifier verify(final Boolean value) {
return verify(value, null);
} | static BooleanVerifier function(final Boolean value) { return verify(value, null); } | /**
* <p>
* Starts a chain for verifying the specified {@code value} as a boolean using a {@link BooleanVerifier}.
* </p>
* <p>
* If {@code value} fails any subsequent verifications within this chain a {@link VerifierException} will be thrown
* immediately by the method for the offending verification.
* </p>
*
* @param value
* the {@code Boolean} to be verified (may be {@literal null})
* @return A {@link BooleanVerifier} to be used to verify {@code value}.
* @throws VerifierException
* If a problem occurs while setting up the {@link BooleanVerifier}.
* @see #verify(Boolean, Object)
* @see BooleanVerifier
*/ | Starts a chain for verifying the specified value as a boolean using a <code>BooleanVerifier</code>. If value fails any subsequent verifications within this chain a <code>VerifierException</code> will be thrown immediately by the method for the offending verification. | verify | {
"repo_name": "Skelp/verifier",
"path": "src/main/java/org/notninja/verifier/Verifier.java",
"license": "mit",
"size": 49484
} | [
"org.notninja.verifier.type.BooleanVerifier"
] | import org.notninja.verifier.type.BooleanVerifier; | import org.notninja.verifier.type.*; | [
"org.notninja.verifier"
] | org.notninja.verifier; | 2,253,882 |
private List<DeferredQueueRecordType> queryDeferredQueue(QueryDeferredQueueRequestType queryDeferredQueueRequest)
throws DeferredQueueException {
LOG.debug("Start: DeferredQueueManagerHelper.queryDeferredQueue method - query deferred messages.");
List<DeferredQueueRecordType> response = new ArrayList<DeferredQueueRecordType>();
try {
AsyncMsgRecordDao queueDao = new AsyncMsgRecordDao();
List<AsyncMsgRecord> asyncResponse = queueDao.queryByCriteria(queryDeferredQueueRequest);
if (asyncResponse != null && asyncResponse.size() > 0) {
for (AsyncMsgRecord asyncRecord : asyncResponse) {
DeferredQueueRecordType queueRecord = new DeferredQueueRecordType();
queueRecord.setMessageId(asyncRecord.getMessageId());
queueRecord.setCreationTime(XMLDateUtil.date2Gregorian(asyncRecord.getCreationTime()));
queueRecord.setResponseTime(XMLDateUtil.date2Gregorian(asyncRecord.getResponseTime()));
queueRecord.setDuration(asyncRecord.getDuration());
queueRecord.setServiceName(asyncRecord.getServiceName());
queueRecord.setDirection(asyncRecord.getDirection());
queueRecord.setCommunityId(asyncRecord.getCommunityId());
queueRecord.setStatus(asyncRecord.getStatus());
queueRecord.setResponseType(asyncRecord.getResponseType());
response.add(queueRecord);
}
}
} catch (Exception e) {
LOG.error("Exception occurred while querying deferred queue: ", e);
throw new DeferredQueueException(e);
}
LOG.debug("End: DeferredQueueManagerHelper.queryDeferredQueue method - query deferred messages.");
return response;
} | List<DeferredQueueRecordType> function(QueryDeferredQueueRequestType queryDeferredQueueRequest) throws DeferredQueueException { LOG.debug(STR); List<DeferredQueueRecordType> response = new ArrayList<DeferredQueueRecordType>(); try { AsyncMsgRecordDao queueDao = new AsyncMsgRecordDao(); List<AsyncMsgRecord> asyncResponse = queueDao.queryByCriteria(queryDeferredQueueRequest); if (asyncResponse != null && asyncResponse.size() > 0) { for (AsyncMsgRecord asyncRecord : asyncResponse) { DeferredQueueRecordType queueRecord = new DeferredQueueRecordType(); queueRecord.setMessageId(asyncRecord.getMessageId()); queueRecord.setCreationTime(XMLDateUtil.date2Gregorian(asyncRecord.getCreationTime())); queueRecord.setResponseTime(XMLDateUtil.date2Gregorian(asyncRecord.getResponseTime())); queueRecord.setDuration(asyncRecord.getDuration()); queueRecord.setServiceName(asyncRecord.getServiceName()); queueRecord.setDirection(asyncRecord.getDirection()); queueRecord.setCommunityId(asyncRecord.getCommunityId()); queueRecord.setStatus(asyncRecord.getStatus()); queueRecord.setResponseType(asyncRecord.getResponseType()); response.add(queueRecord); } } } catch (Exception e) { LOG.error(STR, e); throw new DeferredQueueException(e); } LOG.debug(STR); return response; } | /**
* Call deferred queue dao to query for matching records
*
* @param queryDeferredQueueRequest
* @return found list of queue records
* @throws DeferredQueueException
*/ | Call deferred queue dao to query for matching records | queryDeferredQueue | {
"repo_name": "AurionProject/Aurion",
"path": "Product/Production/Adapters/General/CONNECTAdapterWeb/src/main/java/gov/hhs/fha/nhinc/adapter/deferred/queue/DeferredQueueManagerHelper.java",
"license": "bsd-3-clause",
"size": 21689
} | [
"gov.hhs.fha.nhinc.asyncmsgs.dao.AsyncMsgRecordDao",
"gov.hhs.fha.nhinc.asyncmsgs.model.AsyncMsgRecord",
"gov.hhs.fha.nhinc.common.deferredqueuemanager.DeferredQueueRecordType",
"gov.hhs.fha.nhinc.common.deferredqueuemanager.QueryDeferredQueueRequestType",
"gov.hhs.fha.nhinc.util.format.XMLDateUtil",
"java.util.ArrayList",
"java.util.List"
] | import gov.hhs.fha.nhinc.asyncmsgs.dao.AsyncMsgRecordDao; import gov.hhs.fha.nhinc.asyncmsgs.model.AsyncMsgRecord; import gov.hhs.fha.nhinc.common.deferredqueuemanager.DeferredQueueRecordType; import gov.hhs.fha.nhinc.common.deferredqueuemanager.QueryDeferredQueueRequestType; import gov.hhs.fha.nhinc.util.format.XMLDateUtil; import java.util.ArrayList; import java.util.List; | import gov.hhs.fha.nhinc.asyncmsgs.dao.*; import gov.hhs.fha.nhinc.asyncmsgs.model.*; import gov.hhs.fha.nhinc.common.deferredqueuemanager.*; import gov.hhs.fha.nhinc.util.format.*; import java.util.*; | [
"gov.hhs.fha",
"java.util"
] | gov.hhs.fha; java.util; | 510,691 |
public void logError(String msg)
{
this.log(Level.ERROR, msg);
}
| void function(String msg) { this.log(Level.ERROR, msg); } | /**
* Convenience method to log an error message (org.apache.log4j.Level.ERROR).
*
* @param msg a description of the error.
*
* @see com.teletalk.jserver.log.LogManager
*/ | Convenience method to log an error message (org.apache.log4j.Level.ERROR) | logError | {
"repo_name": "tolo/JServer",
"path": "src/java/com/teletalk/jserver/log/LoggableObject.java",
"license": "apache-2.0",
"size": 25463
} | [
"org.apache.log4j.Level"
] | import org.apache.log4j.Level; | import org.apache.log4j.*; | [
"org.apache.log4j"
] | org.apache.log4j; | 1,964,137 |
public static SqlValidatorNamespace lookup(
SqlValidatorScope scope,
List<String> names) {
assert names.size() > 0;
final SqlNameMatcher nameMatcher =
scope.getValidator().getCatalogReader().nameMatcher();
final SqlValidatorScope.ResolvedImpl resolved =
new SqlValidatorScope.ResolvedImpl();
scope.resolve(ImmutableList.of(names.get(0)), nameMatcher, false, resolved);
assert resolved.count() == 1;
SqlValidatorNamespace namespace = resolved.only().namespace;
for (String name : Util.skip(names)) {
namespace = namespace.lookupChild(name);
assert namespace != null;
}
return namespace;
} | static SqlValidatorNamespace function( SqlValidatorScope scope, List<String> names) { assert names.size() > 0; final SqlNameMatcher nameMatcher = scope.getValidator().getCatalogReader().nameMatcher(); final SqlValidatorScope.ResolvedImpl resolved = new SqlValidatorScope.ResolvedImpl(); scope.resolve(ImmutableList.of(names.get(0)), nameMatcher, false, resolved); assert resolved.count() == 1; SqlValidatorNamespace namespace = resolved.only().namespace; for (String name : Util.skip(names)) { namespace = namespace.lookupChild(name); assert namespace != null; } return namespace; } | /**
* Resolves a multi-part identifier such as "SCHEMA.EMP.EMPNO" to a
* namespace. The returned namespace, never null, may represent a
* schema, table, column, etc.
*/ | Resolves a multi-part identifier such as "SCHEMA.EMP.EMPNO" to a namespace. The returned namespace, never null, may represent a schema, table, column, etc | lookup | {
"repo_name": "dindin5258/calcite",
"path": "core/src/main/java/org/apache/calcite/sql/validate/SqlValidatorUtil.java",
"license": "apache-2.0",
"size": 42385
} | [
"com.google.common.collect.ImmutableList",
"java.util.List",
"org.apache.calcite.util.Util"
] | import com.google.common.collect.ImmutableList; import java.util.List; import org.apache.calcite.util.Util; | import com.google.common.collect.*; import java.util.*; import org.apache.calcite.util.*; | [
"com.google.common",
"java.util",
"org.apache.calcite"
] | com.google.common; java.util; org.apache.calcite; | 255,247 |
protected void renderModel(EntityDragon p_77036_1_, float p_77036_2_, float p_77036_3_, float p_77036_4_, float p_77036_5_, float p_77036_6_, float p_77036_7_)
{
if (p_77036_1_.deathTicks > 0)
{
float var8 = (float)p_77036_1_.deathTicks / 200.0F;
GL11.glDepthFunc(GL11.GL_LEQUAL);
GL11.glEnable(GL11.GL_ALPHA_TEST);
GL11.glAlphaFunc(GL11.GL_GREATER, var8);
this.bindTexture(enderDragonExplodingTextures);
this.mainModel.render(p_77036_1_, p_77036_2_, p_77036_3_, p_77036_4_, p_77036_5_, p_77036_6_, p_77036_7_);
GL11.glAlphaFunc(GL11.GL_GREATER, 0.1F);
GL11.glDepthFunc(GL11.GL_EQUAL);
}
this.bindEntityTexture(p_77036_1_);
this.mainModel.render(p_77036_1_, p_77036_2_, p_77036_3_, p_77036_4_, p_77036_5_, p_77036_6_, p_77036_7_);
if (p_77036_1_.hurtTime > 0)
{
GL11.glDepthFunc(GL11.GL_EQUAL);
GL11.glDisable(GL11.GL_TEXTURE_2D);
GL11.glEnable(GL11.GL_BLEND);
GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA);
GL11.glColor4f(1.0F, 0.0F, 0.0F, 0.5F);
this.mainModel.render(p_77036_1_, p_77036_2_, p_77036_3_, p_77036_4_, p_77036_5_, p_77036_6_, p_77036_7_);
GL11.glEnable(GL11.GL_TEXTURE_2D);
GL11.glDisable(GL11.GL_BLEND);
GL11.glDepthFunc(GL11.GL_LEQUAL);
}
} | void function(EntityDragon p_77036_1_, float p_77036_2_, float p_77036_3_, float p_77036_4_, float p_77036_5_, float p_77036_6_, float p_77036_7_) { if (p_77036_1_.deathTicks > 0) { float var8 = (float)p_77036_1_.deathTicks / 200.0F; GL11.glDepthFunc(GL11.GL_LEQUAL); GL11.glEnable(GL11.GL_ALPHA_TEST); GL11.glAlphaFunc(GL11.GL_GREATER, var8); this.bindTexture(enderDragonExplodingTextures); this.mainModel.render(p_77036_1_, p_77036_2_, p_77036_3_, p_77036_4_, p_77036_5_, p_77036_6_, p_77036_7_); GL11.glAlphaFunc(GL11.GL_GREATER, 0.1F); GL11.glDepthFunc(GL11.GL_EQUAL); } this.bindEntityTexture(p_77036_1_); this.mainModel.render(p_77036_1_, p_77036_2_, p_77036_3_, p_77036_4_, p_77036_5_, p_77036_6_, p_77036_7_); if (p_77036_1_.hurtTime > 0) { GL11.glDepthFunc(GL11.GL_EQUAL); GL11.glDisable(GL11.GL_TEXTURE_2D); GL11.glEnable(GL11.GL_BLEND); GL11.glBlendFunc(GL11.GL_SRC_ALPHA, GL11.GL_ONE_MINUS_SRC_ALPHA); GL11.glColor4f(1.0F, 0.0F, 0.0F, 0.5F); this.mainModel.render(p_77036_1_, p_77036_2_, p_77036_3_, p_77036_4_, p_77036_5_, p_77036_6_, p_77036_7_); GL11.glEnable(GL11.GL_TEXTURE_2D); GL11.glDisable(GL11.GL_BLEND); GL11.glDepthFunc(GL11.GL_LEQUAL); } } | /**
* Renders the model in RenderLiving
*/ | Renders the model in RenderLiving | renderModel | {
"repo_name": "mviitanen/marsmod",
"path": "mcp/src/minecraft/net/minecraft/client/renderer/entity/RenderDragon.java",
"license": "gpl-2.0",
"size": 15046
} | [
"net.minecraft.entity.boss.EntityDragon"
] | import net.minecraft.entity.boss.EntityDragon; | import net.minecraft.entity.boss.*; | [
"net.minecraft.entity"
] | net.minecraft.entity; | 1,467,535 |
public boolean isExternalStorageWritable() {
String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state)) {
return true;
}
return false;
} | String state = Environment.getExternalStorageState(); if (Environment.MEDIA_MOUNTED.equals(state)) { return true; } return false; } | /**
* Checks if external storage is available for read and write.
*
* @return true if external storage is writable, false otherwise.
*/ | Checks if external storage is available for read and write | isExternalStorageWritable | {
"repo_name": "OneWorld0neDream/QingQiQiu",
"path": "MagicArenaCore/src/main/java/com/framework/magicarena/core/storage/external/SDCardUtils.java",
"license": "apache-2.0",
"size": 10213
} | [
"android.os.Environment"
] | import android.os.Environment; | import android.os.*; | [
"android.os"
] | android.os; | 1,200,293 |
@Override
protected void onPostCreate(Bundle savedInstanceState) {
if (GlobalDefines.LOG_LOCAL) {
Log.i(GlobalDefines.LOG_TAG, this.getClass().getSimpleName() + ": onPostCreate()");
}
super.onPostCreate(savedInstanceState);
// Sync the toggle state after onRestoreInstanceState has occurred.
this.drawerToggle.syncState();
} | void function(Bundle savedInstanceState) { if (GlobalDefines.LOG_LOCAL) { Log.i(GlobalDefines.LOG_TAG, this.getClass().getSimpleName() + STR); } super.onPostCreate(savedInstanceState); this.drawerToggle.syncState(); } | /**
* Syncs the toggle state of the ActionBarDrawerToggle
*
* @param savedInstanceState data used to restore the previous state
* @see android.app.Activity#onPostCreate(android.os.Bundle)
*/ | Syncs the toggle state of the ActionBarDrawerToggle | onPostCreate | {
"repo_name": "tellmas/permissions",
"path": "src/com/tellmas/android/permissions/MainActivity.java",
"license": "mit",
"size": 19230
} | [
"android.os.Bundle",
"android.util.Log"
] | import android.os.Bundle; import android.util.Log; | import android.os.*; import android.util.*; | [
"android.os",
"android.util"
] | android.os; android.util; | 1,389,675 |
protected short[] convertValueToArray(Object value) {
if (value instanceof List) {
List list = (List) value;
short[] target = new short[list.size()];
for (int i = 0; i < list.size(); i++) {
Object element = list.get(i);
target[i] = convertType(element);
}
return target;
}
if (value instanceof Collection) {
Collection collection = (Collection) value;
short[] target = new short[collection.size()];
int i = 0;
for (Object element : collection) {
target[i] = convertType(element);
i++;
}
return target;
}
if (value instanceof Iterable) {
Iterable iterable = (Iterable) value;
int count = 0;
for (Object element : iterable) {
count++;
}
short[] target = new short[count];
int i = 0;
for (Object element : iterable) {
target[i] = convertType(element);
i++;
}
return target;
}
if (value instanceof CharSequence) {
String[] strings = CsvUtil.toStringArray(value.toString());
return convertArrayToArray(strings);
}
// everything else:
return convertToSingleElementArray(value);
} | short[] function(Object value) { if (value instanceof List) { List list = (List) value; short[] target = new short[list.size()]; for (int i = 0; i < list.size(); i++) { Object element = list.get(i); target[i] = convertType(element); } return target; } if (value instanceof Collection) { Collection collection = (Collection) value; short[] target = new short[collection.size()]; int i = 0; for (Object element : collection) { target[i] = convertType(element); i++; } return target; } if (value instanceof Iterable) { Iterable iterable = (Iterable) value; int count = 0; for (Object element : iterable) { count++; } short[] target = new short[count]; int i = 0; for (Object element : iterable) { target[i] = convertType(element); i++; } return target; } if (value instanceof CharSequence) { String[] strings = CsvUtil.toStringArray(value.toString()); return convertArrayToArray(strings); } return convertToSingleElementArray(value); } | /**
* Converts a non-array value to an array. Detects various
* collection types and iterates over them to convert each
* element and build the target array.
*/ | Converts a non-array value to an array. Detects various collection types and iterates over them to convert each element and build the target array | convertValueToArray | {
"repo_name": "Artemish/jodd",
"path": "jodd-core/src/main/java/jodd/typeconverter/impl/ShortArrayConverter.java",
"license": "bsd-3-clause",
"size": 5012
} | [
"java.util.Collection",
"java.util.List"
] | import java.util.Collection; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 420,337 |
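A stripped-down, runnable sketch of the List branch only; convertType is replaced by a plain Number cast, which is an assumption — the real Jodd converter accepts more element types than this.

import java.util.Arrays;
import java.util.List;

public class ShortArrayConversionDemo {
    // Simplified stand-in for convertType(Object); handles Number elements only.
    static short convertType(Object element) {
        return ((Number) element).shortValue();
    }

    static short[] convertListToArray(List<?> list) {
        short[] target = new short[list.size()];
        for (int i = 0; i < list.size(); i++) {
            target[i] = convertType(list.get(i));
        }
        return target;
    }

    public static void main(String[] args) {
        System.out.println(Arrays.toString(convertListToArray(Arrays.asList(1, 2, 300))));
        // prints [1, 2, 300]
    }
}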
Set<String> getBanks(); | Set<String> getBanks(); | /**
* Get a set of all banks currently registered by the economy.
*
* @return a set of the names of all banks currently registered by the economy.
*/ | Get a set of all banks currently registered by the economy | getBanks | {
"repo_name": "MinecraftWars/Gringotts",
"path": "src/main/java/org/gestern/gringotts/api/Eco.java",
"license": "bsd-2-clause",
"size": 3546
} | [
"java.util.Set"
] | import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 2,248,714 |
public QueryStringQueryBuilder fuzziness(Fuzziness fuzziness) {
this.fuzziness = fuzziness == null ? DEFAULT_FUZZINESS : fuzziness;
return this;
} | QueryStringQueryBuilder function(Fuzziness fuzziness) { this.fuzziness = fuzziness == null ? DEFAULT_FUZZINESS : fuzziness; return this; } | /**
* Set the edit distance for fuzzy queries. Default is "AUTO".
*/ | Set the edit distance for fuzzy queries. Default is "AUTO" | fuzziness | {
"repo_name": "gmarz/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java",
"license": "apache-2.0",
"size": 43313
} | [
"org.elasticsearch.common.unit.Fuzziness"
] | import org.elasticsearch.common.unit.Fuzziness; | import org.elasticsearch.common.unit.*; | [
"org.elasticsearch.common"
] | org.elasticsearch.common; | 1,791,275 |
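A short usage sketch against the Elasticsearch query DSL; QueryBuilders.queryStringQuery and Fuzziness.TWO are existing API, but the exact JSON printed by toString() depends on the Elasticsearch version, so treat the output as indicative only.

import org.elasticsearch.common.unit.Fuzziness;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.index.query.QueryStringQueryBuilder;

public class FuzzinessDemo {
    public static void main(String[] args) {
        // Passing null would fall back to the default ("AUTO"), per the setter above.
        QueryStringQueryBuilder query = QueryBuilders.queryStringQuery("quick brwn fox")
                .fuzziness(Fuzziness.TWO);
        System.out.println(query); // query builders render themselves as JSON
    }
}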
@Transactional
public List<UserAuthSession> getExpiredUserSessions() {
List<UserAuthSession> resp = new ArrayList<UserAuthSession>();
try {
TypedQuery<UserAuthSession> qr = getEntityManager().createNamedQuery("UserAuthSession.getAllExpiredSessions", UserAuthSession.class);
DateService dtSer = new DateService();
Date currentdt = dtSer.getCurrentDate();
qr.setParameter("currentTime", currentdt);
if (qr != null) {
resp = qr.getResultList();
}
return resp;
} catch (Exception n) {
logger.error("getExpiredUserSessions", n);
}
return resp;
} | List<UserAuthSession> function() { List<UserAuthSession> resp = new ArrayList<UserAuthSession>(); try { TypedQuery<UserAuthSession> qr = getEntityManager().createNamedQuery(STR, UserAuthSession.class); DateService dtSer = new DateService(); Date currentdt = dtSer.getCurrentDate(); qr.setParameter(STR, currentdt); if (qr != null) { resp = qr.getResultList(); } return resp; } catch (Exception n) { logger.error(STR, n); } return resp; } | /**
* Retrieves all expired user sessions.
*
* @return the list of expired user sessions
*/ | Retrieves all expired user sessions | getExpiredUserSessions | {
"repo_name": "Agnie-Software/3a",
"path": "code/user-session/src/main/java/com/agnie/useradmin/session/server/dao/UserAuthSessionManagerImpl.java",
"license": "gpl-2.0",
"size": 5129
} | [
"com.agnie.common.time.DateService",
"com.agnie.useradmin.session.server.entity.UserAuthSession",
"java.util.ArrayList",
"java.util.Date",
"java.util.List",
"javax.persistence.TypedQuery"
] | import com.agnie.common.time.DateService; import com.agnie.useradmin.session.server.entity.UserAuthSession; import java.util.ArrayList; import java.util.Date; import java.util.List; import javax.persistence.TypedQuery; | import com.agnie.common.time.*; import com.agnie.useradmin.session.server.entity.*; import java.util.*; import javax.persistence.*; | [
"com.agnie.common",
"com.agnie.useradmin",
"java.util",
"javax.persistence"
] | com.agnie.common; com.agnie.useradmin; java.util; javax.persistence; | 208,448 |
DefaultMutableTreeNode findMatchingChild(TreePath parent, DefaultMutableTreeNode n, String name) {
name = name.replace("/", "");
if (n.getChildCount() == 0)
return null;
else {
DefaultMutableTreeNode c = null;
for (int i = 0; i < n.getChildCount(); i++) {
c = (DefaultMutableTreeNode) n.getChildAt(i);
Log.fine("child name: " + c.toString() + " name: " + name);
if (c.toString().equals(name)) {
Log.fine("child names are equal... returning child");
return c;
}
else
Log.fine("child names not equal");
}
}
return null;
} | DefaultMutableTreeNode findMatchingChild(TreePath parent, DefaultMutableTreeNode n, String name) { name = name.replace("/", STRchild name: STR name: STRchild names are equal... returning childSTRchild names not equal"); } } return null; } | /**
* Method to find a child with a specific name.
*
* @param parent TreePath for the parent we are searching
* @param n Node to look for children
* @param name Name for the child we are looking for
*
* @return DefaultMutableTreeNode Returns the child we are looking for,
* or null if it does not exist.
*/ | Method to find a child with a specific name | findMatchingChild | {
"repo_name": "yyhpys/ccnx-trace-interest",
"path": "javasrc/src/org/ccnx/ccn/utils/explorer/ContentExplorer.java",
"license": "lgpl-2.1",
"size": 40065
} | [
"javax.swing.tree.DefaultMutableTreeNode",
"javax.swing.tree.TreePath"
] | import javax.swing.tree.DefaultMutableTreeNode; import javax.swing.tree.TreePath; | import javax.swing.tree.*; | [
"javax.swing"
] | javax.swing; | 2,900,493 |
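A runnable sketch of the same linear child scan on a small in-memory tree; the logging and the unused TreePath parameter from the original are dropped, and the node names are invented.

import javax.swing.tree.DefaultMutableTreeNode;

public class FindChildDemo {
    static DefaultMutableTreeNode findMatchingChild(DefaultMutableTreeNode n, String name) {
        name = name.replace("/", "");
        for (int i = 0; i < n.getChildCount(); i++) {
            DefaultMutableTreeNode c = (DefaultMutableTreeNode) n.getChildAt(i);
            if (c.toString().equals(name)) {
                return c; // node names compare via toString(), as in the original
            }
        }
        return null;
    }

    public static void main(String[] args) {
        DefaultMutableTreeNode root = new DefaultMutableTreeNode("ccnx");
        root.add(new DefaultMutableTreeNode("docs"));
        root.add(new DefaultMutableTreeNode("media"));
        System.out.println(findMatchingChild(root, "media/")); // prints media
    }
}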
public static void main(String[] args) throws Exception {
try {
SpkDiarizationLogger.setup();
Parameter parameter = MainTools.getParameters(args);
info(parameter, "SIterativeSegmentation");
if (parameter.show.isEmpty() == false) {
// Clusters
ClusterSet clusterSet = MainTools.readClusterSet(parameter);
// clusters.debug();
ArrayList<String> toRemove = new ArrayList<String>();
for (String name : clusterSet) {
Cluster cluster = clusterSet.getCluster(name);
int length = cluster.getLength();
if (length < 50) {
logger.fine("\tremove cluster : " + name + " len = " + length);
toRemove.add(name);
}
}
for (String name : toRemove) {
clusterSet.removeCluster(name);
}
ClusterSet filterClusterSet = new ClusterSet();
filterClusterSet.read(parameter.show, parameter.getParameterSegmentationFilterFile());
// Features
AudioFeatureSet featureSet = MainTools.readFeatureSet(parameter, clusterSet);
// Models
GMMArrayList gmmList = MainTools.readGMMContainer(parameter);
ClusterSet clusterSetResult = make(featureSet, clusterSet, filterClusterSet, gmmList, parameter);
// Seg outPut
MainTools.writeClusterSet(parameter, clusterSetResult, false);
}
} catch (DiarizationException e) {
logger.log(Level.SEVERE, "", e);
e.printStackTrace();
}
} | static void function(String[] args) throws Exception { try { SpkDiarizationLogger.setup(); Parameter parameter = MainTools.getParameters(args); info(parameter, STR); if (parameter.show.isEmpty() == false) { ClusterSet clusterSet = MainTools.readClusterSet(parameter); ArrayList<String> toRemove = new ArrayList<String>(); for (String name : clusterSet) { Cluster cluster = clusterSet.getCluster(name); int length = cluster.getLength(); if (length < 50) { logger.fine(STR + name + STR + length); toRemove.add(name); } } for (String name : toRemove) { clusterSet.removeCluster(name); } ClusterSet filterClusterSet = new ClusterSet(); filterClusterSet.read(parameter.show, parameter.getParameterSegmentationFilterFile()); AudioFeatureSet featureSet = MainTools.readFeatureSet(parameter, clusterSet); GMMArrayList gmmList = MainTools.readGMMContainer(parameter); ClusterSet clusterSetResult = make(featureSet, clusterSet, filterClusterSet, gmmList, parameter); MainTools.writeClusterSet(parameter, clusterSetResult, false); } } catch (DiarizationException e) { logger.log(Level.SEVERE, "", e); e.printStackTrace(); } } | /**
* The main method.
*
* @param args the arguments
* @throws Exception the exception
*/ | The main method | main | {
"repo_name": "Adirockzz95/GenderDetect",
"path": "src/src/fr/lium/spkDiarization/tools/SIterativeSegmentation.java",
"license": "gpl-3.0",
"size": 7519
} | [
"fr.lium.spkDiarization.lib.DiarizationException",
"fr.lium.spkDiarization.lib.MainTools",
"fr.lium.spkDiarization.lib.SpkDiarizationLogger",
"fr.lium.spkDiarization.libClusteringData.Cluster",
"fr.lium.spkDiarization.libClusteringData.ClusterSet",
"fr.lium.spkDiarization.libFeature.AudioFeatureSet",
"fr.lium.spkDiarization.libModel.gaussian.GMMArrayList",
"fr.lium.spkDiarization.parameter.Parameter",
"java.util.ArrayList",
"java.util.logging.Level"
] | import fr.lium.spkDiarization.lib.DiarizationException; import fr.lium.spkDiarization.lib.MainTools; import fr.lium.spkDiarization.lib.SpkDiarizationLogger; import fr.lium.spkDiarization.libClusteringData.Cluster; import fr.lium.spkDiarization.libClusteringData.ClusterSet; import fr.lium.spkDiarization.libFeature.AudioFeatureSet; import fr.lium.spkDiarization.libModel.gaussian.GMMArrayList; import fr.lium.spkDiarization.parameter.Parameter; import java.util.ArrayList; import java.util.logging.Level; | import fr.lium.*; import java.util.*; import java.util.logging.*; | [
"fr.lium",
"java.util"
] | fr.lium; java.util; | 2,522,495 |
public FormGenerationResult submitApplication(ProposalDevelopmentDocument pdDoc)
throws S2sCommunicationException; | FormGenerationResult function(ProposalDevelopmentDocument pdDoc) throws S2sCommunicationException; | /**
*
* This method is used to submit forms to grants.gov
*
* @param pdDoc
* Proposal Development Document.
* @return true if submitted, false otherwise.
* @throws S2sCommunicationException
*/ | This method is used to submit forms to grants.gov | submitApplication | {
"repo_name": "blackcathacker/kc.preclean",
"path": "coeus-code/src/main/java/org/kuali/coeus/propdev/impl/s2s/S2sSubmissionService.java",
"license": "apache-2.0",
"size": 4019
} | [
"org.kuali.coeus.propdev.impl.core.ProposalDevelopmentDocument",
"org.kuali.coeus.propdev.impl.s2s.connect.S2sCommunicationException",
"org.kuali.coeus.s2sgen.api.generate.FormGenerationResult"
] | import org.kuali.coeus.propdev.impl.core.ProposalDevelopmentDocument; import org.kuali.coeus.propdev.impl.s2s.connect.S2sCommunicationException; import org.kuali.coeus.s2sgen.api.generate.FormGenerationResult; | import org.kuali.coeus.propdev.impl.core.*; import org.kuali.coeus.propdev.impl.s2s.connect.*; import org.kuali.coeus.s2sgen.api.generate.*; | [
"org.kuali.coeus"
] | org.kuali.coeus; | 753,763 |
public static ListBuilder builder(Config pConfig) {
sLogger.entry();
List<ConfigSource> sources = new ArrayList<>();
@SuppressWarnings("unchecked")
List<ConfigParser> parsers = pConfig.bind("bootstrap.parsers", List.class);
if (parsers == null)
throw new IllegalArgumentException();
String appName = pConfig.bind("application.name", String.class);
Set<String> extensions = new HashSet<>();
for (ConfigParser cp : parsers)
extensions.addAll(cp.getFileExtensions());
ConfigSourceFactoryFactory factory = new CoreFactoryFactory();
for (String extension : extensions)
sources.add(factory.getClassPathConfigSourceFactory().create("library." + extension, null));
for (String extension : extensions)
sources.add(factory.getClassPathConfigSourceFactory().create("application." + extension, null));
for (String extension : extensions)
sources.add(factory.getFileConfigSourceFactory().create("application." + extension, null));
if (appName != null) {
for (String extension : extensions)
sources.add(factory.getClassPathConfigSourceFactory().create(appName + "." + extension, null));
for (String extension : extensions)
sources.add(factory.getFileConfigSourceFactory().create(appName + "." + extension, null));
}
sources.add(factory.getEnvironmentalVariablesConfigSourceFactory().create(null, null));
sources.add(factory.getSystemPropertiesConfigSourceFactory().create(null, null));
sources.add(factory.getDockerSecretsConfigSourceFactory().create(null, null));
return LoggerUtils.nonNullExit(sLogger, new ListBuilder(sources));
} | static ListBuilder function(Config pConfig) { sLogger.entry(); List<ConfigSource> sources = new ArrayList<>(); @SuppressWarnings(STR) List<ConfigParser> parsers = pConfig.bind(STR, List.class); if (parsers == null) throw new IllegalArgumentException(); String appName = pConfig.bind(STR, String.class); Set<String> extensions = new HashSet<>(); for (ConfigParser cp : parsers) extensions.addAll(cp.getFileExtensions()); ConfigSourceFactoryFactory factory = new CoreFactoryFactory(); for (String extension : extensions) sources.add(factory.getClassPathConfigSourceFactory().create(STR + extension, null)); for (String extension : extensions) sources.add(factory.getClassPathConfigSourceFactory().create(STR + extension, null)); for (String extension : extensions) sources.add(factory.getFileConfigSourceFactory().create(STR + extension, null)); if (appName != null) { for (String extension : extensions) sources.add(factory.getClassPathConfigSourceFactory().create(appName + "." + extension, null)); for (String extension : extensions) sources.add(factory.getFileConfigSourceFactory().create(appName + "." + extension, null)); } sources.add(factory.getEnvironmentalVariablesConfigSourceFactory().create(null, null)); sources.add(factory.getSystemPropertiesConfigSourceFactory().create(null, null)); sources.add(factory.getDockerSecretsConfigSourceFactory().create(null, null)); return LoggerUtils.nonNullExit(sLogger, new ListBuilder(sources)); } | /**
* Generates a builder
*
* @param pConfig the Config
* @return the ListBuilder
*/ | Generates a builder | builder | {
"repo_name": "diamondq/dq-common-config",
"path": "config-core/src/main/java/com/diamondq/common/config/core/std/StdConfigListBuilder.java",
"license": "apache-2.0",
"size": 3299
} | [
"com.diamondq.common.config.Config",
"com.diamondq.common.config.core.impl.LoggerUtils",
"com.diamondq.common.config.spi.ConfigParser",
"com.diamondq.common.config.spi.ConfigSource",
"com.diamondq.common.config.spi.ConfigSourceFactoryFactory",
"java.util.ArrayList",
"java.util.HashSet",
"java.util.List",
"java.util.Set"
] | import com.diamondq.common.config.Config; import com.diamondq.common.config.core.impl.LoggerUtils; import com.diamondq.common.config.spi.ConfigParser; import com.diamondq.common.config.spi.ConfigSource; import com.diamondq.common.config.spi.ConfigSourceFactoryFactory; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; | import com.diamondq.common.config.*; import com.diamondq.common.config.core.impl.*; import com.diamondq.common.config.spi.*; import java.util.*; | [
"com.diamondq.common",
"java.util"
] | com.diamondq.common; java.util; | 1,461,147 |
public void setLocaterInfo(SourceLocator locator)
{
m_lineNumber = locator.getLineNumber();
m_columnNumber = locator.getColumnNumber();
} | void function(SourceLocator locator) { m_lineNumber = locator.getLineNumber(); m_columnNumber = locator.getColumnNumber(); } | /**
* Set the location information for this element.
*
* @param locator Source Locator with location information for this element
*/ | Set the location information for this element | setLocaterInfo | {
"repo_name": "mirego/j2objc",
"path": "xalan/third_party/android/platform/external/apache-xml/src/main/java/org/apache/xalan/templates/ElemTemplateElement.java",
"license": "apache-2.0",
"size": 44686
} | [
"javax.xml.transform.SourceLocator"
] | import javax.xml.transform.SourceLocator; | import javax.xml.transform.*; | [
"javax.xml"
] | javax.xml; | 2,484,587 |
public static void loadAllDatabases() throws FilesReplacedException {
ArrayList<FilePathPair> movedFiles = new ArrayList<FilePathPair>();
Arrays.stream(Constants.DATABASES)
.map(IDatabase::load)
.filter(Objects::nonNull)
.forEach(movedFiles::add);
if (!movedFiles.isEmpty()) {
throw new FilesReplacedException(movedFiles);
}
} | static void function() throws FilesReplacedException { ArrayList<FilePathPair> movedFiles = new ArrayList<FilePathPair>(); Arrays.stream(Constants.DATABASES) .map(IDatabase::load) .filter(Objects::nonNull) .forEach(movedFiles::add); if (!movedFiles.isEmpty()) { throw new FilesReplacedException(movedFiles); } } | /**
* This method loads/refreshes all databases based on data contained within
* the data file defined by the file path of each record.
*
* @throws FilesReplacedException
* if unrecognized files were moved
*/ | This method loads/refreshes all databases based on data contained within the data file defined by the file path of each record | loadAllDatabases | {
"repo_name": "cs2103jan2016-w13-4j/main",
"path": "src/main/java/jfdi/storage/DatabaseManager.java",
"license": "mit",
"size": 2406
} | [
"java.util.ArrayList",
"java.util.Arrays",
"java.util.Objects"
] | import java.util.ArrayList; import java.util.Arrays; import java.util.Objects; | import java.util.*; | [
"java.util"
] | java.util; | 430,235 |
// Using reflection, get the available field names in the class
Class c = null;
int toScope = PageContext.PAGE_SCOPE;
if (scope != null) {
toScope = getScope(scope);
}
try {
c = Class.forName(clazz);
} catch (ClassNotFoundException cnf) {
log.error("ClassNotFound - maybe a typo?");
throw new JspException(cnf.getMessage());
}
try {
// if var is null, expose all variables
if (var == null) {
Field[] fields = c.getDeclaredFields();
AccessibleObject.setAccessible(fields, true);
for (Field field : fields) {
pageContext.setAttribute(field.getName(), field.get(this),
toScope);
}
} else {
try {
Object value = c.getField(var).get(this);
pageContext.setAttribute(c.getField(var).getName(), value,
toScope);
} catch (NoSuchFieldException nsf) {
log.error(nsf.getMessage());
throw new JspException(nsf);
}
}
} catch (IllegalAccessException iae) {
log.error("Illegal Access Exception - maybe a classloader issue?");
throw new JspException(iae);
}
// Continue processing this page
return (SKIP_BODY);
} | Class c = null; int toScope = PageContext.PAGE_SCOPE; if (scope != null) { toScope = getScope(scope); } try { c = Class.forName(clazz); } catch (ClassNotFoundException cnf) { log.error(STR); throw new JspException(cnf.getMessage()); } try { if (var == null) { Field[] fields = c.getDeclaredFields(); AccessibleObject.setAccessible(fields, true); for (Field field : fields) { pageContext.setAttribute(field.getName(), field.get(this), toScope); } } else { try { Object value = c.getField(var).get(this); pageContext.setAttribute(c.getField(var).getName(), value, toScope); } catch (NoSuchFieldException nsf) { log.error(nsf.getMessage()); throw new JspException(nsf); } } } catch (IllegalAccessException iae) { log.error(STR); throw new JspException(iae); } return (SKIP_BODY); } | /**
* Main method that does processing and exposes Constants in specified scope
*
* @return int
* @throws JspException
* if processing fails
*/ | Main method that does processing and exposes Constants in specified scope | doStartTag | {
"repo_name": "paawak/blog",
"path": "code/SpringMVCWithFlex/ims-web/src/main/java/com/swayam/ims/webapp/taglib/ConstantsTag.java",
"license": "gpl-2.0",
"size": 4975
} | [
"java.lang.reflect.AccessibleObject",
"java.lang.reflect.Field",
"javax.servlet.jsp.JspException",
"javax.servlet.jsp.PageContext"
] | import java.lang.reflect.AccessibleObject; import java.lang.reflect.Field; import javax.servlet.jsp.JspException; import javax.servlet.jsp.PageContext; | import java.lang.reflect.*; import javax.servlet.jsp.*; | [
"java.lang",
"javax.servlet"
] | java.lang; javax.servlet; | 328,911 |
private Vector<ArangoDBIndex> getIndices(String collectionName) throws ArangoDBException {
Vector<ArangoDBIndex> indices = new Vector<ArangoDBIndex>();
IndexesEntity indexes;
try {
indexes = driver.getIndexes(collectionName);
} catch (ArangoException e) {
throw new ArangoDBException(e);
}
for (IndexEntity indexEntity : indexes.getIndexes()) {
indices.add(new ArangoDBIndex(indexEntity));
}
return indices;
} | Vector<ArangoDBIndex> function(String collectionName) throws ArangoDBException { Vector<ArangoDBIndex> indices = new Vector<ArangoDBIndex>(); IndexesEntity indexes; try { indexes = driver.getIndexes(collectionName); } catch (ArangoException e) { throw new ArangoDBException(e); } for (IndexEntity indexEntity : indexes.getIndexes()) { indices.add(new ArangoDBIndex(indexEntity)); } return indices; } | /**
* Get the List of indices of a collection
*
* @param collectionName
* the collection name
*
* @return Vector<ArangoDBIndex> List of indices
*
* @throws ArangoDBException
* if creation failed
*/ | Get the List of indices of a collection | getIndices | {
"repo_name": "Unni34/blueprints",
"path": "src/main/java/com/arangodb/blueprints/client/ArangoDBSimpleGraphClient.java",
"license": "apache-2.0",
"size": 23089
} | [
"com.arangodb.ArangoException",
"com.arangodb.entity.IndexEntity",
"com.arangodb.entity.IndexesEntity",
"java.util.Vector"
] | import com.arangodb.ArangoException; import com.arangodb.entity.IndexEntity; import com.arangodb.entity.IndexesEntity; import java.util.Vector; | import com.arangodb.*; import com.arangodb.entity.*; import java.util.*; | [
"com.arangodb",
"com.arangodb.entity",
"java.util"
] | com.arangodb; com.arangodb.entity; java.util; | 2,771,940 |
public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String policyName, final ServiceCallback<Void> serviceCallback) {
return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, policyName), serviceCallback);
} | ServiceFuture<Void> function(String resourceGroupName, String policyName, final ServiceCallback<Void> serviceCallback) { return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, policyName), serviceCallback); } | /**
* Deletes Policy.
*
* @param resourceGroupName The name of the resource group.
* @param policyName The name of the policy.
* @param serviceCallback the async ServiceCallback to handle successful and failed responses.
* @throws IllegalArgumentException thrown if parameters fail the validation
* @return the {@link ServiceFuture} object
*/ | Deletes Policy | beginDeleteAsync | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/network/mgmt-v2019_07_01/src/main/java/com/microsoft/azure/management/network/v2019_07_01/implementation/WebApplicationFirewallPoliciesInner.java",
"license": "mit",
"size": 51475
} | [
"com.microsoft.rest.ServiceCallback",
"com.microsoft.rest.ServiceFuture"
] | import com.microsoft.rest.ServiceCallback; import com.microsoft.rest.ServiceFuture; | import com.microsoft.rest.*; | [
"com.microsoft.rest"
] | com.microsoft.rest; | 1,302,639 |
public void addLiteral(final ILiteral lit, final Term rewrite) {
addRewrite(rewrite);
mClause.addLiteral(lit);
} | void function(final ILiteral lit, final Term rewrite) { addRewrite(rewrite); mClause.addLiteral(lit); } | /**
* Add a literal and its rewrite proof. This is called whenever we create a new literal. It is expected that
* every term rewrites to exactly one literal.
*
* @param lit
* The collected literal.
* @param rewrite
* the rewrite proof from the original argument to the literal.
*/ | Add a literal and its rewrite proof. This is called whenever we create a new literal. It is expected that every term rewrites to exactly one literal | addLiteral | {
"repo_name": "juergenchrist/smtinterpol",
"path": "SMTInterpol/src/de/uni_freiburg/informatik/ultimate/smtinterpol/convert/Clausifier.java",
"license": "gpl-3.0",
"size": 88373
} | [
"de.uni_freiburg.informatik.ultimate.logic.Term",
"de.uni_freiburg.informatik.ultimate.smtinterpol.dpll.ILiteral"
] | import de.uni_freiburg.informatik.ultimate.logic.Term; import de.uni_freiburg.informatik.ultimate.smtinterpol.dpll.ILiteral; | import de.uni_freiburg.informatik.ultimate.logic.*; import de.uni_freiburg.informatik.ultimate.smtinterpol.dpll.*; | [
"de.uni_freiburg.informatik"
] | de.uni_freiburg.informatik; | 1,866,486 |
public static Data create(final InputStream stream, final char delimiter, final char quote, final char escape, final char[] linebreak) throws IOException {
return new IterableData(new CSVDataInput(stream, delimiter, quote, escape, linebreak).iterator());
} | static Data function(final InputStream stream, final char delimiter, final char quote, final char escape, final char[] linebreak) throws IOException { return new IterableData(new CSVDataInput(stream, delimiter, quote, escape, linebreak).iterator()); } | /**
* Creates a new data object from a CSV file.
*
* @param stream the stream
* @param delimiter the delimiter
* @param quote the quote
* @param escape the escape
* @param linebreak the linebreak
* @return the data
* @throws IOException Signals that an I/O exception has occurred.
*/ | Creates a new data object from a CSV file | create | {
"repo_name": "TheRealRasu/arx",
"path": "src/main/org/deidentifier/arx/Data.java",
"license": "apache-2.0",
"size": 16198
} | [
"java.io.IOException",
"java.io.InputStream",
"org.deidentifier.arx.io.CSVDataInput"
] | import java.io.IOException; import java.io.InputStream; import org.deidentifier.arx.io.CSVDataInput; | import java.io.*; import org.deidentifier.arx.io.*; | [
"java.io",
"org.deidentifier.arx"
] | java.io; org.deidentifier.arx; | 531,463 |
IvyModule undeclaredArtifact(Map<String, ?> options); | IvyModule undeclaredArtifact(Map<String, ?> options); | /**
* Adds an artifact that is not declared in the ivy.xml file.
*/ | Adds an artifact that is not declared in the ivy.xml file | undeclaredArtifact | {
"repo_name": "HenryHarper/Acquire-Reboot",
"path": "gradle/src/internal-integ-testing/org/gradle/test/fixtures/ivy/IvyModule.java",
"license": "mit",
"size": 2493
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 231,023 |
for (int i = 0; i < array.length - 1; i++) {
for (int j = i + 1; j < array.length; j++) {
if (array[i] != null && array[i].equals(array[j])) {
array[j] = null;
} else if (array[i] == null && array[j] != null) {
array[i] = array[j];
array[j] = null;
}
}
}
//count new array length
int lengthCopyArray = 0;
for (String s : array) {
if (s != null) {
lengthCopyArray++;
}
}
//create copyArray for copy array without dublicate of strings
String[] copyArray = Arrays.copyOf(array, lengthCopyArray);
return copyArray;
} | for (int i = 0; i < array.length - 1; i++) { for (int j = i + 1; j < array.length; j++) { if (array[i] != null && array[i].equals(array[j])) { array[j] = null; } else if (array[i] == null && array[j] != null) { array[i] = array[j]; array[j] = null; } } } int lengthCopyArray = 0; for (String s : array) { if (s != null) { lengthCopyArray++; } } String[] copyArray = Arrays.copyOf(array, lengthCopyArray); return copyArray; } | /**
* Delete Copy of strings.
* @param array - array of strings.
* @return array - array without copy of strings.
*/ | Delete Copy of strings | deleteCopy | {
"repo_name": "Alesandrus/aivanov",
"path": "chapter_001/lesson_5/src/main/java/ru/job4j/DeleteCopy.java",
"license": "apache-2.0",
"size": 945
} | [
"java.util.Arrays"
] | import java.util.Arrays; | import java.util.*; | [
"java.util"
] | java.util; | 1,361,314 |
protected void fillHPath (DataSet ds, RectF rect, float h, float angle, float sweep)
{
float angle2;
Strip s;
angle2 = (angle + sweep) % 360;
if (isVisible (angle))
if (isVisible (angle2))
s = Strip.FRONT;
else
s = Strip.LEFT;
else
if (isVisible (angle2))
s = Strip.RIGHT;
else
s = Strip.BACK;
s.fillDataSet(ds, rect, h, angle, angle2);
}
| void function (DataSet ds, RectF rect, float h, float angle, float sweep) { float angle2; Strip s; angle2 = (angle + sweep) % 360; if (isVisible (angle)) if (isVisible (angle2)) s = Strip.FRONT; else s = Strip.LEFT; else if (isVisible (angle2)) s = Strip.RIGHT; else s = Strip.BACK; s.fillDataSet(ds, rect, h, angle, angle2); } | /**
* Creates the paths describing the vertical side of a slice.
* @param ds the dataset to be filled
* @param rect the enclosing rect
* @param h height of the slice
* @param angle the start angle
* @param sweep the clockwise sweep
*/ | Creates the paths describing the vertical side of a slice | fillHPath | {
"repo_name": "WaniKani/Android-Notification",
"path": "src/com/wanikani/androidnotifier/graph/PiePlot.java",
"license": "gpl-3.0",
"size": 11934
} | [
"android.graphics.RectF"
] | import android.graphics.RectF; | import android.graphics.*; | [
"android.graphics"
] | android.graphics; | 2,735,893 |
void prepareIntent(Context context); | void prepareIntent(Context context); | /** Builds the intent using the parameters assigned to the object.
* NOTE: This operation can be expensive and should be performed on a background thread. This must be called before getIntent(). */ | Builds the intent using the parameters assigned to the object | prepareIntent | {
"repo_name": "jeremyje/android-beryl",
"path": "beryl/src/org/beryl/intents/IIntentBuilder.java",
"license": "mit",
"size": 524
} | [
"android.content.Context"
] | import android.content.Context; | import android.content.*; | [
"android.content"
] | android.content; | 1,481,399 |
public TableItem getNonEmpty(int index)
{
int nonEmptyIndex = nonEmptyIndexes.get(index);
return table.getItem(nonEmptyIndex );
}
| TableItem function(int index) { int nonEmptyIndex = nonEmptyIndexes.get(index); return table.getItem(nonEmptyIndex ); } | /**
* Return the row/table-item on the specified index.
* IMPORTANT: the indexes of the non-empty rows are populated with a call to nrNonEmpty(). Make sure to call that first.
*
* @param index the index of the non-empty row/table-item
* @return the requested non-empty row/table-item
*/ | Return the row/table-item on the specified index | getNonEmpty | {
"repo_name": "soluvas/pdi-ce",
"path": "src-ui/org/pentaho/di/ui/core/widget/TableView.java",
"license": "apache-2.0",
"size": 87146
} | [
"org.eclipse.swt.widgets.TableItem"
] | import org.eclipse.swt.widgets.TableItem; | import org.eclipse.swt.widgets.*; | [
"org.eclipse.swt"
] | org.eclipse.swt; | 38,942 |
public void deleteAuthorizables(Session jcrSession,
Resource baseResource,
String [] paths,
List<Modification> changes
) throws RepositoryException; | void function(Session jcrSession, Resource baseResource, String [] paths, List<Modification> changes ) throws RepositoryException; | /**
* Deletes one or more users or groups from the repository
*
* @param jcrSession the JCR session of the user creating the user
* @param baseResource the base resource to calculate the relative paths from (required)
* @param paths An array of relative resource paths to Authorizables to be deleted (required)
* @param changes The list of changes for this operation (optional)
* @throws RepositoryException
*/ | Deletes one or more users or groups from the repository | deleteAuthorizables | {
"repo_name": "Nimco/sling",
"path": "bundles/jcr/jackrabbit-usermanager/src/main/java/org/apache/sling/jackrabbit/usermanager/DeleteAuthorizables.java",
"license": "apache-2.0",
"size": 2034
} | [
"java.util.List",
"javax.jcr.RepositoryException",
"javax.jcr.Session",
"org.apache.sling.api.resource.Resource",
"org.apache.sling.servlets.post.Modification"
] | import java.util.List; import javax.jcr.RepositoryException; import javax.jcr.Session; import org.apache.sling.api.resource.Resource; import org.apache.sling.servlets.post.Modification; | import java.util.*; import javax.jcr.*; import org.apache.sling.api.resource.*; import org.apache.sling.servlets.post.*; | [
"java.util",
"javax.jcr",
"org.apache.sling"
] | java.util; javax.jcr; org.apache.sling; | 1,624,301 |
public TabLayoutPanel getPanel() {
return panel;
} | TabLayoutPanel function() { return panel; } | /**
* Get underlying {@code TabLayoutPanel}.
*
* @return Underlying tab panel.
*/ | Get underlying TabLayoutPanel | getPanel | {
"repo_name": "snogaraleal/wbi",
"path": "app/client/ui/coordinators/TabCoordinator.java",
"license": "gpl-3.0",
"size": 4251
} | [
"com.google.gwt.user.client.ui.TabLayoutPanel"
] | import com.google.gwt.user.client.ui.TabLayoutPanel; | import com.google.gwt.user.client.ui.*; | [
"com.google.gwt"
] | com.google.gwt; | 881,199 |
@Override
public int malloc() throws OutOfMemoryException {
if (m_head == -1) {
if (m_watermark == m_maxCapacity) {
if (m_growthFactor == 0) {
throw new OutOfMemoryException("Out of memory (" + maxBlocks() + "/" + usedBlocks()
+ " blocks used)");
} else {
increaseSize();
return malloc();
}
} else {
m_head = m_watermark;
set_next(m_head, -1);
m_watermark++;
}
} else {
m_free--;
}
int oldHead = m_head;
m_head = next(oldHead);
set_next(oldHead, -1);
m_initializer.initialize(this, oldHead, m_blockSize);
return oldHead;
} | int function() throws OutOfMemoryException { if (m_head == -1) { if (m_watermark == m_maxCapacity) { if (m_growthFactor == 0) { throw new OutOfMemoryException(STR + maxBlocks() + "/" + usedBlocks() + STR); } else { increaseSize(); return malloc(); } } else { m_head = m_watermark; set_next(m_head, -1); m_watermark++; } } else { m_free--; } int oldHead = m_head; m_head = next(oldHead); set_next(oldHead, -1); m_initializer.initialize(this, oldHead, m_blockSize); return oldHead; } | /**
* Allocates a single block and returns a pointer to that block
*
* @return pointer to newly allocated block
*
* @throws net.yadan.banana.memory.OutOfMemoryException : if there are 0 free blocks
*/ | Allocates a single block and returns a pointer to that block | malloc | {
"repo_name": "KobeFeng/banana",
"path": "banana/src/net/yadan/banana/memory/block/BigBlockAllocator.java",
"license": "bsd-3-clause",
"size": 18869
} | [
"net.yadan.banana.memory.OutOfMemoryException"
] | import net.yadan.banana.memory.OutOfMemoryException; | import net.yadan.banana.memory.*; | [
"net.yadan.banana"
] | net.yadan.banana; | 2,628,275 |
if (abort()) {
return;
}
try {
if (mainFrame != null) {
if (EventQueue.isDispatchThread()) {
mainFrame.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); | if (abort()) { return; } try { if (mainFrame != null) { if (EventQueue.isDispatchThread()) { mainFrame.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR)); | /**
* Close the active wallet.
*/ | Close the active wallet | actionPerformed | {
"repo_name": "allyouneedinfoappstoclick/multibit",
"path": "src/main/java/org/multibit/viewsystem/swing/action/CloseWalletAction.java",
"license": "mit",
"size": 5621
} | [
"java.awt.Cursor",
"java.awt.EventQueue"
] | import java.awt.Cursor; import java.awt.EventQueue; | import java.awt.*; | [
"java.awt"
] | java.awt; | 1,319,642 |
public static BufferedImage convertRenderedImage(RenderedImage img) {
if (img instanceof BufferedImage) return (BufferedImage) img;
ColorModel cm = img.getColorModel();
int width = img.getWidth();
int height = img.getHeight();
WritableRaster raster = cm.createCompatibleWritableRaster(width, height);
boolean isAlphaPremultiplied = cm.isAlphaPremultiplied();
Hashtable<String, Object> properties = new Hashtable<String, Object>();
String[] keys = img.getPropertyNames();
if (keys != null) {
for (int i=0; i<keys.length; i++) {
properties.put(keys[i], img.getProperty(keys[i]));
}
}
BufferedImage result = new BufferedImage(cm,
raster, isAlphaPremultiplied, properties);
img.copyData(raster);
return result;
} | static BufferedImage function(RenderedImage img) { if (img instanceof BufferedImage) return (BufferedImage) img; ColorModel cm = img.getColorModel(); int width = img.getWidth(); int height = img.getHeight(); WritableRaster raster = cm.createCompatibleWritableRaster(width, height); boolean isAlphaPremultiplied = cm.isAlphaPremultiplied(); Hashtable<String, Object> properties = new Hashtable<String, Object>(); String[] keys = img.getPropertyNames(); if (keys != null) { for (int i=0; i<keys.length; i++) { properties.put(keys[i], img.getProperty(keys[i])); } } BufferedImage result = new BufferedImage(cm, raster, isAlphaPremultiplied, properties); img.copyData(raster); return result; } | /**
* Converts a java.awt.image.RenderedImage into a
* java.awt.image.BufferedImage.
*
* This code was adapted from
* <a href="http://www.jguru.com/faq/view.jsp?EID=114602">a jGuru post</a>.
*/ | Converts a java.awt.image.RenderedImage into a java.awt.image.BufferedImage. This code was adapted from a jGuru post | convertRenderedImage | {
"repo_name": "bramalingam/bioformats",
"path": "components/formats-bsd/src/loci/formats/gui/AWTImageTools.java",
"license": "gpl-2.0",
"size": 71667
} | [
"java.awt.image.BufferedImage",
"java.awt.image.ColorModel",
"java.awt.image.RenderedImage",
"java.awt.image.WritableRaster",
"java.util.Hashtable"
] | import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.RenderedImage; import java.awt.image.WritableRaster; import java.util.Hashtable; | import java.awt.image.*; import java.util.*; | [
"java.awt",
"java.util"
] | java.awt; java.util; | 1,980,586 |
boolean isVitalRecord(NodeRef nodeRef);
//void initialiseVitalRecord(NodeRef nodeRef);
| boolean isVitalRecord(NodeRef nodeRef); | /**
* Indicates whether the record is a vital one or not.
*
* @param nodeRef node reference
* @return boolean true if this is a vital record, false otherwise
*/ | Indicates whether the record is a vital one or not | isVitalRecord | {
"repo_name": "dnacreative/records-management",
"path": "rm-server/source/java/org/alfresco/module/org_alfresco_module_rm/vital/VitalRecordService.java",
"license": "lgpl-3.0",
"size": 2356
} | [
"org.alfresco.service.cmr.repository.NodeRef"
] | import org.alfresco.service.cmr.repository.NodeRef; | import org.alfresco.service.cmr.repository.*; | [
"org.alfresco.service"
] | org.alfresco.service; | 766,338 |
public Object getProperty(String name) {
if (name.equals(XMLConstants.ACCESS_EXTERNAL_DTD)) {
return _accessExternalDTD;
} else if (name.equals(XalanConstants.SECURITY_MANAGER)) {
return _xmlSecurityManager;
}
return null;
} | Object function(String name) { if (name.equals(XMLConstants.ACCESS_EXTERNAL_DTD)) { return _accessExternalDTD; } else if (name.equals(XalanConstants.SECURITY_MANAGER)) { return _xmlSecurityManager; } return null; } | /**
* Get property value
*/ | Get property value | getProperty | {
"repo_name": "wangsongpeng/jdk-src",
"path": "src/main/java/com/sun/org/apache/xml/internal/utils/XMLReaderManager.java",
"license": "apache-2.0",
"size": 9698
} | [
"com.sun.org.apache.xalan.internal.XalanConstants",
"javax.xml.XMLConstants"
] | import com.sun.org.apache.xalan.internal.XalanConstants; import javax.xml.XMLConstants; | import com.sun.org.apache.xalan.internal.*; import javax.xml.*; | [
"com.sun.org",
"javax.xml"
] | com.sun.org; javax.xml; | 2,611,029 |
public DateType getDateProperty(String qualifiedName)
{
AbstractField prop = getAbstractProperty(qualifiedName);
if (prop != null)
{
if (prop instanceof DateType)
{
return (DateType) prop;
}
else
{
throw new IllegalArgumentException("Property asked is not a Date Property");
}
}
return null;
} | DateType function(String qualifiedName) { AbstractField prop = getAbstractProperty(qualifiedName); if (prop != null) { if (prop instanceof DateType) { return (DateType) prop; } else { throw new IllegalArgumentException(STR); } } return null; } | /**
* Get the Date property with its name
*
* @param qualifiedName
* The name of the property to get, it must include the namespace prefix, e.g. "pdf:Keywords".
* @return Date Type property
*
*/ | Get the Date property with its name | getDateProperty | {
"repo_name": "BezrukovM/veraPDF-pdfbox",
"path": "xmpbox/src/main/java/org/apache/xmpbox/schema/XMPSchema.java",
"license": "apache-2.0",
"size": 42084
} | [
"org.apache.xmpbox.type.AbstractField",
"org.apache.xmpbox.type.DateType"
] | import org.apache.xmpbox.type.AbstractField; import org.apache.xmpbox.type.DateType; | import org.apache.xmpbox.type.*; | [
"org.apache.xmpbox"
] | org.apache.xmpbox; | 2,215,938 |
public final Property<String> name() {
return metaBean().name().createProperty(this);
} | final Property<String> function() { return metaBean().name().createProperty(this); } | /**
* Gets the the {@code name} property.
* @return the property, not null
*/ | Gets the the name property | name | {
"repo_name": "McLeodMoores/starling",
"path": "projects/financial/src/main/java/com/opengamma/financial/analytics/curve/AbstractCurveDefinition.java",
"license": "apache-2.0",
"size": 8341
} | [
"org.joda.beans.Property"
] | import org.joda.beans.Property; | import org.joda.beans.*; | [
"org.joda.beans"
] | org.joda.beans; | 1,660,730 |
public static ParquetMetadata mergeMetadataFiles(List<Path> files, Configuration conf) throws IOException {
Preconditions.checkArgument(!files.isEmpty(), "Cannot merge an empty list of metadata");
GlobalMetaData globalMetaData = null;
List<BlockMetaData> blocks = new ArrayList<BlockMetaData>();
for (Path p : files) {
ParquetMetadata pmd = ParquetFileReader.readFooter(conf, p, ParquetMetadataConverter.NO_FILTER);
FileMetaData fmd = pmd.getFileMetaData();
globalMetaData = mergeInto(fmd, globalMetaData, true);
blocks.addAll(pmd.getBlocks());
}
// collapse GlobalMetaData into a single FileMetaData, which will throw if they are not compatible
return new ParquetMetadata(globalMetaData.merge(), blocks);
} | static ParquetMetadata function(List<Path> files, Configuration conf) throws IOException { Preconditions.checkArgument(!files.isEmpty(), STR); GlobalMetaData globalMetaData = null; List<BlockMetaData> blocks = new ArrayList<BlockMetaData>(); for (Path p : files) { ParquetMetadata pmd = ParquetFileReader.readFooter(conf, p, ParquetMetadataConverter.NO_FILTER); FileMetaData fmd = pmd.getFileMetaData(); globalMetaData = mergeInto(fmd, globalMetaData, true); blocks.addAll(pmd.getBlocks()); } return new ParquetMetadata(globalMetaData.merge(), blocks); } | /**
* Given a list of metadata files, merge them into a single ParquetMetadata
* Requires that the schemas be compatible, and the extraMetadata be exactly equal.
*/ | Given a list of metadata files, merge them into a single ParquetMetadata Requires that the schemas be compatible, and the extraMetadata be exactly equal | mergeMetadataFiles | {
"repo_name": "rdblue/parquet-mr",
"path": "parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ParquetFileWriter.java",
"license": "apache-2.0",
"size": 34129
} | [
"java.io.IOException",
"java.util.ArrayList",
"java.util.List",
"org.apache.hadoop.conf.Configuration",
"org.apache.hadoop.fs.Path",
"org.apache.parquet.Preconditions",
"org.apache.parquet.format.converter.ParquetMetadataConverter",
"org.apache.parquet.hadoop.metadata.BlockMetaData",
"org.apache.parquet.hadoop.metadata.FileMetaData",
"org.apache.parquet.hadoop.metadata.GlobalMetaData",
"org.apache.parquet.hadoop.metadata.ParquetMetadata"
] | import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.parquet.Preconditions; import org.apache.parquet.format.converter.ParquetMetadataConverter; import org.apache.parquet.hadoop.metadata.BlockMetaData; import org.apache.parquet.hadoop.metadata.FileMetaData; import org.apache.parquet.hadoop.metadata.GlobalMetaData; import org.apache.parquet.hadoop.metadata.ParquetMetadata; | import java.io.*; import java.util.*; import org.apache.hadoop.conf.*; import org.apache.hadoop.fs.*; import org.apache.parquet.*; import org.apache.parquet.format.converter.*; import org.apache.parquet.hadoop.metadata.*; | [
"java.io",
"java.util",
"org.apache.hadoop",
"org.apache.parquet"
] | java.io; java.util; org.apache.hadoop; org.apache.parquet; | 2,859,358 |
@ServiceMethod(returns = ReturnType.SINGLE)
PrivateEndpointConnectionInner delete(
String resourceGroupName, String searchServiceName, String privateEndpointConnectionName); | @ServiceMethod(returns = ReturnType.SINGLE) PrivateEndpointConnectionInner delete( String resourceGroupName, String searchServiceName, String privateEndpointConnectionName); | /**
* Disconnects the private endpoint connection and deletes it from the search service.
*
* @param resourceGroupName The name of the resource group within the current subscription. You can obtain this
* value from the Azure Resource Manager API or the portal.
* @param searchServiceName The name of the Azure Cognitive Search service associated with the specified resource
* group.
* @param privateEndpointConnectionName The name of the private endpoint connection to the Azure Cognitive Search
* service with the specified resource group.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return describes an existing Private Endpoint connection to the Azure Cognitive Search service.
*/ | Disconnects the private endpoint connection and deletes it from the search service | delete | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-search/src/main/java/com/azure/resourcemanager/search/fluent/PrivateEndpointConnectionsClient.java",
"license": "mit",
"size": 26598
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.resourcemanager.search.fluent.models.PrivateEndpointConnectionInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.resourcemanager.search.fluent.models.PrivateEndpointConnectionInner; | import com.azure.core.annotation.*; import com.azure.resourcemanager.search.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 321,681 |
public void addServletNames(String... servletNames) {
Assert.notNull(servletNames, "ServletNames must not be null");
this.servletNames.addAll(Arrays.asList(servletNames));
} | void function(String... servletNames) { Assert.notNull(servletNames, STR); this.servletNames.addAll(Arrays.asList(servletNames)); } | /**
* Add servlet names for the filter.
* @param servletNames the servlet names to add
*/ | Add servlet names for the filter | addServletNames | {
"repo_name": "ChunPIG/spring-boot",
"path": "spring-boot/src/main/java/org/springframework/boot/context/embedded/FilterRegistrationBean.java",
"license": "apache-2.0",
"size": 10805
} | [
"java.util.Arrays",
"org.springframework.util.Assert"
] | import java.util.Arrays; import org.springframework.util.Assert; | import java.util.*; import org.springframework.util.*; | [
"java.util",
"org.springframework.util"
] | java.util; org.springframework.util; | 292,573 |
private Element toElement(String xml) {
DOMResult dom = new DOMResult();
try {
TransformerFactory.newInstance().newTransformer().transform(new StreamSource(new StringReader(xml)), dom);
} catch (Exception e) {
e.printStackTrace();
}
return ((Document) dom.getNode()).getDocumentElement();
} | Element function(String xml) { DOMResult dom = new DOMResult(); try { TransformerFactory.newInstance().newTransformer().transform(new StreamSource(new StringReader(xml)), dom); } catch (Exception e) { e.printStackTrace(); } return ((Document) dom.getNode()).getDocumentElement(); } | /**
* To element.
*
* @param xml the xml
* @return the element
*/ | To element | toElement | {
"repo_name": "cunningt/fuse-bxms-integ",
"path": "quickstarts/switchyard-rules-interview-container/src/main/java/org/switchyard/quickstarts/rules/interview/Transformers.java",
"license": "apache-2.0",
"size": 3318
} | [
"java.io.StringReader",
"javax.xml.transform.TransformerFactory",
"javax.xml.transform.dom.DOMResult",
"javax.xml.transform.stream.StreamSource",
"org.w3c.dom.Document",
"org.w3c.dom.Element"
] | import java.io.StringReader; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMResult; import javax.xml.transform.stream.StreamSource; import org.w3c.dom.Document; import org.w3c.dom.Element; | import java.io.*; import javax.xml.transform.*; import javax.xml.transform.dom.*; import javax.xml.transform.stream.*; import org.w3c.dom.*; | [
"java.io",
"javax.xml",
"org.w3c.dom"
] | java.io; javax.xml; org.w3c.dom; | 376,031 |
public Map<String, PlayerInfo> getPlayers() {
return players;
} | Map<String, PlayerInfo> function() { return players; } | /**
* Returns a map from player name to player info for all players.
*
* @return Map
*/ | Returns a map from player name to player info for all players | getPlayers | {
"repo_name": "benruijl/WalledIn",
"path": "src/main/java/walledin/game/GameLogicManager.java",
"license": "gpl-3.0",
"size": 19354
} | [
"java.util.Map"
] | import java.util.Map; | import java.util.*; | [
"java.util"
] | java.util; | 2,252,118 |
int consume( Name name,
NodeKey key ); | int consume( Name name, NodeKey key ); | /**
* Consume the next child with the supplied name and key.
*
* @param name the name of the node; may not be null
* @param key the key for the node; may not be null
* @return the same-name-sibling index for this node; always positive
*/ | Consume the next child with the supplied name and key | consume | {
"repo_name": "stemig62/modeshape",
"path": "modeshape-jcr/src/main/java/org/modeshape/jcr/cache/ChildReferences.java",
"license": "apache-2.0",
"size": 18175
} | [
"org.modeshape.jcr.value.Name"
] | import org.modeshape.jcr.value.Name; | import org.modeshape.jcr.value.*; | [
"org.modeshape.jcr"
] | org.modeshape.jcr; | 927,070 |
public void setLocations(Collection<String> locationStrings) {
List<PropertiesLocation> locations = new ArrayList<>();
if (locationStrings != null) {
for (String locationString : locationStrings) {
locations.add(new PropertiesLocation(locationString));
}
}
setLocations(locations);
} | void function(Collection<String> locationStrings) { List<PropertiesLocation> locations = new ArrayList<>(); if (locationStrings != null) { for (String locationString : locationStrings) { locations.add(new PropertiesLocation(locationString)); } } setLocations(locations); } | /**
* A list of locations to load properties. This option will override any default locations and only use the
* locations from this option.
*/ | A list of locations to load properties. This option will override any default locations and only use the locations from this option | setLocations | {
"repo_name": "tadayosi/camel",
"path": "core/camel-base/src/main/java/org/apache/camel/component/properties/PropertiesComponent.java",
"license": "apache-2.0",
"size": 25517
} | [
"java.util.ArrayList",
"java.util.Collection",
"java.util.List"
] | import java.util.ArrayList; import java.util.Collection; import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 1,139,865 |
protected void addOcciComputeCoresPropertyDescriptor(Object object) {
itemPropertyDescriptors.add
(createItemPropertyDescriptor
(((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(),
getResourceLocator(),
getString("_UI_G3_16xlarge_occiComputeCores_feature"),
getString("_UI_PropertyDescriptor_description", "_UI_G3_16xlarge_occiComputeCores_feature", "_UI_G3_16xlarge_type"),
Ec2Package.eINSTANCE.getG3_16xlarge_OcciComputeCores(),
true,
false,
false,
ItemPropertyDescriptor.GENERIC_VALUE_IMAGE,
null,
null));
} | void function(Object object) { itemPropertyDescriptors.add (createItemPropertyDescriptor (((ComposeableAdapterFactory)adapterFactory).getRootAdapterFactory(), getResourceLocator(), getString(STR), getString(STR, STR, STR), Ec2Package.eINSTANCE.getG3_16xlarge_OcciComputeCores(), true, false, false, ItemPropertyDescriptor.GENERIC_VALUE_IMAGE, null, null)); } | /**
* This adds a property descriptor for the Occi Compute Cores feature.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/ | This adds a property descriptor for the Occi Compute Cores feature. | addOcciComputeCoresPropertyDescriptor | {
"repo_name": "occiware/Multi-Cloud-Studio",
"path": "plugins/org.eclipse.cmf.occi.multicloud.aws.ec2.edit/src-gen/org/eclipse/cmf/occi/multicloud/aws/ec2/provider/G3_16xlargeItemProvider.java",
"license": "epl-1.0",
"size": 7648
} | [
"org.eclipse.cmf.occi.multicloud.aws.ec2.Ec2Package",
"org.eclipse.emf.edit.provider.ComposeableAdapterFactory",
"org.eclipse.emf.edit.provider.ItemPropertyDescriptor"
] | import org.eclipse.cmf.occi.multicloud.aws.ec2.Ec2Package; import org.eclipse.emf.edit.provider.ComposeableAdapterFactory; import org.eclipse.emf.edit.provider.ItemPropertyDescriptor; | import org.eclipse.cmf.occi.multicloud.aws.ec2.*; import org.eclipse.emf.edit.provider.*; | [
"org.eclipse.cmf",
"org.eclipse.emf"
] | org.eclipse.cmf; org.eclipse.emf; | 2,144,971 |
public boolean printAssetStatementReportBodyForEndowmentTotal(List<AssetStatementReportDataHolder> endowmentAssetStatementReportDataHolders, Document document) {
BigDecimal totalHistoryIncomeCash = BigDecimal.ZERO;
BigDecimal totalHistoryPrincipalCash = BigDecimal.ZERO;
TreeMap<Integer, TreeMap<String, List<ReportGroupData>>> reportGroupsForIncomeTotal = null;
TreeMap<Integer, TreeMap<String, List<ReportGroupData>>> reportGroupsForPrincipalTotal = null;
// get the cash totals
for (AssetStatementReportDataHolder data : endowmentAssetStatementReportDataHolders) {
totalHistoryIncomeCash = totalHistoryIncomeCash.add(data.getHistoryIncomeCash());
totalHistoryPrincipalCash = totalHistoryPrincipalCash.add(data.getHistoryPrincipalCash());
}
// for income
reportGroupsForIncomeTotal = createReportGroupsForTotal(endowmentAssetStatementReportDataHolders, IncomePrincipalIndicator.INCOME);
// for principal
reportGroupsForPrincipalTotal = createReportGroupsForTotal(endowmentAssetStatementReportDataHolders, IncomePrincipalIndicator.PRINCIPAL);
// for each kemid
try {
Font cellFont = regularFont;
// for the common info
AssetStatementReportDataHolder reportData = endowmentAssetStatementReportDataHolders.get(0);
document.newPage();
// header
StringBuffer title = new StringBuffer();
title.append(reportData.getInstitution()).append("\n");
title.append("STATEMENT OF ASSETS FOR PERIOD ENDING").append("\n");
title.append(reportData.getMonthEndDate()).append("\n\n");
Paragraph header = new Paragraph(title.toString());
header.setAlignment(Element.ALIGN_CENTER);
document.add(header);
// report table
float[] colsWidth = {15f, 17f, 17f, 17f, 17f, 17f};
PdfPTable table = new PdfPTable(colsWidth);
table.setWidthPercentage(FULL_TABLE_WIDTH);
table.getDefaultCell().setPadding(5);
// column titles
table.addCell("");
table.addCell(createCell("UNITS HELD", titleFont, Element.ALIGN_RIGHT, true));
table.addCell(createCell("MARKET VALUE", titleFont, Element.ALIGN_RIGHT, true));
table.addCell(createCell("ESTIMATED\nANNUAL INCOME", titleFont, Element.ALIGN_RIGHT, true));
table.addCell(createCell("FY REMAINDER ESTIMATED\nANNUAL INCOME", titleFont, Element.ALIGN_RIGHT, true));
table.addCell(createCell("NEXT FY ESTIMATED\nANNUAL INCOME", titleFont, Element.ALIGN_RIGHT, true));
// 1. Expendable funds
PdfPCell cellExpendableFunds = new PdfPCell(new Paragraph("EXPENDABLE FUNDS", titleFont));
cellExpendableFunds.setColspan(6);
table.addCell(cellExpendableFunds);
PdfPCell cellCashEquivalnets = new PdfPCell(new Paragraph("CASH AND EQUIVALENTS", titleFont));
cellCashEquivalnets.setColspan(6);
table.addCell(cellCashEquivalnets);
// report groups for income
printReportGroupForIncomeEndowmentTotal(reportGroupsForIncomeTotal, totalHistoryIncomeCash, document, table, cellFont);
// 2. Endowed funds
PdfPCell cellEndowedFunds = new PdfPCell(new Paragraph("ENDOWED FUNDS", titleFont));
cellEndowedFunds.setColspan(6);
table.addCell(cellEndowedFunds);
table.addCell(cellCashEquivalnets);
// report groups for principal
printReportGroupForPrincipalEndowmentTotal(reportGroupsForPrincipalTotal, totalHistoryPrincipalCash, document, table, cellFont);
// 3. total (endowment + non-endowed)
PdfPCell blank = new PdfPCell(new Paragraph("", cellFont));
blank.setColspan(6);
blank.setBackgroundColor(Color.LIGHT_GRAY);
table.addCell(blank);
BigDecimal totalKemidMarketValue = BigDecimal.ZERO;
BigDecimal totalKemidEstimatedAnnualIncome = BigDecimal.ZERO;
BigDecimal totalKemidFYRemainderEstimatedAnnualIncome = BigDecimal.ZERO;
BigDecimal totalKemidNextFYEstimayedAnnualIncome = BigDecimal.ZERO;
for (AssetStatementReportDataHolder data : endowmentAssetStatementReportDataHolders) {
totalKemidMarketValue = totalKemidMarketValue.add(data.getTotalSumOfMarketValue(IncomePrincipalIndicator.INCOME).add(data.getTotalSumOfMarketValue(IncomePrincipalIndicator.PRINCIPAL)));
totalKemidEstimatedAnnualIncome = totalKemidEstimatedAnnualIncome.add(data.getTotalSumOfEstimatedIncome(IncomePrincipalIndicator.INCOME).add(data.getTotalSumOfEstimatedIncome(IncomePrincipalIndicator.PRINCIPAL)));
totalKemidFYRemainderEstimatedAnnualIncome = totalKemidFYRemainderEstimatedAnnualIncome.add(data.getTotalSumOfRemainderOfFYEstimated(IncomePrincipalIndicator.INCOME).add(data.getTotalSumOfRemainderOfFYEstimated(IncomePrincipalIndicator.PRINCIPAL)));
totalKemidNextFYEstimayedAnnualIncome = totalKemidNextFYEstimayedAnnualIncome.add(data.getTotalSumOfNextFYEstimatedIncome(IncomePrincipalIndicator.INCOME).add(data.getTotalSumOfNextFYEstimatedIncome(IncomePrincipalIndicator.PRINCIPAL)));
}
table.addCell(new Paragraph("TOTAL KEMID VALUE", titleFont));
table.addCell("");
table.addCell(getAmountCell(totalKemidMarketValue.add(totalHistoryIncomeCash).add(totalHistoryPrincipalCash), titleFont));
table.addCell(getAmountCell(totalKemidEstimatedAnnualIncome, titleFont));
table.addCell(getAmountCell(totalKemidFYRemainderEstimatedAnnualIncome, titleFont));
table.addCell(getAmountCell(totalKemidNextFYEstimayedAnnualIncome, titleFont));
document.add(table);
} catch (Exception e) {
LOG.error(e.getMessage());
return false;
}
return true;
} | boolean function(List<AssetStatementReportDataHolder> endowmentAssetStatementReportDataHolders, Document document) { BigDecimal totalHistoryIncomeCash = BigDecimal.ZERO; BigDecimal totalHistoryPrincipalCash = BigDecimal.ZERO; TreeMap<Integer, TreeMap<String, List<ReportGroupData>>> reportGroupsForIncomeTotal = null; TreeMap<Integer, TreeMap<String, List<ReportGroupData>>> reportGroupsForPrincipalTotal = null; for (AssetStatementReportDataHolder data : endowmentAssetStatementReportDataHolders) { totalHistoryIncomeCash = totalHistoryIncomeCash.add(data.getHistoryIncomeCash()); totalHistoryPrincipalCash = totalHistoryPrincipalCash.add(data.getHistoryPrincipalCash()); } reportGroupsForIncomeTotal = createReportGroupsForTotal(endowmentAssetStatementReportDataHolders, IncomePrincipalIndicator.INCOME); reportGroupsForPrincipalTotal = createReportGroupsForTotal(endowmentAssetStatementReportDataHolders, IncomePrincipalIndicator.PRINCIPAL); try { Font cellFont = regularFont; AssetStatementReportDataHolder reportData = endowmentAssetStatementReportDataHolders.get(0); document.newPage(); StringBuffer title = new StringBuffer(); title.append(reportData.getInstitution()).append("\n"); title.append(STR).append("\n"); title.append(reportData.getMonthEndDate()).append("\n\n"); Paragraph header = new Paragraph(title.toString()); header.setAlignment(Element.ALIGN_CENTER); document.add(header); float[] colsWidth = {15f, 17f, 17f, 17f, 17f, 17f}; PdfPTable table = new PdfPTable(colsWidth); table.setWidthPercentage(FULL_TABLE_WIDTH); table.getDefaultCell().setPadding(5); table.addCell(STRUNITS HELDSTRMARKET VALUESTRESTIMATED\nANNUAL INCOMESTRFY REMAINDER ESTIMATED\nANNUAL INCOMESTRNEXT FY ESTIMATED\nANNUAL INCOMESTREXPENDABLE FUNDSSTRCASH AND EQUIVALENTSSTRENDOWED FUNDSSTRSTRTOTAL KEMID VALUESTR"); table.addCell(getAmountCell(totalKemidMarketValue.add(totalHistoryIncomeCash).add(totalHistoryPrincipalCash), titleFont)); table.addCell(getAmountCell(totalKemidEstimatedAnnualIncome, titleFont)); table.addCell(getAmountCell(totalKemidFYRemainderEstimatedAnnualIncome, titleFont)); table.addCell(getAmountCell(totalKemidNextFYEstimayedAnnualIncome, titleFont)); document.add(table); } catch (Exception e) { LOG.error(e.getMessage()); return false; } return true; } | /**
* Prints report body for endowment detail
*
* @param endowmentAssetStatementReportDataHolders
* @param document
* @return
*/ | Prints report body for endowment detail | printAssetStatementReportBodyForEndowmentTotal | {
"repo_name": "Ariah-Group/Finance",
"path": "af_webapp/src/main/java/org/kuali/kfs/module/endow/report/util/AssetStatementReportPrint.java",
"license": "apache-2.0",
"size": 86911
} | [
"com.lowagie.text.Document",
"com.lowagie.text.Element",
"com.lowagie.text.Font",
"com.lowagie.text.Paragraph",
"com.lowagie.text.pdf.PdfPTable",
"java.math.BigDecimal",
"java.util.List",
"java.util.TreeMap",
"org.kuali.kfs.module.endow.EndowConstants",
"org.kuali.kfs.module.endow.report.util.AssetStatementReportDataHolder"
] | import com.lowagie.text.Document; import com.lowagie.text.Element; import com.lowagie.text.Font; import com.lowagie.text.Paragraph; import com.lowagie.text.pdf.PdfPTable; import java.math.BigDecimal; import java.util.List; import java.util.TreeMap; import org.kuali.kfs.module.endow.EndowConstants; import org.kuali.kfs.module.endow.report.util.AssetStatementReportDataHolder; | import com.lowagie.text.*; import com.lowagie.text.pdf.*; import java.math.*; import java.util.*; import org.kuali.kfs.module.endow.*; import org.kuali.kfs.module.endow.report.util.*; | [
"com.lowagie.text",
"java.math",
"java.util",
"org.kuali.kfs"
] | com.lowagie.text; java.math; java.util; org.kuali.kfs; | 1,897,226 |
@Override
public final Object parseObject(final String text) throws ParseException {
final Element element = getTree(text, new ParsePosition(0));
final Object object = parse(element);
element.close();
return object;
} | final Object function(final String text) throws ParseException { final Element element = getTree(text, new ParsePosition(0)); final Object object = parse(element); element.close(); return object; } | /**
* Parses a <cite>Well Know Text</cite> (WKT).
*
* @param text The text to be parsed.
* @return The object.
* @throws ParseException if the string can't be parsed.
*/ | Parses a Well Know Text (WKT) | parseObject | {
"repo_name": "geotools/geotools",
"path": "modules/library/referencing/src/main/java/org/geotools/referencing/wkt/AbstractParser.java",
"license": "lgpl-2.1",
"size": 13956
} | [
"java.text.ParseException",
"java.text.ParsePosition"
] | import java.text.ParseException; import java.text.ParsePosition; | import java.text.*; | [
"java.text"
] | java.text; | 2,657,565 |
public Drug addDrug( LoggedInInfo info, Drug d) {
LogAction.addLogSynchronous(info, "RxManager.addDrug", "providerNo=" +
info.getLoggedInProviderNo()
+ " drug.brandName="+d.getBrandName()
+ " demographicNo="+d.getDemographicId());
// Will throw an exception if access is denied.
this.writeCheck(info, d.getDemographicId());
// Have to set ID to null so that database
// can auto-generate one for this drug.
d.setId(null);
if (this.drugDao.addNewDrug(d)) {
// If the addNewDrug(d) call succeeds d will
// contain the ID that was auto-generated by the
// database.
return d;
} else {
return null;
}
} | Drug function( LoggedInInfo info, Drug d) { LogAction.addLogSynchronous(info, STR, STR + info.getLoggedInProviderNo() + STR+d.getBrandName() + STR+d.getDemographicId()); this.writeCheck(info, d.getDemographicId()); d.setId(null); if (this.drugDao.addNewDrug(d)) { return d; } else { return null; } } | /**
* Adds a new drug to the database.
*
* @param info information regarding the logged in user.
* @param d the drug object to add to the database.
*
* @return the drug object that was added.
*/ | Adds a new drug to the database | addDrug | {
"repo_name": "scoophealth/oscar",
"path": "src/main/java/org/oscarehr/managers/RxManager.java",
"license": "gpl-2.0",
"size": 21251
} | [
"org.oscarehr.common.model.Drug",
"org.oscarehr.util.LoggedInInfo"
] | import org.oscarehr.common.model.Drug; import org.oscarehr.util.LoggedInInfo; | import org.oscarehr.common.model.*; import org.oscarehr.util.*; | [
"org.oscarehr.common",
"org.oscarehr.util"
] | org.oscarehr.common; org.oscarehr.util; | 729,199 |
List<CutOverEvent> getEvents() {
return this.events;
} | List<CutOverEvent> getEvents() { return this.events; } | /**
* <p>Returns the list of all associated cutover events. </p>
*
* @return unmodifiable list
* @since 3.0
*/ | Returns the list of all associated cutover events. | getEvents | {
"repo_name": "MenoData/Time4J",
"path": "base/src/main/java/net/time4j/history/ChronoHistory.java",
"license": "lgpl-2.1",
"size": 75558
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 293,190 |
Date getCurrentTime() throws IOException; | Date getCurrentTime() throws IOException; | /**
* Get the current time from the system.
*
* @return the Date object created
* @throws IOException thrown when an issue occurs
*/ | Get the current time from the system | getCurrentTime | {
"repo_name": "googleinterns/blueprint-step-2020",
"path": "src/main/java/com/google/sps/model/CalendarClient.java",
"license": "apache-2.0",
"size": 2664
} | [
"java.io.IOException",
"java.util.Date"
] | import java.io.IOException; import java.util.Date; | import java.io.*; import java.util.*; | [
"java.io",
"java.util"
] | java.io; java.util; | 2,625,312 |
public CallSummary<EmptyPayload, WorkerResponse> getWorker(String provisionerId, String workerType, String workerGroup, String workerId) throws APICallFailure {
return apiCall(null, "GET", "/provisioners/" + uriEncode(provisionerId) + "/worker-types/" + uriEncode(workerType) + "/workers/" + uriEncode(workerGroup) + "/" + uriEncode(workerId), WorkerResponse.class);
}
/**
* Quarantine a worker | CallSummary<EmptyPayload, WorkerResponse> function(String provisionerId, String workerType, String workerGroup, String workerId) throws APICallFailure { return apiCall(null, "GET", STR + uriEncode(provisionerId) + STR + uriEncode(workerType) + STR + uriEncode(workerGroup) + "/" + uriEncode(workerId), WorkerResponse.class); } /** * Quarantine a worker | /**
* Get a worker from a worker-type.
*
* @see "[Get a worker-type API Documentation](https://docs.taskcluster.net/reference/platform/queue/api-docs#getWorker)"
*/ | Get a worker from a worker-type | getWorker | {
"repo_name": "taskcluster/taskcluster-client-java",
"path": "src/main/java/org/mozilla/taskcluster/client/queue/Queue.java",
"license": "mpl-2.0",
"size": 45666
} | [
"org.mozilla.taskcluster.client.APICallFailure",
"org.mozilla.taskcluster.client.CallSummary",
"org.mozilla.taskcluster.client.EmptyPayload"
] | import org.mozilla.taskcluster.client.APICallFailure; import org.mozilla.taskcluster.client.CallSummary; import org.mozilla.taskcluster.client.EmptyPayload; | import org.mozilla.taskcluster.client.*; | [
"org.mozilla.taskcluster"
] | org.mozilla.taskcluster; | 2,482,897 |
public
Deferred<ArrayList<KeyValue>> getClosestRowBefore(final RegionInfo region,
final byte[] tabl,
final byte[] row,
final byte[] family) {
final class GetClosestRowBefore extends HBaseRpc {
GetClosestRowBefore() {
super(tabl, row);
} | Deferred<ArrayList<KeyValue>> function(final RegionInfo region, final byte[] tabl, final byte[] row, final byte[] family) { final class GetClosestRowBefore extends HBaseRpc { GetClosestRowBefore() { super(tabl, row); } | /**
* Finds the highest row that's less than or equal to the given row.
* @param region The region in which to search.
* @param tabl The table to which the key belongs.
* @param row The row to search.
* @param family The family to get.
* @return A Deferred {@link ArrayList} of {@link KeyValue}. The list is
* guaranteed to be non-{@code null} but may be empty.
*/ | Finds the highest row that's less than or equal to the given row | getClosestRowBefore | {
"repo_name": "SteamShon/asynchbase",
"path": "src/RegionClient.java",
"license": "bsd-3-clause",
"size": 88345
} | [
"com.stumbleupon.async.Deferred",
"java.util.ArrayList"
] | import com.stumbleupon.async.Deferred; import java.util.ArrayList; | import com.stumbleupon.async.*; import java.util.*; | [
"com.stumbleupon.async",
"java.util"
] | com.stumbleupon.async; java.util; | 129,539 |