method
stringlengths 13
441k
| clean_method
stringlengths 7
313k
| doc
stringlengths 17
17.3k
| comment
stringlengths 3
1.42k
| method_name
stringlengths 1
273
| extra
dict | imports
sequence | imports_info
stringlengths 19
34.8k
| cluster_imports_info
stringlengths 15
3.66k
| libraries
sequence | libraries_info
stringlengths 6
661
| id
int64 0
2.92M
|
---|---|---|---|---|---|---|---|---|---|---|---|
public boolean isUseAnonymousId()
{
if (!isAnonymousEnabled())
{
return false;
}
if (useAnonymousId != null)
{
// useAnonymousId has already been supplied
return useAnonymousId;
}
// Determine if the containing topic is anonymous
Topic topic = message.getTopic();
if (topic != null)
{
// Determine if topic is anonymous
// Topic may or may not be detached (ie. it may have been retrieved as the surrogate key of this mesage, so its only initialized attribute may be the topic ID).
// Retrieve the full topic only when this is an issue via a try / catch
Boolean postAnonymous = null;
try
{
postAnonymous = topic.getPostAnonymous();
}
catch (RuntimeException e)
{
// Topic must be detached, retrieve it via ForumManager
MessageForumsForumManager forumManager = (MessageForumsForumManager)ComponentManager.get("org.sakaiproject.api.app.messageforums.MessageForumsForumManager");
topic = forumManager.getTopicById(true, topic.getId());
postAnonymous = topic.getPostAnonymous();
}
if (postAnonymous)
{
// Are we supposed to reveal authors' identities to certain roles in this topic?
if (topic.getRevealIDsToRoles())
{
if (getUIPermissionsManager().isIdentifyAnonAuthors(topic))
{
// This user has permission to identify authors in this topic
useAnonymousId = Boolean.FALSE;
return false;
}
}
useAnonymousId = Boolean.TRUE;
return true;
}
}
// Topic is not anonymous; reveal identities
useAnonymousId = Boolean.FALSE;
return false;
} | boolean function() { if (!isAnonymousEnabled()) { return false; } if (useAnonymousId != null) { return useAnonymousId; } Topic topic = message.getTopic(); if (topic != null) { Boolean postAnonymous = null; try { postAnonymous = topic.getPostAnonymous(); } catch (RuntimeException e) { MessageForumsForumManager forumManager = (MessageForumsForumManager)ComponentManager.get(STR); topic = forumManager.getTopicById(true, topic.getId()); postAnonymous = topic.getPostAnonymous(); } if (postAnonymous) { if (topic.getRevealIDsToRoles()) { if (getUIPermissionsManager().isIdentifyAnonAuthors(topic)) { useAnonymousId = Boolean.FALSE; return false; } } useAnonymousId = Boolean.TRUE; return true; } } useAnonymousId = Boolean.FALSE; return false; } | /**
* Determines whether the message's author should display as an anonymousID.
* If setUseAnonymousId has not yet been invoked against 'this',
* this method will go to the database, and then cache the result for any subsequent calls.
* It's preferable to use setUseAnonymousId up front wherever possible for performance gains
*/ | Determines whether the message's author should display as an anonymousID. If setUseAnonymousId has not yet been invoked against 'this', this method will go to the database, and then cache the result for any subsequent calls. It's preferable to use setUseAnonymousId up front wherever possible for performance gains | isUseAnonymousId | {
"repo_name": "OpenCollabZA/sakai",
"path": "msgcntr/messageforums-app/src/java/org/sakaiproject/tool/messageforums/ui/DiscussionMessageBean.java",
"license": "apache-2.0",
"size": 14825
} | [
"org.sakaiproject.api.app.messageforums.MessageForumsForumManager",
"org.sakaiproject.api.app.messageforums.Topic",
"org.sakaiproject.component.cover.ComponentManager"
] | import org.sakaiproject.api.app.messageforums.MessageForumsForumManager; import org.sakaiproject.api.app.messageforums.Topic; import org.sakaiproject.component.cover.ComponentManager; | import org.sakaiproject.api.app.messageforums.*; import org.sakaiproject.component.cover.*; | [
"org.sakaiproject.api",
"org.sakaiproject.component"
] | org.sakaiproject.api; org.sakaiproject.component; | 261,153 |
/**
 * Collects all section-reference nodes located directly under the base path.
 *
 * @return the list of SECTIONREF nodes found (possibly empty)
 */
public List getSectionRefs()
{
log.debug("getSectionRefs()");
final String sectionRefPath = basePath + "/" + QTIConstantStrings.SECTIONREF;
return this.selectNodes(sectionRefPath);
}
| List function() { log.debug(STR); String xpath = basePath + "/" + QTIConstantStrings.SECTIONREF; return this.selectNodes(xpath); } | /**
* Get a collection of section refs.
*
* @return
*/ | Get a collection of section refs | getSectionRefs | {
"repo_name": "harfalm/Sakai-10.1",
"path": "samigo/samigo-cp/src/java/org/sakaiproject/tool/assessment/contentpackaging/Manifest.java",
"license": "apache-2.0",
"size": 7755
} | [
"java.util.List",
"org.sakaiproject.tool.assessment.qti.constants.QTIConstantStrings"
] | import java.util.List; import org.sakaiproject.tool.assessment.qti.constants.QTIConstantStrings; | import java.util.*; import org.sakaiproject.tool.assessment.qti.constants.*; | [
"java.util",
"org.sakaiproject.tool"
] | java.util; org.sakaiproject.tool; | 1,302,464 |
private void setInputConnectionTarget(final View targetView) {
if (containerView == null) {
Log.e(
TAG,
"Can't set the input connection target because there is no containerView to use as a handler.");
return;
} | void function(final View targetView) { if (containerView == null) { Log.e( TAG, STR); return; } | /**
* This is the crucial trick that gets the InputConnection creation to happen on the correct
* thread pre Android N.
* https://cs.chromium.org/chromium/src/content/public/android/java/src/org/chromium/content/browser/input/ThreadedInputConnectionFactory.java?l=169&rcl=f0698ee3e4483fad5b0c34159276f71cfaf81f3a
*
* <p>{@code targetView} should have a {@link View#getHandler} method with the thread that future
* InputConnections should be created on.
*/ | This is the crucial trick that gets the InputConnection creation to happen on the correct thread pre Android N. HREF targetView should have a <code>View#getHandler</code> method with the thread that future InputConnections should be created on | setInputConnectionTarget | {
"repo_name": "tvolkert/plugins",
"path": "packages/webview_flutter/webview_flutter_android/android/src/main/java/io/flutter/plugins/webviewflutter/InputAwareWebView.java",
"license": "bsd-3-clause",
"size": 9407
} | [
"android.util.Log",
"android.view.View"
] | import android.util.Log; import android.view.View; | import android.util.*; import android.view.*; | [
"android.util",
"android.view"
] | android.util; android.view; | 1,964,169 |
/**
 * Scans the subterm children and wraps each Cardinality element in its HLAPI
 * facade. WARNING: a fresh wrapper object is allocated for every match, so
 * this can create many objects in memory on large terms.
 *
 * @return a new list of CardinalityHLAPI wrappers (possibly empty)
 */
public java.util.List<fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI> getSubterm_multisets_CardinalityHLAPI(){
java.util.List<fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI> matches = new ArrayList<fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI>();
for (Term candidate : getSubterm()) {
// Only exact CardinalityImpl instances are wrapped; other Term subclasses are skipped.
if (candidate.getClass().equals(fr.lip6.move.pnml.symmetricnet.multisets.impl.CardinalityImpl.class)) {
matches.add(new fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI(
(fr.lip6.move.pnml.symmetricnet.multisets.Cardinality) candidate));
}
}
return matches;
}
| java.util.List<fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI> function(){ java.util.List<fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI> retour = new ArrayList<fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI>(); for (Term elemnt : getSubterm()) { if(elemnt.getClass().equals(fr.lip6.move.pnml.symmetricnet.multisets.impl.CardinalityImpl.class)){ retour.add(new fr.lip6.move.pnml.symmetricnet.multisets.hlapi.CardinalityHLAPI( (fr.lip6.move.pnml.symmetricnet.multisets.Cardinality)elemnt )); } } return retour; } | /**
* This accessor return a list of encapsulated subelement, only of CardinalityHLAPI kind.
* WARNING : this method can creates a lot of new object in memory.
*/ | This accessor return a list of encapsulated subelement, only of CardinalityHLAPI kind. WARNING : this method can creates a lot of new object in memory | getSubterm_multisets_CardinalityHLAPI | {
"repo_name": "lhillah/pnmlframework",
"path": "pnmlFw-SNNet/src/fr/lip6/move/pnml/symmetricnet/integers/hlapi/NumberConstantHLAPI.java",
"license": "epl-1.0",
"size": 94704
} | [
"fr.lip6.move.pnml.symmetricnet.terms.Term",
"java.util.ArrayList",
"java.util.List"
] | import fr.lip6.move.pnml.symmetricnet.terms.Term; import java.util.ArrayList; import java.util.List; | import fr.lip6.move.pnml.symmetricnet.terms.*; import java.util.*; | [
"fr.lip6.move",
"java.util"
] | fr.lip6.move; java.util; | 1,041,940 |
public void connectToNodeLight(final DiscoveryNode node) {
if (node.equals(localNode)) {
return;
}
transport.connectToNodeLight(node);
} | void function(final DiscoveryNode node) { if (node.equals(localNode)) { return; } transport.connectToNodeLight(node); } | /**
* Lightly connect to the specified node
*
* @param node the node to connect to
*/ | Lightly connect to the specified node | connectToNodeLight | {
"repo_name": "gmarz/elasticsearch",
"path": "core/src/main/java/org/elasticsearch/transport/TransportService.java",
"license": "apache-2.0",
"size": 45508
} | [
"org.elasticsearch.cluster.node.DiscoveryNode"
] | import org.elasticsearch.cluster.node.DiscoveryNode; | import org.elasticsearch.cluster.node.*; | [
"org.elasticsearch.cluster"
] | org.elasticsearch.cluster; | 1,407,372 |
void exitClassType_lf_classOrInterfaceType(@NotNull Java8Parser.ClassType_lf_classOrInterfaceTypeContext ctx); | void exitClassType_lf_classOrInterfaceType(@NotNull Java8Parser.ClassType_lf_classOrInterfaceTypeContext ctx); | /**
* Exit a parse tree produced by {@link Java8Parser#classType_lf_classOrInterfaceType}.
*
* @param ctx the parse tree
*/ | Exit a parse tree produced by <code>Java8Parser#classType_lf_classOrInterfaceType</code> | exitClassType_lf_classOrInterfaceType | {
"repo_name": "BigDaddy-Germany/WHOAMI",
"path": "WHOAMI/src/de/aima13/whoami/modules/syntaxcheck/languages/antlrgen/Java8Listener.java",
"license": "mit",
"size": 97945
} | [
"org.antlr.v4.runtime.misc.NotNull"
] | import org.antlr.v4.runtime.misc.NotNull; | import org.antlr.v4.runtime.misc.*; | [
"org.antlr.v4"
] | org.antlr.v4; | 100,315 |
protected User readUser(final InputStream inputStream)
throws ReaderException, IOException
{
Profiler profiler = new Profiler(AbstractUserAction.class);
syncOut.setHeader("Content-Type", acceptedContentType);
final User user;
if (acceptedContentType.equals(DEFAULT_CONTENT_TYPE))
{
UserReader userReader = new UserReader();
user = userReader.read(inputStream);
}
else if (acceptedContentType.equals(JSON_CONTENT_TYPE))
{
JsonUserReader userReader = new JsonUserReader();
user = userReader.read(inputStream);
}
else
{
// Should never happen.
throw new IOException("Unknown content being asked for: "
+ acceptedContentType);
}
profiler.checkpoint("readUser");
return user;
} | User function(final InputStream inputStream) throws ReaderException, IOException { Profiler profiler = new Profiler(AbstractUserAction.class); syncOut.setHeader(STR, acceptedContentType); final User user; if (acceptedContentType.equals(DEFAULT_CONTENT_TYPE)) { UserReader userReader = new UserReader(); user = userReader.read(inputStream); } else if (acceptedContentType.equals(JSON_CONTENT_TYPE)) { JsonUserReader userReader = new JsonUserReader(); user = userReader.read(inputStream); } else { throw new IOException(STR + acceptedContentType); } profiler.checkpoint(STR); return user; } | /**
* Read the user from the given stream of marshalled data.
*
* @param inputStream The stream to read in.
* @return User instance, never null.
*
* @throws IOException Any errors in reading the stream.
*/ | Read the user from the given stream of marshalled data | readUser | {
"repo_name": "opencadc/ac",
"path": "cadc-access-control-server/src/main/java/ca/nrc/cadc/ac/server/web/users/AbstractUserAction.java",
"license": "agpl-3.0",
"size": 14177
} | [
"ca.nrc.cadc.ac.ReaderException",
"ca.nrc.cadc.ac.User",
"ca.nrc.cadc.ac.json.JsonUserReader",
"ca.nrc.cadc.ac.xml.UserReader",
"ca.nrc.cadc.profiler.Profiler",
"java.io.IOException",
"java.io.InputStream"
] | import ca.nrc.cadc.ac.ReaderException; import ca.nrc.cadc.ac.User; import ca.nrc.cadc.ac.json.JsonUserReader; import ca.nrc.cadc.ac.xml.UserReader; import ca.nrc.cadc.profiler.Profiler; import java.io.IOException; import java.io.InputStream; | import ca.nrc.cadc.ac.*; import ca.nrc.cadc.ac.json.*; import ca.nrc.cadc.ac.xml.*; import ca.nrc.cadc.profiler.*; import java.io.*; | [
"ca.nrc.cadc",
"java.io"
] | ca.nrc.cadc; java.io; | 252,694 |
public CertificateDetails root() {
return this.root;
} | CertificateDetails function() { return this.root; } | /**
* Get root certificate.
*
* @return the root value
*/ | Get root certificate | root | {
"repo_name": "selvasingh/azure-sdk-for-java",
"path": "sdk/appservice/mgmt-v2019_08_01/src/main/java/com/microsoft/azure/management/appservice/v2019_08_01/implementation/AppServiceCertificateOrderInner.java",
"license": "mit",
"size": 12516
} | [
"com.microsoft.azure.management.appservice.v2019_08_01.CertificateDetails"
] | import com.microsoft.azure.management.appservice.v2019_08_01.CertificateDetails; | import com.microsoft.azure.management.appservice.v2019_08_01.*; | [
"com.microsoft.azure"
] | com.microsoft.azure; | 2,184,417 |
/**
 * Parses the template text into a tree of named blocks.
 * Blocks are delimited by HTML comments of the form
 * {@code <!-- BEGIN : name -->} ... {@code <!-- END : name -->} and may nest.
 * Each block is registered in {@code blockMap} under its dotted path
 * (e.g. "outer.inner"); literal text between markers is attached to the
 * currently open block as a TextContent item.
 *
 * @param content the raw template text to parse
 */
private void parseTemplate(String content)
{
// Group 1 = BEGIN|END keyword, group 2 = block name, group 3 = text up to
// the next marker (or end of input). DOTALL lets the text span newlines.
Pattern pattern = Pattern.compile("<!--\\s*(BEGIN|END)\\s*:\\s*(\\w+)\\s*-->(.*?)(?=(?:<!--\\s*(?:BEGIN|END)\\s*:\\s*\\w+\\s*-->)|(?:\\s*$))", Pattern.CASE_INSENSITIVE | Pattern.DOTALL);
Matcher matcher = pattern.matcher(content);
// currentBlock / currentBlockPath track the innermost open block and its dotted path.
BlockContent currentBlock = null;
String currentBlockPath = "";
while(matcher.find())
{
if (matcher.group(1).equalsIgnoreCase("BEGIN"))
{
// Open a new block: top-level when none is open, otherwise a child of the current one.
if (currentBlock == null)
currentBlock = new BlockContent();
else
currentBlock = (BlockContent)currentBlock.addContentItem(new BlockContent());
currentBlock.setName(matcher.group(2));
// Extend the dotted path and register the block for later lookup.
if (currentBlockPath.equals(""))
currentBlockPath = currentBlock.getName();
else
currentBlockPath += "." + currentBlock.getName();
this.blockMap.put(currentBlockPath, currentBlock);
}
else if (matcher.group(1).equalsIgnoreCase("END"))
{
// Close the current block and pop the last path segment.
// NOTE(review): an END with no open block would NPE here — assumes well-formed templates.
currentBlock = currentBlock.getParent();
if (currentBlock != null)
currentBlockPath = currentBlockPath.substring(0, currentBlockPath.lastIndexOf("."));
}
// Attach any non-empty trailing text to whichever block is still open.
if (currentBlock != null && matcher.group(3) != null && !matcher.group(3).equals(""))
currentBlock.addContentItem(new TextContent(matcher.group(3)));
}
}
| void function(String content) { Pattern pattern = Pattern.compile(STR, Pattern.CASE_INSENSITIVE Pattern.DOTALL); Matcher matcher = pattern.matcher(content); BlockContent currentBlock = null; String currentBlockPath = STRBEGINSTRSTR.STRENDSTR.STR")) currentBlock.addContentItem(new TextContent(matcher.group(3))); } } | /**
* Parse the template
*/ | Parse the template | parseTemplate | {
"repo_name": "bealearts/livecyclemojos",
"path": "lca-utils/src/main/java/com/bealearts/template/SimpleTemplate.java",
"license": "mit",
"size": 5509
} | [
"java.util.regex.Matcher",
"java.util.regex.Pattern"
] | import java.util.regex.Matcher; import java.util.regex.Pattern; | import java.util.regex.*; | [
"java.util"
] | java.util; | 840,402 |
@Override
protected InputStream getInputStream(String f1, String f2, String name) throws DataException {
String file = f1 + File.separator + f2 + File.separator + name;
try {
return new FileInputStream(new File(path, file));
} catch (FileNotFoundException e) {
throw new MissingChunkException();
}
} | InputStream function(String f1, String f2, String name) throws DataException { String file = f1 + File.separator + f2 + File.separator + name; try { return new FileInputStream(new File(path, file)); } catch (FileNotFoundException e) { throw new MissingChunkException(); } } | /**
* Get the input stream for a chunk file.
*
* @param f1 the first part of the pathname
* @param f2 the second part of the pathname
* @param name the name of the file
* @return an input stream
* @throws DataException if there is an error getting data for this chunk
*/ | Get the input stream for a chunk file | getInputStream | {
"repo_name": "HolodeckOne-Minecraft/WorldEdit",
"path": "worldedit-core/src/main/java/com/sk89q/worldedit/world/storage/FileLegacyChunkStore.java",
"license": "gpl-3.0",
"size": 2209
} | [
"com.sk89q.worldedit.world.DataException",
"java.io.File",
"java.io.FileInputStream",
"java.io.FileNotFoundException",
"java.io.InputStream"
] | import com.sk89q.worldedit.world.DataException; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.InputStream; | import com.sk89q.worldedit.world.*; import java.io.*; | [
"com.sk89q.worldedit",
"java.io"
] | com.sk89q.worldedit; java.io; | 131,764 |
private static MainWindow hecate;
public static void main(String[] args) {
//test t =new test();
hecate = new MainWindow();
hecate.setVisible(true);
} | static MainWindow hecate; public static void function(String[] args) { hecate = new MainWindow(); hecate.setVisible(true); } | /** A
* Creates and shows the main Hecate Window
* @param args
*/ | A Creates and shows the main Hecate Window | main | {
"repo_name": "apapamichail/Hecate",
"path": "src/gr/uoi/cs/daintiness/hecate/Hecate.java",
"license": "mit",
"size": 477
} | [
"gr.uoi.cs.daintiness.hecate.gui.swing.MainWindow"
] | import gr.uoi.cs.daintiness.hecate.gui.swing.MainWindow; | import gr.uoi.cs.daintiness.hecate.gui.swing.*; | [
"gr.uoi.cs"
] | gr.uoi.cs; | 385,965 |
List<NamespaceJunction> junctions(); | List<NamespaceJunction> junctions(); | /**
* Gets the junctions property: List of Cache namespace junctions to target for namespace associations.
*
* @return the junctions value.
*/ | Gets the junctions property: List of Cache namespace junctions to target for namespace associations | junctions | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/storagecache/azure-resourcemanager-storagecache/src/main/java/com/azure/resourcemanager/storagecache/models/StorageTarget.java",
"license": "mit",
"size": 13063
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 2,899,847 |
public UpdateRequestBuilder setVersionType(VersionType versionType) {
request.versionType(versionType);
return this;
} | UpdateRequestBuilder function(VersionType versionType) { request.versionType(versionType); return this; } | /**
* Sets the versioning type. Defaults to {@link org.elasticsearch.index.VersionType#INTERNAL}.
*/ | Sets the versioning type. Defaults to <code>org.elasticsearch.index.VersionType#INTERNAL</code> | setVersionType | {
"repo_name": "anti-social/elasticsearch",
"path": "src/main/java/org/elasticsearch/action/update/UpdateRequestBuilder.java",
"license": "apache-2.0",
"size": 11206
} | [
"org.elasticsearch.index.VersionType"
] | import org.elasticsearch.index.VersionType; | import org.elasticsearch.index.*; | [
"org.elasticsearch.index"
] | org.elasticsearch.index; | 1,594,314 |
/**
 * Searches the tree depth-first for the node whose user object equals the
 * given map source.
 *
 * @param mapSource the map source to locate
 * @return the TreePath of the matching node, or null when no node wraps it
 */
private TreePath findTreePathOfMapSource(MapSource mapSource) {
@SuppressWarnings("unchecked")
Enumeration<ComparableTreeNode> nodes = rootNode.depthFirstEnumeration();
while (nodes.hasMoreElements()) {
ComparableTreeNode node = nodes.nextElement();
if (node.getUserObject().equals(mapSource)) {
return new TreePath(node.getPath());
}
}
// No node in the tree wraps this map source.
return null;
}
| TreePath function(MapSource mapSource) { @SuppressWarnings(STR) Enumeration<ComparableTreeNode> rootDescendants = rootNode.depthFirstEnumeration(); while (rootDescendants.hasMoreElements()) { ComparableTreeNode descendantNode = rootDescendants.nextElement(); if (descendantNode.getUserObject().equals(mapSource)) { return new TreePath(descendantNode.getPath()); } } return null; } | /**
* This method searches for a TreePath of a requested MapSouce
*
* @param mapSource
* - analyzed mapSource object
* @return TreePath of a specified MapSource or null otherwise
*/ | This method searches for a TreePath of a requested MapSouce | findTreePathOfMapSource | {
"repo_name": "bh4017/mobac",
"path": "src/main/java/mobac/gui/components/JMapSourceTree.java",
"license": "gpl-2.0",
"size": 20035
} | [
"java.util.Enumeration",
"javax.swing.tree.TreePath"
] | import java.util.Enumeration; import javax.swing.tree.TreePath; | import java.util.*; import javax.swing.tree.*; | [
"java.util",
"javax.swing"
] | java.util; javax.swing; | 1,479,087 |
protected void changeBackgroundColorForResizing() {
getWindow().setBackgroundDrawable(new ColorDrawable(
ApiCompatibilityUtils.getColor(getResources(), R.color.resizing_background_color)));
} | void function() { getWindow().setBackgroundDrawable(new ColorDrawable( ApiCompatibilityUtils.getColor(getResources(), R.color.resizing_background_color))); } | /**
* Change the Window background color that will be used as the resizing background color on
* Android N+ multi-window mode. Note that subclasses can override this behavior accordingly in
* case there is already a Window background Drawable and don't want it to be replaced with the
* ColorDrawable.
*/ | Change the Window background color that will be used as the resizing background color on Android N+ multi-window mode. Note that subclasses can override this behavior accordingly in case there is already a Window background Drawable and don't want it to be replaced with the ColorDrawable | changeBackgroundColorForResizing | {
"repo_name": "nwjs/chromium.src",
"path": "chrome/android/java/src/org/chromium/chrome/browser/app/ChromeActivity.java",
"license": "bsd-3-clause",
"size": 127899
} | [
"android.graphics.drawable.ColorDrawable",
"org.chromium.base.ApiCompatibilityUtils"
] | import android.graphics.drawable.ColorDrawable; import org.chromium.base.ApiCompatibilityUtils; | import android.graphics.drawable.*; import org.chromium.base.*; | [
"android.graphics",
"org.chromium.base"
] | android.graphics; org.chromium.base; | 1,111,624 |
public IEntityLock newWriteLock(EntityIdentifier entityID, String owner, int durationSecs)
throws LockingException
{
return lockService.newLock(entityID.getType(), entityID.getKey(), IEntityLockService.WRITE_LOCK, owner, durationSecs);
} | IEntityLock function(EntityIdentifier entityID, String owner, int durationSecs) throws LockingException { return lockService.newLock(entityID.getType(), entityID.getKey(), IEntityLockService.WRITE_LOCK, owner, durationSecs); } | /**
* Returns a write lock for the <code>IBasicEntity</code>, owner and duration.
* @return org.jasig.portal.concurrency.locking.IEntityLock
* @param entityID EntityIdentifier
* @param owner String
* @param durationSecs int
* @exception LockingException
*/ | Returns a write lock for the <code>IBasicEntity</code>, owner and duration | newWriteLock | {
"repo_name": "drewwills/uPortal",
"path": "uportal-war/src/main/java/org/jasig/portal/services/EntityLockService.java",
"license": "apache-2.0",
"size": 8456
} | [
"org.jasig.portal.EntityIdentifier",
"org.jasig.portal.concurrency.IEntityLock",
"org.jasig.portal.concurrency.IEntityLockService",
"org.jasig.portal.concurrency.LockingException"
] | import org.jasig.portal.EntityIdentifier; import org.jasig.portal.concurrency.IEntityLock; import org.jasig.portal.concurrency.IEntityLockService; import org.jasig.portal.concurrency.LockingException; | import org.jasig.portal.*; import org.jasig.portal.concurrency.*; | [
"org.jasig.portal"
] | org.jasig.portal; | 929,691 |
//-----------------------------------------------------------------------
/**
 * Returns a new instance with each element in this period multiplied
 * by the specified scalar. The receiver itself is returned when no
 * scaling is required.
 *
 * @param scalar the scalar to multiply by, not null
 * @return a {@code Period} based on this period with the amounts multiplied by the scalar, never null
 * @throws ArithmeticException if the capacity of any field is exceeded
 * @since 2.1
 */
public Period multipliedBy(int scalar) {
// Multiplying by one, or scaling the zero period, changes nothing.
if (scalar == 1 || this == ZERO) {
return this;
}
int[] scaled = getValues(); // getValues() returns a clone, safe to mutate
for (int index = 0; index < scaled.length; index++) {
scaled[index] = FieldUtils.safeMultiply(scaled[index], scalar);
}
return new Period(scaled, getPeriodType());
}
| Period function(int scalar) { if (this == ZERO scalar == 1) { return this; } int[] values = getValues(); for (int i = 0; i < values.length; i++) { values[i] = FieldUtils.safeMultiply(values[i], scalar); } return new Period(values, getPeriodType()); } | /**
* Returns a new instance with each element in this period multiplied
* by the specified scalar.
*
* @param scalar the scalar to multiply by, not null
* @return a {@code Period} based on this period with the amounts multiplied by the scalar, never null
* @throws ArithmeticException if the capacity of any field is exceeded
* @since 2.1
*/ | Returns a new instance with each element in this period multiplied by the specified scalar | multipliedBy | {
"repo_name": "likecool21/joda-time-2.3-Testing",
"path": "src/main/java/org/joda/time/Period.java",
"license": "apache-2.0",
"size": 72737
} | [
"org.joda.time.field.FieldUtils"
] | import org.joda.time.field.FieldUtils; | import org.joda.time.field.*; | [
"org.joda.time"
] | org.joda.time; | 2,659,259 |
/**
 * Evaluates the user ODE right-hand side while enforcing the configured
 * evaluation budget.
 *
 * @param t current value of the independent time variable
 * @param y array containing the current value of the state vector
 * @param yDot placeholder array where to put the time derivative of the state vector
 * @throws DerivativeException if the underlying user function fails, or if
 *         the maximal number of evaluations has been exceeded
 */
public void computeDerivatives(final double t, final double[] y, final double[] yDot)
throws DerivativeException {
evaluations++;
if (evaluations > maxEvaluations) {
// Budget exhausted: report it through the ODE exception type callers expect.
throw new DerivativeException(new MaxEvaluationsExceededException(maxEvaluations));
}
equations.computeDerivatives(t, y, yDot);
}
| void function(final double t, final double[] y, final double[] yDot) throws DerivativeException { if (++evaluations > maxEvaluations) { throw new DerivativeException(new MaxEvaluationsExceededException(maxEvaluations)); } equations.computeDerivatives(t, y, yDot); } | /** Compute the derivatives and check the number of evaluations.
* @param t current value of the independent <I>time</I> variable
* @param y array containing the current value of the state vector
* @param yDot placeholder array where to put the time derivative of the state vector
* @throws DerivativeException this exception is propagated to the caller if the
* underlying user function triggers one
*/ | Compute the derivatives and check the number of evaluations | computeDerivatives | {
"repo_name": "SpoonLabs/astor",
"path": "examples/Math-issue-340/src/main/java/org/apache/commons/math/ode/AbstractIntegrator.java",
"license": "gpl-2.0",
"size": 9821
} | [
"org.apache.commons.math.MaxEvaluationsExceededException"
] | import org.apache.commons.math.MaxEvaluationsExceededException; | import org.apache.commons.math.*; | [
"org.apache.commons"
] | org.apache.commons; | 563,048 |
@InterfaceAudience.LimitedPrivate({"hive, pig"})
public T setFromConfiguration(Configuration conf); | @InterfaceAudience.LimitedPrivate({STR}) T function(Configuration conf); | /**
* Used to build out a configuration from an existing Hadoop {@link
* org.apache.hadoop.conf.Configuration}. This is a private API is present only for
* compatibility and ease of use for existing systems which rely heavily on Configuration.
*
* @param conf
* @return
*/ | Used to build out a configuration from an existing Hadoop <code>org.apache.hadoop.conf.Configuration</code>. This is a private API is present only for compatibility and ease of use for existing systems which rely heavily on Configuration | setFromConfiguration | {
"repo_name": "apache/incubator-tez",
"path": "tez-runtime-library/src/main/java/org/apache/tez/runtime/library/conf/BaseConfigurer.java",
"license": "apache-2.0",
"size": 2386
} | [
"org.apache.hadoop.classification.InterfaceAudience",
"org.apache.hadoop.conf.Configuration"
] | import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; | import org.apache.hadoop.classification.*; import org.apache.hadoop.conf.*; | [
"org.apache.hadoop"
] | org.apache.hadoop; | 2,837,299 |
public static String toString(Collection<? extends MimeType> mimeTypes) {
StringBuilder builder = new StringBuilder();
for (Iterator<? extends MimeType> iterator = mimeTypes.iterator(); iterator.hasNext();) {
MimeType mimeType = iterator.next();
mimeType.appendTo(builder);
if (iterator.hasNext()) {
builder.append(", ");
}
}
return builder.toString();
}
/**
* Sorts the given list of {@code MimeType} objects by specificity.
* <p>Given two mime types:
* <ol>
* <li>if either mime type has a {@linkplain MimeType#isWildcardType() wildcard type},
* then the mime type without the wildcard is ordered before the other.</li>
* <li>if the two mime types have different {@linkplain MimeType#getType() types},
* then they are considered equal and remain their current order.</li>
* <li>if either mime type has a {@linkplain MimeType#isWildcardSubtype() wildcard subtype} | static String function(Collection<? extends MimeType> mimeTypes) { StringBuilder builder = new StringBuilder(); for (Iterator<? extends MimeType> iterator = mimeTypes.iterator(); iterator.hasNext();) { MimeType mimeType = iterator.next(); mimeType.appendTo(builder); if (iterator.hasNext()) { builder.append(STR); } } return builder.toString(); } /** * Sorts the given list of {@code MimeType} objects by specificity. * <p>Given two mime types: * <ol> * <li>if either mime type has a {@linkplain MimeType#isWildcardType() wildcard type}, * then the mime type without the wildcard is ordered before the other.</li> * <li>if the two mime types have different {@linkplain MimeType#getType() types}, * then they are considered equal and remain their current order.</li> * <li>if either mime type has a {@linkplain MimeType#isWildcardSubtype() wildcard subtype} | /**
* Return a string representation of the given list of {@code MimeType} objects.
* @param mimeTypes the string to parse
* @return the list of mime types
* @throws IllegalArgumentException if the String cannot be parsed
*/ | Return a string representation of the given list of MimeType objects | toString | {
"repo_name": "qobel/esoguproject",
"path": "spring-framework/spring-core/src/main/java/org/springframework/util/MimeTypeUtils.java",
"license": "apache-2.0",
"size": 11921
} | [
"java.util.Collection",
"java.util.Iterator"
] | import java.util.Collection; import java.util.Iterator; | import java.util.*; | [
"java.util"
] | java.util; | 960,696 |
public String getFilePathFromMediaUri(Context context, Uri uri) {
if ( uri == null ) {
return null;
}
String[] projection = { MediaStore.Images.Media.DATA };
Cursor cursor = context.getContentResolver().query(
uri, projection, null, null, null);
String path = "";
if (cursor != null) {
cursor.moveToFirst();
int columnIndex = cursor.getColumnIndex(projection[0]);
path = cursor.getString(columnIndex);
cursor.close();
}
return path;
} | String function(Context context, Uri uri) { if ( uri == null ) { return null; } String[] projection = { MediaStore.Images.Media.DATA }; Cursor cursor = context.getContentResolver().query( uri, projection, null, null, null); String path = ""; if (cursor != null) { cursor.moveToFirst(); int columnIndex = cursor.getColumnIndex(projection[0]); path = cursor.getString(columnIndex); cursor.close(); } return path; } | /**
* Methods to obtain files paths
*/ | Methods to obtain files paths | getFilePathFromMediaUri | {
"repo_name": "belatrix/AndroidAllStars",
"path": "app/src/main/java/com/belatrixsf/connect/utils/MediaUtils.java",
"license": "mit",
"size": 8347
} | [
"android.content.Context",
"android.database.Cursor",
"android.net.Uri",
"android.provider.MediaStore"
] | import android.content.Context; import android.database.Cursor; import android.net.Uri; import android.provider.MediaStore; | import android.content.*; import android.database.*; import android.net.*; import android.provider.*; | [
"android.content",
"android.database",
"android.net",
"android.provider"
] | android.content; android.database; android.net; android.provider; | 1,048,118 |
public void setNoteType(CoiNoteType noteType) {
this.noteType = noteType;
} | void function(CoiNoteType noteType) { this.noteType = noteType; } | /**
* Sets the noteType attribute value.
* @param noteType The noteType to set.
*/ | Sets the noteType attribute value | setNoteType | {
"repo_name": "ColostateResearchServices/kc",
"path": "coeus-impl/src/main/java/org/kuali/kra/coi/notesandattachments/notes/CoiDisclosureNotepad.java",
"license": "agpl-3.0",
"size": 10789
} | [
"org.kuali.kra.coi.CoiNoteType"
] | import org.kuali.kra.coi.CoiNoteType; | import org.kuali.kra.coi.*; | [
"org.kuali.kra"
] | org.kuali.kra; | 270,532 |
protected List<String> getHints(WebSession s)
{
List<String> hints = new ArrayList<String>();
hints.add("The application is taking your input and inserting it at the end of a pre-formed SQL command.");
hints.add("This is the code for the query being built and issued by WebGoat:<br><br> "
+ "\"SELECT * FROM employee WHERE userid = \" + userId + \" and password = \" + password");
hints.add("Compound SQL statements can be made by joining multiple tests with keywords like AND and OR. "
+ "Try appending a SQL statement that always resolves to true");
// Stage 1
hints.add("You may need to use WebScarab to remove a field length limit to fit your attack.");
hints.add("Try entering a password of [ smith' OR '1' = '1 ].");
// Stage 2
hints
.add("Many of WebGoat's database queries are already parameterized. Search the project for PreparedStatement.");
// Stage 3
hints.add("Try entering an employee_id of [ 101 OR 1=1 ORDER BY salary ].");
// Stage 4
return hints;
}
| List<String> function(WebSession s) { List<String> hints = new ArrayList<String>(); hints.add(STR); hints.add(STR + "\"SELECT * FROM employee WHERE userid = \STR and password = \STR); hints.add(STR + STR); hints.add(STR); hints.add(STR); hints .add(STR); hints.add(STR); return hints; } | /**
* Gets the hints attribute of the DirectoryScreen object
*
* @return The hints value
*/ | Gets the hints attribute of the DirectoryScreen object | getHints | {
"repo_name": "paulnguyen/cmpe279",
"path": "eclipse/Webgoat/src/org/owasp/webgoat/lessons/SQLInjection/SQLInjection.java",
"license": "apache-2.0",
"size": 9756
} | [
"java.util.ArrayList",
"java.util.List",
"org.owasp.webgoat.session.WebSession"
] | import java.util.ArrayList; import java.util.List; import org.owasp.webgoat.session.WebSession; | import java.util.*; import org.owasp.webgoat.session.*; | [
"java.util",
"org.owasp.webgoat"
] | java.util; org.owasp.webgoat; | 1,446,189 |
public static boolean doesPatternMatch(QName qName, QName pattern) {
// Validate pattern only generates warnings if the pattern qname
// does not align with the specification.
validatePattern(pattern);
// Try the official pattern match algorithm
boolean match = doesPatternMatch_Official(qName, pattern);
// Customers may be dependent on the old algorithm, so this is retained.
if (!match) {
if (log.isDebugEnabled()) {
log.debug("The offical matching algorithm failed. Re-attempting with the prior algorithm");
}
match = doesPatternMatch_Old(qName, pattern);
if (log.isDebugEnabled()) {
log.debug("The old matching algorithm returns " + match);
}
}
return match;
}
| static boolean function(QName qName, QName pattern) { validatePattern(pattern); boolean match = doesPatternMatch_Official(qName, pattern); if (!match) { if (log.isDebugEnabled()) { log.debug(STR); } match = doesPatternMatch_Old(qName, pattern); if (log.isDebugEnabled()) { log.debug(STR + match); } } return match; } | /**
* Returns true of the specified qName matches the pattern.
* Some customers may have become dependent on the older
* algorithm. So first the "official" algorithm is used
* and if that fails, the older algorithm is used.
* @param qName QName
* @param pattern QName
* @return true or false
*/ | Returns true of the specified qName matches the pattern. Some customers may have become dependent on the older algorithm. So first the "official" algorithm is used and if that fails, the older algorithm is used | doesPatternMatch | {
"repo_name": "manuranga/wso2-axis2",
"path": "modules/jaxws/src/org/apache/axis2/jaxws/spi/handler/BaseHandlerResolver.java",
"license": "apache-2.0",
"size": 16166
} | [
"javax.xml.namespace.QName"
] | import javax.xml.namespace.QName; | import javax.xml.namespace.*; | [
"javax.xml"
] | javax.xml; | 134,992 |
private void enableResourceValueNotifications() throws IhcExecption {
logger.debug("Subscripe resource runtime value notifications");
if (ihc != null) {
if (ihc.getConnectionState() != ConnectionState.CONNECTED) {
logger.debug("Controller is connecting, abort subscribe");
return;
}
List<Integer> resourceIdList = new ArrayList<Integer>();
for (IhcBindingProvider provider : providers) {
for (String itemName : provider.getItemNames()) {
resourceIdList.add(provider.getResourceIdForInBinding(itemName));
}
}
if (resourceIdList.size() > 0) {
logger.debug("Enable runtime notfications for {} resources", resourceIdList.size());
try {
ihc.enableRuntimeValueNotifications(resourceIdList);
} catch (IhcExecption e) {
logger.debug("Reconnection request");
setReconnectRequest(true);
}
}
} else {
logger.warn("Controller is not initialized!");
logger.debug("Reconnection request");
setReconnectRequest(true);
}
setValueNotificationRequest(false);
} | void function() throws IhcExecption { logger.debug(STR); if (ihc != null) { if (ihc.getConnectionState() != ConnectionState.CONNECTED) { logger.debug(STR); return; } List<Integer> resourceIdList = new ArrayList<Integer>(); for (IhcBindingProvider provider : providers) { for (String itemName : provider.getItemNames()) { resourceIdList.add(provider.getResourceIdForInBinding(itemName)); } } if (resourceIdList.size() > 0) { logger.debug(STR, resourceIdList.size()); try { ihc.enableRuntimeValueNotifications(resourceIdList); } catch (IhcExecption e) { logger.debug(STR); setReconnectRequest(true); } } } else { logger.warn(STR); logger.debug(STR); setReconnectRequest(true); } setValueNotificationRequest(false); } | /**
* Order resource value notifications from IHC controller.
*/ | Order resource value notifications from IHC controller | enableResourceValueNotifications | {
"repo_name": "paolodenti/openhab",
"path": "bundles/binding/org.openhab.binding.ihc/src/main/java/org/openhab/binding/ihc/internal/IhcBinding.java",
"license": "epl-1.0",
"size": 21510
} | [
"java.util.ArrayList",
"java.util.List",
"org.openhab.binding.ihc.IhcBindingProvider",
"org.openhab.binding.ihc.ws.IhcClient",
"org.openhab.binding.ihc.ws.IhcExecption"
] | import java.util.ArrayList; import java.util.List; import org.openhab.binding.ihc.IhcBindingProvider; import org.openhab.binding.ihc.ws.IhcClient; import org.openhab.binding.ihc.ws.IhcExecption; | import java.util.*; import org.openhab.binding.ihc.*; import org.openhab.binding.ihc.ws.*; | [
"java.util",
"org.openhab.binding"
] | java.util; org.openhab.binding; | 681,704 |
return new PredicatedList<T>(list, predicate);
}
//-----------------------------------------------------------------------
protected PredicatedList(final List<E> list, final Predicate<? super E> predicate) {
super(list, predicate);
} | return new PredicatedList<T>(list, predicate); } protected PredicatedList(final List<E> list, final Predicate<? super E> predicate) { super(list, predicate); } | /**
* Factory method to create a predicated (validating) list.
* <p>
* If there are any elements already in the list being decorated, they
* are validated.
*
* @param <T> the type of the elements in the list
* @param list the list to decorate, must not be null
* @param predicate the predicate to use for validation, must not be null
* @return a new predicated list
* @throws IllegalArgumentException if list or predicate is null
* @throws IllegalArgumentException if the list contains invalid elements
* @since 4.0
*/ | Factory method to create a predicated (validating) list. If there are any elements already in the list being decorated, they are validated | predicatedList | {
"repo_name": "gonmarques/commons-collections",
"path": "src/main/java/org/apache/commons/collections4/list/PredicatedList.java",
"license": "apache-2.0",
"size": 5959
} | [
"java.util.List",
"org.apache.commons.collections4.Predicate"
] | import java.util.List; import org.apache.commons.collections4.Predicate; | import java.util.*; import org.apache.commons.collections4.*; | [
"java.util",
"org.apache.commons"
] | java.util; org.apache.commons; | 386,946 |
@Test(expected = ParserException.class)
public void processRevisionInValidSyntax() throws IOException, ParserException {
YangNode node = manager.getDataModel("src/test/resources/RevisionInValidSyntax.yang");
} | @Test(expected = ParserException.class) void function() throws IOException, ParserException { YangNode node = manager.getDataModel(STR); } | /**
* Checks if the syntax of revision is correct.
*/ | Checks if the syntax of revision is correct | processRevisionInValidSyntax | {
"repo_name": "sonu283304/onos",
"path": "utils/yangutils/src/test/java/org/onosproject/yangutils/parser/impl/listeners/RevisionListenerTest.java",
"license": "apache-2.0",
"size": 2297
} | [
"java.io.IOException",
"org.junit.Test",
"org.onosproject.yangutils.datamodel.YangNode",
"org.onosproject.yangutils.parser.exceptions.ParserException"
] | import java.io.IOException; import org.junit.Test; import org.onosproject.yangutils.datamodel.YangNode; import org.onosproject.yangutils.parser.exceptions.ParserException; | import java.io.*; import org.junit.*; import org.onosproject.yangutils.datamodel.*; import org.onosproject.yangutils.parser.exceptions.*; | [
"java.io",
"org.junit",
"org.onosproject.yangutils"
] | java.io; org.junit; org.onosproject.yangutils; | 1,323,381 |
private static void verifyAncestry(Class<?> tClass) {
if (!TBase.class.isAssignableFrom(tClass)) {
throw new ClassCastException(tClass.getName() + " is not a Thrift class");
}
} | static void function(Class<?> tClass) { if (!TBase.class.isAssignableFrom(tClass)) { throw new ClassCastException(tClass.getName() + STR); } } | /**
* Verify that clazz is a Thrift class. i.e. is a subclass of TBase
*/ | Verify that clazz is a Thrift class. i.e. is a subclass of TBase | verifyAncestry | {
"repo_name": "ketralnis/elephant-bird",
"path": "src/java/com/twitter/elephantbird/util/ThriftUtils.java",
"license": "apache-2.0",
"size": 3774
} | [
"org.apache.thrift.TBase"
] | import org.apache.thrift.TBase; | import org.apache.thrift.*; | [
"org.apache.thrift"
] | org.apache.thrift; | 1,115,539 |
private boolean checkSave() {
if(currentJuridicalPerson.getId()==null){
Messagebox.show("Перед добавлением, сохраните клиента.", "Внимание!",Messagebox.OK,Messagebox.EXCLAMATION);
}
return currentJuridicalPerson.getId()!=null;
} | boolean function() { if(currentJuridicalPerson.getId()==null){ Messagebox.show(STR, STR,Messagebox.OK,Messagebox.EXCLAMATION); } return currentJuridicalPerson.getId()!=null; } | /**
* return true if possible add child object (if currentJuridicalPerson not new)
* @return
*/ | return true if possible add child object (if currentJuridicalPerson not new) | checkSave | {
"repo_name": "dmrzh/simplex-crm-b2b",
"path": "src/main/java/ru/dev_server/client/viewmodel/JuridicalPersonVM.java",
"license": "agpl-3.0",
"size": 18965
} | [
"org.zkoss.zhtml.Messagebox"
] | import org.zkoss.zhtml.Messagebox; | import org.zkoss.zhtml.*; | [
"org.zkoss.zhtml"
] | org.zkoss.zhtml; | 2,261,589 |
public static java.util.List extractGpList(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.GPCollection voCollection)
{
return extractGpList(domainFactory, voCollection, null, new HashMap());
}
| static java.util.List function(ims.domain.ILightweightDomainFactory domainFactory, ims.core.vo.GPCollection voCollection) { return extractGpList(domainFactory, voCollection, null, new HashMap()); } | /**
* Create the ims.core.resource.people.domain.objects.Gp list from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/ | Create the ims.core.resource.people.domain.objects.Gp list from the value object collection | extractGpList | {
"repo_name": "open-health-hub/openmaxims-linux",
"path": "openmaxims_workspace/ValueObjects/src/ims/core/vo/domain/GPAssembler.java",
"license": "agpl-3.0",
"size": 17842
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 2,422,080 |
public List<ColumnIdent> partitionedBy() {
return partitionedBy;
} | List<ColumnIdent> function() { return partitionedBy; } | /**
* column names of columns this table is partitioned by (in dotted syntax).
* <p>
* guaranteed to be in the same order as defined in CREATE TABLE statement
*
* @return always a list, never null
*/ | column names of columns this table is partitioned by (in dotted syntax). guaranteed to be in the same order as defined in CREATE TABLE statement | partitionedBy | {
"repo_name": "crate/crate",
"path": "server/src/main/java/io/crate/metadata/doc/DocTableInfo.java",
"license": "apache-2.0",
"size": 15646
} | [
"io.crate.metadata.ColumnIdent",
"java.util.List"
] | import io.crate.metadata.ColumnIdent; import java.util.List; | import io.crate.metadata.*; import java.util.*; | [
"io.crate.metadata",
"java.util"
] | io.crate.metadata; java.util; | 1,148,695 |
public RelBuilder antiJoin(RexNode... conditions) {
return antiJoin(ImmutableList.copyOf(conditions));
} | RelBuilder function(RexNode... conditions) { return antiJoin(ImmutableList.copyOf(conditions)); } | /** Creates an anti-join.
*
* @see #antiJoin(Iterable) */ | Creates an anti-join | antiJoin | {
"repo_name": "xhoong/incubator-calcite",
"path": "core/src/main/java/org/apache/calcite/tools/RelBuilder.java",
"license": "apache-2.0",
"size": 108300
} | [
"com.google.common.collect.ImmutableList",
"org.apache.calcite.rex.RexNode"
] | import com.google.common.collect.ImmutableList; import org.apache.calcite.rex.RexNode; | import com.google.common.collect.*; import org.apache.calcite.rex.*; | [
"com.google.common",
"org.apache.calcite"
] | com.google.common; org.apache.calcite; | 1,988,071 |
protected void append(Object o) throws IOException
{
if (o instanceof byte[])
{
m_outputStream.write((byte[]) o);
}
else if (o instanceof String)
{
m_outputStream.write(((String) o).getBytes());
}
else
{
throw new UnsupportedOperationException("Cannot write this object to an output stream");
}
} | void function(Object o) throws IOException { if (o instanceof byte[]) { m_outputStream.write((byte[]) o); } else if (o instanceof String) { m_outputStream.write(((String) o).getBytes()); } else { throw new UnsupportedOperationException(STR); } } | /**
* Writes a new object to the output stream.
*
* @param o
* The object to write to the output stream. Can either be a byte array
* or a string. Any other kind of object will throw an exception.
* @throws IOException
* If writing to the output stream cannot be done
*/ | Writes a new object to the output stream | append | {
"repo_name": "lif-labs/beepbeep-3",
"path": "Core/src/ca/uqac/lif/cep/io/WriteOutputStream.java",
"license": "lgpl-3.0",
"size": 2825
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,350,456 |
public Simplex neighborOpposite (Object vertex, Simplex simplex) {
if (!simplex.contains(vertex))
throw new IllegalArgumentException("Bad vertex; not in simplex");
SimplexLoop: for (Iterator it = ((Set) neighbors.get(simplex)).iterator();
it.hasNext();) {
Simplex s = (Simplex) it.next();
for (Iterator otherIt = simplex.iterator(); otherIt.hasNext(); ) {
Object v = otherIt.next();
if (v.equals(vertex)) continue;
if (!s.contains(v)) continue SimplexLoop;
}
return s;
}
return null;
} | Simplex function (Object vertex, Simplex simplex) { if (!simplex.contains(vertex)) throw new IllegalArgumentException(STR); SimplexLoop: for (Iterator it = ((Set) neighbors.get(simplex)).iterator(); it.hasNext();) { Simplex s = (Simplex) it.next(); for (Iterator otherIt = simplex.iterator(); otherIt.hasNext(); ) { Object v = otherIt.next(); if (v.equals(vertex)) continue; if (!s.contains(v)) continue SimplexLoop; } return s; } return null; } | /**
* Report neighbor opposite the given vertex of simplex.
* @param vertex a vertex of simplex
* @param simplex we want the neighbor of this Simplex
* @return the neighbor opposite vertex of simplex; null if none
* @throws IllegalArgumentException if vertex is not in this Simplex
*/ | Report neighbor opposite the given vertex of simplex | neighborOpposite | {
"repo_name": "iCarto/siga",
"path": "libTopology/src/org/gvsig/jts/voronoi/chew/Triangulation.java",
"license": "gpl-3.0",
"size": 6512
} | [
"java.util.Iterator",
"java.util.Set"
] | import java.util.Iterator; import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 953,555 |
public boolean updateOvenData(@Nullable String postData, Helper helper, String thingUID) {
String statusDescr = "";
boolean resultOk = false;
String error = "", errorDetail = "";
if (config.hostIP == null || config.hostPIN == null) {
return false;
}
String urlStr = "http://" + config.hostIP + "/status.cgi";
// Run the HTTP POST request and get the JSON response from Oven
String response = null;
Properties httpHeader = new Properties();
if (postData != null) {
try {
InputStream targetStream = new ByteArrayInputStream(postData.getBytes("UTF-8"));
refreshOvenConnection(helper, thingUID);
httpHeader = createHeader(postData);
response = HttpUtil.executeUrl("POST", urlStr, httpHeader, targetStream, "application/json", 10000);
resultOk = true;
logger.debug("Execute POST request with content to {} with header: {}", urlStr, httpHeader.toString());
} catch (UnsupportedEncodingException e1) {
logger.debug("Wrong encoding found. Only UTF-8 is supported.");
statusDescr = "Encoding of oven is not supported. Only UTF-8 is supported.";
resultOk = false;
} catch (IOException e) {
logger.debug("Error processiong POST request {}", urlStr);
statusDescr = "Cannot execute command on Stove. Please verify connection and Thing Status";
resultOk = false;
}
} else {
try {
refreshOvenConnection(helper, thingUID);
httpHeader = createHeader(null);
response = HttpUtil.executeUrl("POST", urlStr, httpHeader, null, "", 10000);
resultOk = true;
logger.debug("Execute POST request to {} with header: {}", urlStr, httpHeader.toString());
} catch (IOException e) {
logger.debug("Error processiong POST request {}", e.getMessage());
String message = e.getMessage();
if (message != null && message.contains("Authentication challenge without WWW-Authenticate ")) {
statusDescr = "Cannot connect to stove. Given PIN: " + config.hostPIN + " is incorrect!";
}
resultOk = false;
}
}
if (resultOk) {
logger.debug("OvenData = {}", response);
ovenData = gson.fromJson(response, HaasSohnpelletstoveJsonDataDTO.class);
} else {
logger.debug("Setting thing '{}' to OFFLINE: Error '{}': {}", thingUID, error, errorDetail);
ovenData = new HaasSohnpelletstoveJsonDataDTO();
}
helper.setStatusDescription(statusDescr);
return resultOk;
} | boolean function(@Nullable String postData, Helper helper, String thingUID) { String statusDescr = STRSTRSTRhttp: String response = null; Properties httpHeader = new Properties(); if (postData != null) { try { InputStream targetStream = new ByteArrayInputStream(postData.getBytes("UTF-8")); refreshOvenConnection(helper, thingUID); httpHeader = createHeader(postData); response = HttpUtil.executeUrl("POST", urlStr, httpHeader, targetStream, STR, 10000); resultOk = true; logger.debug(STR, urlStr, httpHeader.toString()); } catch (UnsupportedEncodingException e1) { logger.debug(STR); statusDescr = STR; resultOk = false; } catch (IOException e) { logger.debug(STR, urlStr); statusDescr = STR; resultOk = false; } } else { try { refreshOvenConnection(helper, thingUID); httpHeader = createHeader(null); response = HttpUtil.executeUrl("POST", urlStr, httpHeader, null, STRExecute POST request to {} with header: {}", urlStr, httpHeader.toString()); } catch (IOException e) { logger.debug(STR, e.getMessage()); String message = e.getMessage(); if (message != null && message.contains("Authentication challenge without WWW-Authenticate STRCannot connect to stove. Given PIN: STR is incorrect!STROvenData = {}STRSetting thing '{}' to OFFLINE: Error '{}': {}", thingUID, error, errorDetail); ovenData = new HaasSohnpelletstoveJsonDataDTO(); } helper.setStatusDescription(statusDescr); return resultOk; } | /**
* Gets the status of the oven
*
* @return true if success or false in case of error
*/ | Gets the status of the oven | updateOvenData | {
"repo_name": "paulianttila/openhab2",
"path": "bundles/org.openhab.binding.haassohnpelletstove/src/main/java/org/openhab/binding/haassohnpelletstove/internal/HaasSohnpelletstoveJSONCommunication.java",
"license": "epl-1.0",
"size": 8669
} | [
"java.io.ByteArrayInputStream",
"java.io.IOException",
"java.io.InputStream",
"java.io.UnsupportedEncodingException",
"java.util.Properties",
"org.eclipse.jdt.annotation.Nullable",
"org.openhab.core.io.net.http.HttpUtil"
] | import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.util.Properties; import org.eclipse.jdt.annotation.Nullable; import org.openhab.core.io.net.http.HttpUtil; | import java.io.*; import java.util.*; import org.eclipse.jdt.annotation.*; import org.openhab.core.io.net.http.*; | [
"java.io",
"java.util",
"org.eclipse.jdt",
"org.openhab.core"
] | java.io; java.util; org.eclipse.jdt; org.openhab.core; | 290,311 |
public void start() throws IOException;
| void function() throws IOException; | /** Starts the server part. This must be idempotent, i.e. not start a
* second instance if it is already running.
*/ | Starts the server part. This must be idempotent, i.e. not start a second instance if it is already running | start | {
"repo_name": "mobilesec/openuat",
"path": "src/org/openuat/channel/main/HostAuthenticationServer.java",
"license": "lgpl-3.0",
"size": 2306
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 2,640,892 |
public static void shutdownDfs(MiniDFSCluster cluster) {
if (cluster != null) {
LOG.info("Shutting down Mini DFS ");
try {
cluster.shutdown();
} catch (Exception e) {
/// Can get a java.lang.reflect.UndeclaredThrowableException thrown
// here because of an InterruptedException. Don't let exceptions in
// here be cause of test failure.
}
try {
FileSystem fs = cluster.getFileSystem();
if (fs != null) {
LOG.info("Shutting down FileSystem");
fs.close();
}
FileSystem.closeAll();
} catch (IOException e) {
LOG.error("error closing file system", e);
}
}
} | static void function(MiniDFSCluster cluster) { if (cluster != null) { LOG.info(STR); try { cluster.shutdown(); } catch (Exception e) { } try { FileSystem fs = cluster.getFileSystem(); if (fs != null) { LOG.info(STR); fs.close(); } FileSystem.closeAll(); } catch (IOException e) { LOG.error(STR, e); } } } | /**
* Common method to close down a MiniDFSCluster and the associated file system
*
* @param cluster
*/ | Common method to close down a MiniDFSCluster and the associated file system | shutdownDfs | {
"repo_name": "francisliu/hbase",
"path": "hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestCase.java",
"license": "apache-2.0",
"size": 16485
} | [
"java.io.IOException",
"org.apache.hadoop.fs.FileSystem",
"org.apache.hadoop.hdfs.MiniDFSCluster"
] | import java.io.IOException; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hdfs.MiniDFSCluster; | import java.io.*; import org.apache.hadoop.fs.*; import org.apache.hadoop.hdfs.*; | [
"java.io",
"org.apache.hadoop"
] | java.io; org.apache.hadoop; | 1,406,717 |
private void performIndexActions(Progress progress, String transactionId) {
List<IndexAction> indexActions =
dataService
.findAll(INDEX_ACTION, createQueryGetAllIndexActions(transactionId), IndexAction.class)
.collect(toList());
try {
boolean success = true;
int count = 0;
for (IndexAction indexAction : indexActions) {
success &= performAction(progress, count++, indexAction);
}
if (success) {
progress.progress(count, "Executed all index actions, cleaning up the actions...");
dataService.delete(INDEX_ACTION, indexActions.stream());
dataService.deleteById(INDEX_ACTION_GROUP, transactionId);
progress.progress(count, "Cleaned up the actions.");
}
} catch (Exception ex) {
LOG.error("Error performing index actions", ex);
throw ex;
} finally {
progress.status("Refresh index start");
indexService.refreshIndex();
progress.status("Refresh index done");
}
} | void function(Progress progress, String transactionId) { List<IndexAction> indexActions = dataService .findAll(INDEX_ACTION, createQueryGetAllIndexActions(transactionId), IndexAction.class) .collect(toList()); try { boolean success = true; int count = 0; for (IndexAction indexAction : indexActions) { success &= performAction(progress, count++, indexAction); } if (success) { progress.progress(count, STR); dataService.delete(INDEX_ACTION, indexActions.stream()); dataService.deleteById(INDEX_ACTION_GROUP, transactionId); progress.progress(count, STR); } } catch (Exception ex) { LOG.error(STR, ex); throw ex; } finally { progress.status(STR); indexService.refreshIndex(); progress.status(STR); } } | /**
* Performs the IndexActions.
*
* @param progress {@link Progress} instance to log progress information to
*/ | Performs the IndexActions | performIndexActions | {
"repo_name": "dennishendriksen/molgenis",
"path": "molgenis-data-index/src/main/java/org/molgenis/data/index/job/IndexJobService.java",
"license": "lgpl-3.0",
"size": 8486
} | [
"java.util.List",
"org.molgenis.data.index.meta.IndexAction",
"org.molgenis.jobs.Progress"
] | import java.util.List; import org.molgenis.data.index.meta.IndexAction; import org.molgenis.jobs.Progress; | import java.util.*; import org.molgenis.data.index.meta.*; import org.molgenis.jobs.*; | [
"java.util",
"org.molgenis.data",
"org.molgenis.jobs"
] | java.util; org.molgenis.data; org.molgenis.jobs; | 1,550,924 |
@Test
public void javabeanTesterFollowedFileInfo() {
JavaBeanTester.builder(FollowedFileInfoController.class)
.skip("applicationContext", "supportedMethods").test();
} | void function() { JavaBeanTester.builder(FollowedFileInfoController.class) .skip(STR, STR).test(); } | /**
* Javabean tester followed file info.
*/ | Javabean tester followed file info | javabeanTesterFollowedFileInfo | {
"repo_name": "dougwm/psi-probe",
"path": "core/src/test/java/psiprobe/controllers/logs/LogHandlerControllerTest.java",
"license": "gpl-2.0",
"size": 1738
} | [
"com.codebox.bean.JavaBeanTester"
] | import com.codebox.bean.JavaBeanTester; | import com.codebox.bean.*; | [
"com.codebox.bean"
] | com.codebox.bean; | 534,861 |
public static TrackSampleTable parseStbl(Track track, Atom.ContainerAtom stblAtom)
throws ParserException {
// Array of sample sizes.
ParsableByteArray stsz = stblAtom.getLeafAtomOfType(Atom.TYPE_stsz).data;
// Entries are byte offsets of chunks.
boolean chunkOffsetsAreLongs = false;
Atom.LeafAtom chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stco);
if (chunkOffsetsAtom == null) {
chunkOffsetsAreLongs = true;
chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_co64);
}
ParsableByteArray chunkOffsets = chunkOffsetsAtom.data;
// Entries are (chunk number, number of samples per chunk, sample description index).
ParsableByteArray stsc = stblAtom.getLeafAtomOfType(Atom.TYPE_stsc).data;
// Entries are (number of samples, timestamp delta between those samples).
ParsableByteArray stts = stblAtom.getLeafAtomOfType(Atom.TYPE_stts).data;
// Entries are the indices of samples that are synchronization samples.
Atom.LeafAtom stssAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stss);
ParsableByteArray stss = stssAtom != null ? stssAtom.data : null;
// Entries are (number of samples, timestamp offset).
Atom.LeafAtom cttsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_ctts);
ParsableByteArray ctts = cttsAtom != null ? cttsAtom.data : null;
// Skip full atom.
stsz.setPosition(Atom.FULL_HEADER_SIZE);
int fixedSampleSize = stsz.readUnsignedIntToInt();
int sampleCount = stsz.readUnsignedIntToInt();
if (sampleCount == 0) {
return new TrackSampleTable(new long[0], new int[0], 0, new long[0], new int[0]);
}
// Prepare to read chunk information.
ChunkIterator chunkIterator = new ChunkIterator(stsc, chunkOffsets, chunkOffsetsAreLongs);
// Prepare to read sample timestamps.
stts.setPosition(Atom.FULL_HEADER_SIZE);
int remainingTimestampDeltaChanges = stts.readUnsignedIntToInt() - 1;
int remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt();
int timestampDeltaInTimeUnits = stts.readUnsignedIntToInt();
// Prepare to read sample timestamp offsets, if ctts is present.
int remainingSamplesAtTimestampOffset = 0;
int remainingTimestampOffsetChanges = 0;
int timestampOffset = 0;
if (ctts != null) {
ctts.setPosition(Atom.FULL_HEADER_SIZE);
remainingTimestampOffsetChanges = ctts.readUnsignedIntToInt();
}
int nextSynchronizationSampleIndex = -1;
int remainingSynchronizationSamples = 0;
if (stss != null) {
stss.setPosition(Atom.FULL_HEADER_SIZE);
remainingSynchronizationSamples = stss.readUnsignedIntToInt();
nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1;
}
// True if we can rechunk fixed-sample-size data. Note that we only rechunk raw audio.
boolean isRechunkable =
fixedSampleSize != 0
&& MimeTypes.AUDIO_RAW.equals(track.mediaFormat.mimeType)
&& remainingTimestampDeltaChanges == 0
&& remainingTimestampOffsetChanges == 0
&& remainingSynchronizationSamples == 0;
long[] offsets;
int[] sizes;
int maximumSize = 0;
long[] timestamps;
int[] flags;
if (!isRechunkable) {
offsets = new long[sampleCount];
sizes = new int[sampleCount];
timestamps = new long[sampleCount];
flags = new int[sampleCount];
long timestampTimeUnits = 0;
long offset = 0;
int remainingSamplesInChunk = 0;
for (int i = 0; i < sampleCount; i++) {
// Advance to the next chunk if necessary.
while (remainingSamplesInChunk == 0) {
Assertions.checkState(chunkIterator.moveNext());
offset = chunkIterator.offset;
remainingSamplesInChunk = chunkIterator.numSamples;
}
// Add on the timestamp offset if ctts is present.
if (ctts != null) {
while (remainingSamplesAtTimestampOffset == 0 && remainingTimestampOffsetChanges > 0) {
remainingSamplesAtTimestampOffset = ctts.readUnsignedIntToInt();
// The BMFF spec (ISO 14496-12) states that sample offsets should be unsigned integers
// in version 0 ctts boxes, however some streams violate the spec and use signed
// integers instead. It's safe to always parse sample offsets as signed integers here,
// because unsigned integers will still be parsed correctly (unless their top bit is
// set, which is never true in practice because sample offsets are always small).
timestampOffset = ctts.readInt();
remainingTimestampOffsetChanges--;
}
remainingSamplesAtTimestampOffset--;
}
offsets[i] = offset;
sizes[i] = fixedSampleSize == 0 ? stsz.readUnsignedIntToInt() : fixedSampleSize;
if (sizes[i] > maximumSize) {
maximumSize = sizes[i];
}
timestamps[i] = timestampTimeUnits + timestampOffset;
// All samples are synchronization samples if the stss is not present.
flags[i] = stss == null ? C.SAMPLE_FLAG_SYNC : 0;
if (i == nextSynchronizationSampleIndex) {
flags[i] = C.SAMPLE_FLAG_SYNC;
remainingSynchronizationSamples--;
if (remainingSynchronizationSamples > 0) {
nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1;
}
}
// Add on the duration of this sample.
timestampTimeUnits += timestampDeltaInTimeUnits;
remainingSamplesAtTimestampDelta--;
if (remainingSamplesAtTimestampDelta == 0 && remainingTimestampDeltaChanges > 0) {
remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt();
timestampDeltaInTimeUnits = stts.readUnsignedIntToInt();
remainingTimestampDeltaChanges--;
}
offset += sizes[i];
remainingSamplesInChunk--;
}
// Check all the expected samples have been seen.
Assertions.checkArgument(remainingSynchronizationSamples == 0);
Assertions.checkArgument(remainingSamplesAtTimestampDelta == 0);
Assertions.checkArgument(remainingSamplesInChunk == 0);
Assertions.checkArgument(remainingTimestampDeltaChanges == 0);
Assertions.checkArgument(remainingTimestampOffsetChanges == 0);
} else {
long[] chunkOffsetsBytes = new long[chunkIterator.length];
int[] chunkSampleCounts = new int[chunkIterator.length];
while (chunkIterator.moveNext()) {
chunkOffsetsBytes[chunkIterator.index] = chunkIterator.offset;
chunkSampleCounts[chunkIterator.index] = chunkIterator.numSamples;
}
FixedSampleSizeRechunker.Results rechunkedResults = FixedSampleSizeRechunker.rechunk(
fixedSampleSize, chunkOffsetsBytes, chunkSampleCounts, timestampDeltaInTimeUnits);
offsets = rechunkedResults.offsets;
sizes = rechunkedResults.sizes;
maximumSize = rechunkedResults.maximumSize;
timestamps = rechunkedResults.timestamps;
flags = rechunkedResults.flags;
}
if (track.editListDurations == null) {
Util.scaleLargeTimestampsInPlace(timestamps, C.MICROS_PER_SECOND, track.timescale);
return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags);
}
// See the BMFF spec (ISO 14496-12) subsection 8.6.6. Edit lists that truncate audio and
// require prerolling from a sync sample after reordering are not supported. This
// implementation handles simple discarding/delaying of samples. The extractor may place
// further restrictions on what edited streams are playable.
if (track.editListDurations.length == 1 && track.editListDurations[0] == 0) {
// The current version of the spec leaves handling of an edit with zero segment_duration in
// unfragmented files open to interpretation. We handle this as a special case and include all
// samples in the edit.
for (int i = 0; i < timestamps.length; i++) {
timestamps[i] = Util.scaleLargeTimestamp(timestamps[i] - track.editListMediaTimes[0],
C.MICROS_PER_SECOND, track.timescale);
}
return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags);
}
// Count the number of samples after applying edits.
int editedSampleCount = 0;
int nextSampleIndex = 0;
boolean copyMetadata = false;
for (int i = 0; i < track.editListDurations.length; i++) {
long mediaTime = track.editListMediaTimes[i];
if (mediaTime != -1) {
long duration = Util.scaleLargeTimestamp(track.editListDurations[i], track.timescale,
track.movieTimescale);
int startIndex = Util.binarySearchCeil(timestamps, mediaTime, true, true);
int endIndex = Util.binarySearchCeil(timestamps, mediaTime + duration, true, false);
editedSampleCount += endIndex - startIndex;
copyMetadata |= nextSampleIndex != startIndex;
nextSampleIndex = endIndex;
}
}
copyMetadata |= editedSampleCount != sampleCount;
// Calculate edited sample timestamps and update the corresponding metadata arrays.
long[] editedOffsets = copyMetadata ? new long[editedSampleCount] : offsets;
int[] editedSizes = copyMetadata ? new int[editedSampleCount] : sizes;
int editedMaximumSize = copyMetadata ? 0 : maximumSize;
int[] editedFlags = copyMetadata ? new int[editedSampleCount] : flags;
long[] editedTimestamps = new long[editedSampleCount];
long pts = 0;
int sampleIndex = 0;
for (int i = 0; i < track.editListDurations.length; i++) {
long mediaTime = track.editListMediaTimes[i];
long duration = track.editListDurations[i];
if (mediaTime != -1) {
long endMediaTime = mediaTime + Util.scaleLargeTimestamp(duration, track.timescale,
track.movieTimescale);
int startIndex = Util.binarySearchCeil(timestamps, mediaTime, true, true);
int endIndex = Util.binarySearchCeil(timestamps, endMediaTime, true, false);
if (copyMetadata) {
int count = endIndex - startIndex;
System.arraycopy(offsets, startIndex, editedOffsets, sampleIndex, count);
System.arraycopy(sizes, startIndex, editedSizes, sampleIndex, count);
System.arraycopy(flags, startIndex, editedFlags, sampleIndex, count);
}
for (int j = startIndex; j < endIndex; j++) {
long ptsUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale);
long timeInSegmentUs = Util.scaleLargeTimestamp(timestamps[j] - mediaTime,
C.MICROS_PER_SECOND, track.timescale);
editedTimestamps[sampleIndex] = ptsUs + timeInSegmentUs;
if (copyMetadata && editedSizes[sampleIndex] > editedMaximumSize) {
editedMaximumSize = sizes[j];
}
sampleIndex++;
}
}
pts += duration;
}
boolean hasSyncSample = false;
for (int i = 0; i < editedFlags.length && !hasSyncSample; i++) {
hasSyncSample |= (editedFlags[i] & C.SAMPLE_FLAG_SYNC) != 0;
}
if (!hasSyncSample) {
throw new ParserException("The edited sample sequence does not contain a sync sample.");
}
return new TrackSampleTable(editedOffsets, editedSizes, editedMaximumSize, editedTimestamps,
editedFlags);
} | static TrackSampleTable function(Track track, Atom.ContainerAtom stblAtom) throws ParserException { ParsableByteArray stsz = stblAtom.getLeafAtomOfType(Atom.TYPE_stsz).data; boolean chunkOffsetsAreLongs = false; Atom.LeafAtom chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stco); if (chunkOffsetsAtom == null) { chunkOffsetsAreLongs = true; chunkOffsetsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_co64); } ParsableByteArray chunkOffsets = chunkOffsetsAtom.data; ParsableByteArray stsc = stblAtom.getLeafAtomOfType(Atom.TYPE_stsc).data; ParsableByteArray stts = stblAtom.getLeafAtomOfType(Atom.TYPE_stts).data; Atom.LeafAtom stssAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_stss); ParsableByteArray stss = stssAtom != null ? stssAtom.data : null; Atom.LeafAtom cttsAtom = stblAtom.getLeafAtomOfType(Atom.TYPE_ctts); ParsableByteArray ctts = cttsAtom != null ? cttsAtom.data : null; stsz.setPosition(Atom.FULL_HEADER_SIZE); int fixedSampleSize = stsz.readUnsignedIntToInt(); int sampleCount = stsz.readUnsignedIntToInt(); if (sampleCount == 0) { return new TrackSampleTable(new long[0], new int[0], 0, new long[0], new int[0]); } ChunkIterator chunkIterator = new ChunkIterator(stsc, chunkOffsets, chunkOffsetsAreLongs); stts.setPosition(Atom.FULL_HEADER_SIZE); int remainingTimestampDeltaChanges = stts.readUnsignedIntToInt() - 1; int remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt(); int timestampDeltaInTimeUnits = stts.readUnsignedIntToInt(); int remainingSamplesAtTimestampOffset = 0; int remainingTimestampOffsetChanges = 0; int timestampOffset = 0; if (ctts != null) { ctts.setPosition(Atom.FULL_HEADER_SIZE); remainingTimestampOffsetChanges = ctts.readUnsignedIntToInt(); } int nextSynchronizationSampleIndex = -1; int remainingSynchronizationSamples = 0; if (stss != null) { stss.setPosition(Atom.FULL_HEADER_SIZE); remainingSynchronizationSamples = stss.readUnsignedIntToInt(); nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1; } boolean 
isRechunkable = fixedSampleSize != 0 && MimeTypes.AUDIO_RAW.equals(track.mediaFormat.mimeType) && remainingTimestampDeltaChanges == 0 && remainingTimestampOffsetChanges == 0 && remainingSynchronizationSamples == 0; long[] offsets; int[] sizes; int maximumSize = 0; long[] timestamps; int[] flags; if (!isRechunkable) { offsets = new long[sampleCount]; sizes = new int[sampleCount]; timestamps = new long[sampleCount]; flags = new int[sampleCount]; long timestampTimeUnits = 0; long offset = 0; int remainingSamplesInChunk = 0; for (int i = 0; i < sampleCount; i++) { while (remainingSamplesInChunk == 0) { Assertions.checkState(chunkIterator.moveNext()); offset = chunkIterator.offset; remainingSamplesInChunk = chunkIterator.numSamples; } if (ctts != null) { while (remainingSamplesAtTimestampOffset == 0 && remainingTimestampOffsetChanges > 0) { remainingSamplesAtTimestampOffset = ctts.readUnsignedIntToInt(); timestampOffset = ctts.readInt(); remainingTimestampOffsetChanges--; } remainingSamplesAtTimestampOffset--; } offsets[i] = offset; sizes[i] = fixedSampleSize == 0 ? stsz.readUnsignedIntToInt() : fixedSampleSize; if (sizes[i] > maximumSize) { maximumSize = sizes[i]; } timestamps[i] = timestampTimeUnits + timestampOffset; flags[i] = stss == null ? 
C.SAMPLE_FLAG_SYNC : 0; if (i == nextSynchronizationSampleIndex) { flags[i] = C.SAMPLE_FLAG_SYNC; remainingSynchronizationSamples--; if (remainingSynchronizationSamples > 0) { nextSynchronizationSampleIndex = stss.readUnsignedIntToInt() - 1; } } timestampTimeUnits += timestampDeltaInTimeUnits; remainingSamplesAtTimestampDelta--; if (remainingSamplesAtTimestampDelta == 0 && remainingTimestampDeltaChanges > 0) { remainingSamplesAtTimestampDelta = stts.readUnsignedIntToInt(); timestampDeltaInTimeUnits = stts.readUnsignedIntToInt(); remainingTimestampDeltaChanges--; } offset += sizes[i]; remainingSamplesInChunk--; } Assertions.checkArgument(remainingSynchronizationSamples == 0); Assertions.checkArgument(remainingSamplesAtTimestampDelta == 0); Assertions.checkArgument(remainingSamplesInChunk == 0); Assertions.checkArgument(remainingTimestampDeltaChanges == 0); Assertions.checkArgument(remainingTimestampOffsetChanges == 0); } else { long[] chunkOffsetsBytes = new long[chunkIterator.length]; int[] chunkSampleCounts = new int[chunkIterator.length]; while (chunkIterator.moveNext()) { chunkOffsetsBytes[chunkIterator.index] = chunkIterator.offset; chunkSampleCounts[chunkIterator.index] = chunkIterator.numSamples; } FixedSampleSizeRechunker.Results rechunkedResults = FixedSampleSizeRechunker.rechunk( fixedSampleSize, chunkOffsetsBytes, chunkSampleCounts, timestampDeltaInTimeUnits); offsets = rechunkedResults.offsets; sizes = rechunkedResults.sizes; maximumSize = rechunkedResults.maximumSize; timestamps = rechunkedResults.timestamps; flags = rechunkedResults.flags; } if (track.editListDurations == null) { Util.scaleLargeTimestampsInPlace(timestamps, C.MICROS_PER_SECOND, track.timescale); return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags); } if (track.editListDurations.length == 1 && track.editListDurations[0] == 0) { for (int i = 0; i < timestamps.length; i++) { timestamps[i] = Util.scaleLargeTimestamp(timestamps[i] - track.editListMediaTimes[0], 
C.MICROS_PER_SECOND, track.timescale); } return new TrackSampleTable(offsets, sizes, maximumSize, timestamps, flags); } int editedSampleCount = 0; int nextSampleIndex = 0; boolean copyMetadata = false; for (int i = 0; i < track.editListDurations.length; i++) { long mediaTime = track.editListMediaTimes[i]; if (mediaTime != -1) { long duration = Util.scaleLargeTimestamp(track.editListDurations[i], track.timescale, track.movieTimescale); int startIndex = Util.binarySearchCeil(timestamps, mediaTime, true, true); int endIndex = Util.binarySearchCeil(timestamps, mediaTime + duration, true, false); editedSampleCount += endIndex - startIndex; copyMetadata = nextSampleIndex != startIndex; nextSampleIndex = endIndex; } } copyMetadata = editedSampleCount != sampleCount; long[] editedOffsets = copyMetadata ? new long[editedSampleCount] : offsets; int[] editedSizes = copyMetadata ? new int[editedSampleCount] : sizes; int editedMaximumSize = copyMetadata ? 0 : maximumSize; int[] editedFlags = copyMetadata ? 
new int[editedSampleCount] : flags; long[] editedTimestamps = new long[editedSampleCount]; long pts = 0; int sampleIndex = 0; for (int i = 0; i < track.editListDurations.length; i++) { long mediaTime = track.editListMediaTimes[i]; long duration = track.editListDurations[i]; if (mediaTime != -1) { long endMediaTime = mediaTime + Util.scaleLargeTimestamp(duration, track.timescale, track.movieTimescale); int startIndex = Util.binarySearchCeil(timestamps, mediaTime, true, true); int endIndex = Util.binarySearchCeil(timestamps, endMediaTime, true, false); if (copyMetadata) { int count = endIndex - startIndex; System.arraycopy(offsets, startIndex, editedOffsets, sampleIndex, count); System.arraycopy(sizes, startIndex, editedSizes, sampleIndex, count); System.arraycopy(flags, startIndex, editedFlags, sampleIndex, count); } for (int j = startIndex; j < endIndex; j++) { long ptsUs = Util.scaleLargeTimestamp(pts, C.MICROS_PER_SECOND, track.movieTimescale); long timeInSegmentUs = Util.scaleLargeTimestamp(timestamps[j] - mediaTime, C.MICROS_PER_SECOND, track.timescale); editedTimestamps[sampleIndex] = ptsUs + timeInSegmentUs; if (copyMetadata && editedSizes[sampleIndex] > editedMaximumSize) { editedMaximumSize = sizes[j]; } sampleIndex++; } } pts += duration; } boolean hasSyncSample = false; for (int i = 0; i < editedFlags.length && !hasSyncSample; i++) { hasSyncSample = (editedFlags[i] & C.SAMPLE_FLAG_SYNC) != 0; } if (!hasSyncSample) { throw new ParserException(STR); } return new TrackSampleTable(editedOffsets, editedSizes, editedMaximumSize, editedTimestamps, editedFlags); } | /**
* Parses an stbl atom (defined in 14496-12).
*
* @param track Track to which this sample table corresponds.
* @param stblAtom stbl (sample table) atom to parse.
* @return Sample table described by the stbl atom.
* @throws ParserException If the resulting sample sequence does not contain a sync sample.
*/ | Parses an stbl atom (defined in 14496-12) | parseStbl | {
"repo_name": "pittenga/ExoPlayer",
"path": "library/src/main/java/com/google/android/exoplayer/extractor/mp4/AtomParsers.java",
"license": "apache-2.0",
"size": 46387
} | [
"com.google.android.exoplayer.ParserException",
"com.google.android.exoplayer.util.Assertions",
"com.google.android.exoplayer.util.MimeTypes",
"com.google.android.exoplayer.util.ParsableByteArray",
"com.google.android.exoplayer.util.Util"
] | import com.google.android.exoplayer.ParserException; import com.google.android.exoplayer.util.Assertions; import com.google.android.exoplayer.util.MimeTypes; import com.google.android.exoplayer.util.ParsableByteArray; import com.google.android.exoplayer.util.Util; | import com.google.android.exoplayer.*; import com.google.android.exoplayer.util.*; | [
"com.google.android"
] | com.google.android; | 1,572,527 |
/**
 * Sets the supplier that provides the validator used to check client certificates.
 *
 * @param certificateValidator supplier of the client-certificate validator
 */
public void setCertificateValidator(final Supplier<CertificateValidator> certificateValidator) {
    this.certificateValidator = certificateValidator;
} | void function(final Supplier<CertificateValidator> certificateValidator) { this.certificateValidator = certificateValidator; } | /**
* Validator for client certificates
*/ | Validator for client certificates | setCertificateValidator | {
"repo_name": "Fabryprog/camel",
"path": "components/camel-milo/src/main/java/org/apache/camel/component/milo/server/MiloServerComponent.java",
"license": "apache-2.0",
"size": 16939
} | [
"java.util.function.Supplier",
"org.eclipse.milo.opcua.stack.core.application.CertificateValidator"
] | import java.util.function.Supplier; import org.eclipse.milo.opcua.stack.core.application.CertificateValidator; | import java.util.function.*; import org.eclipse.milo.opcua.stack.core.application.*; | [
"java.util",
"org.eclipse.milo"
] | java.util; org.eclipse.milo; | 2,853,104 |
/**
 * Fetches the info of a region server through the admin protocol.
 *
 * @param controller the RPC controller to use for the call
 * @param admin blocking admin interface of the target region server
 * @return the server info reported by the region server
 * @throws IOException if the remote call fails (unwrapped from the ServiceException)
 */
public static ServerInfo getServerInfo(final RpcController controller,
    final AdminService.BlockingInterface admin)
    throws IOException {
  final GetServerInfoRequest request = buildGetServerInfoRequest();
  try {
    return admin.getServerInfo(controller, request).getServerInfo();
  } catch (ServiceException se) {
    // Translate the protobuf service exception into the IOException callers expect.
    throw getRemoteException(se);
  }
}
private static GetServerInfoRequest GET_SERVER_INFO_REQUEST =
GetServerInfoRequest.newBuilder().build(); | static ServerInfo function(final RpcController controller, final AdminService.BlockingInterface admin) throws IOException { GetServerInfoRequest request = buildGetServerInfoRequest(); try { GetServerInfoResponse response = admin.getServerInfo(controller, request); return response.getServerInfo(); } catch (ServiceException se) { throw getRemoteException(se); } } private static GetServerInfoRequest GET_SERVER_INFO_REQUEST = GetServerInfoRequest.newBuilder().build(); | /**
* A helper to get the info of a region server using admin protocol.
* @return the server name
*/ | A helper to get the info of a region server using admin protocol | getServerInfo | {
"repo_name": "HubSpot/hbase",
"path": "hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java",
"license": "apache-2.0",
"size": 71418
} | [
"com.google.protobuf.RpcController",
"com.google.protobuf.ServiceException",
"java.io.IOException",
"org.apache.hadoop.hbase.protobuf.generated.AdminProtos"
] | import com.google.protobuf.RpcController; import com.google.protobuf.ServiceException; import java.io.IOException; import org.apache.hadoop.hbase.protobuf.generated.AdminProtos; | import com.google.protobuf.*; import java.io.*; import org.apache.hadoop.hbase.protobuf.generated.*; | [
"com.google.protobuf",
"java.io",
"org.apache.hadoop"
] | com.google.protobuf; java.io; org.apache.hadoop; | 2,296,941 |
@Test
public void testAcquireTokenValidateAuthorityReturnsInValid() throws InterruptedException, IOException {
// Canned DRS discovery document; served by the mocked connection on the first request
// so the initial DRS probe against the AD FS authority appears to succeed.
final String successfulDRSProbe =
"{\n"
+
" \"DeviceRegistrationService\": {\n"
+
" \"RegistrationEndpoint\": \"https://fs.lindft6.com/EnrollmentServer/DeviceEnrollmentWebService.svc\",\n"
+
" \"RegistrationResourceId\": \"urn:ms-drs:24E21DCF-106E-4FCE-821E-7204CD49A809\",\n"
+
" \"ServiceVersion\": \"1.0\"\n"
+
" },\n"
+
" \"AuthenticationService\": {\n"
+
" \"OAuth2\": {\n"
+
" \"AuthCodeEndpoint\": \"https://fs.lindft6.com/adfs/oauth2/authorize\",\n"
+
" \"TokenEndpoint\": \"https://fs.lindft6.com/adfs/oauth2/token\"\n"
+
" }\n"
+
" },\n"
+
" \"IdentityProviderService\": {\n"
+
" \"PassiveAuthEndpoint\": \"https://fs.lindft6.com/adfs/ls\"\n"
+
" }\n"
+
"}";
// Mock the HTTP layer. Consecutive stubs: the first body read returns the DRS probe
// document, the second (the follow-up validation request) returns an empty body.
final HttpURLConnection mockedConnection = Mockito.mock(HttpURLConnection.class);
Util.prepareMockedUrlConnection(mockedConnection);
Mockito.when(
mockedConnection.getInputStream()
).thenReturn(
Util.createInputStream(successfulDRSProbe)
).thenReturn(
Util.createInputStream("")
);
// Response codes line up with the bodies above: 200 for the DRS probe, then 404 for
// the follow-up request, which is what makes the authority fail validation.
Mockito.when(
mockedConnection.getResponseCode()
).thenReturn(
HttpURLConnection.HTTP_OK
).thenReturn(
HttpURLConnection.HTTP_NOT_FOUND
);
final FileMockContext mockContext = new FileMockContext(InstrumentationRegistry.getContext());
// Authority that should be rejected once the mocked validation request 404s.
final String invalidAuthority = "https://fs.lindft6.com/adfs";
// validateAuthority == true so authority validation is actually exercised.
final AuthenticationContext context = new AuthenticationContext(mockContext,
invalidAuthority, true);
final MockActivity testActivity = new MockActivity();
final CountDownLatch signal = new CountDownLatch(1);
MockAuthenticationCallback callback = new MockAuthenticationCallback(signal);
// Async path: acquireToken should fail authority validation and report via the callback.
context.acquireToken(testActivity.getTestActivity(), "resource", "clientid", "redirectUri", "[email protected]",
callback);
signal.await(CONTEXT_REQUEST_TIME_OUT, TimeUnit.MILLISECONDS);
// Check response in callback result: validation failure surfaces as
// DEVELOPER_AUTHORITY_IS_NOT_VALID_INSTANCE and no browser activity is launched.
assertNotNull("Error is not null", callback.getException());
assertEquals("NOT_VALID_URL", ADALError.DEVELOPER_AUTHORITY_IS_NOT_VALID_INSTANCE,
((AuthenticationException) callback.getException()).getCode());
assertTrue(
"Activity was not attempted to start with request code",
AuthenticationConstants.UIRequest.BROWSER_FLOW != testActivity.mStartActivityRequestCode);
// Sync test: the silent sync path carries no UPN, so the authority cannot be validated.
try {
context.acquireTokenSilentSync("resource", "clientid", "[email protected]");
Assert.fail("Validation should throw");
} catch (AuthenticationException exc) {
// AD FS cannot be validated in silent sync calls because no UPN is available
assertEquals("NOT_VALID_URL", ADALError.DEVELOPER_AUTHORITY_CAN_NOT_BE_VALIDED,
exc.getCode());
}
// Leave no cached tokens behind for subsequent tests.
clearCache(context);
} | void function() throws InterruptedException, IOException { final String successfulDRSProbe = "{\n" + STRDeviceRegistrationService\STR + STRRegistrationEndpoint\STRhttps: + STRRegistrationResourceId\STRurn:ms-drs:24E21DCF-106E-4FCE-821E-7204CD49A809\",\n" + STRServiceVersion\STR1.0\"\n" + STR + STRAuthenticationService\STR + STROAuth2\STR + STRAuthCodeEndpoint\STRhttps: + STRTokenEndpoint\STRhttps: + STR + STR + STRIdentityProviderService\STR + STRPassiveAuthEndpoint\STRhttps: + STR + "}"; final HttpURLConnection mockedConnection = Mockito.mock(HttpURLConnection.class); Util.prepareMockedUrlConnection(mockedConnection); Mockito.when( mockedConnection.getInputStream() ).thenReturn( Util.createInputStream(successfulDRSProbe) ).thenReturn( Util.createInputStream(STRhttps: final AuthenticationContext context = new AuthenticationContext(mockContext, invalidAuthority, true); final MockActivity testActivity = new MockActivity(); final CountDownLatch signal = new CountDownLatch(1); MockAuthenticationCallback callback = new MockAuthenticationCallback(signal); context.acquireToken(testActivity.getTestActivity(), STR, STR, STR, STR, callback); signal.await(CONTEXT_REQUEST_TIME_OUT, TimeUnit.MILLISECONDS); assertNotNull(STR, callback.getException()); assertEquals(STR, ADALError.DEVELOPER_AUTHORITY_IS_NOT_VALID_INSTANCE, ((AuthenticationException) callback.getException()).getCode()); assertTrue( STR, AuthenticationConstants.UIRequest.BROWSER_FLOW != testActivity.mStartActivityRequestCode); try { context.acquireTokenSilentSync(STR, STR, STR); Assert.fail(STR); } catch (AuthenticationException exc) { assertEquals(STR, ADALError.DEVELOPER_AUTHORITY_CAN_NOT_BE_VALIDED, exc.getCode()); } clearCache(context); } | /**
* Invalid authority returns
*
* @throws InterruptedException
*/ | Invalid authority returns | testAcquireTokenValidateAuthorityReturnsInValid | {
"repo_name": "iambmelt/azure-activedirectory-library-for-android",
"path": "adal/src/androidTest/java/com/microsoft/aad/adal/AuthenticationContextTest.java",
"license": "apache-2.0",
"size": 147982
} | [
"java.io.IOException",
"java.net.HttpURLConnection",
"java.util.concurrent.CountDownLatch",
"java.util.concurrent.TimeUnit",
"junit.framework.Assert",
"org.junit.Assert",
"org.mockito.Mockito"
] | import java.io.IOException; import java.net.HttpURLConnection; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import junit.framework.Assert; import org.junit.Assert; import org.mockito.Mockito; | import java.io.*; import java.net.*; import java.util.concurrent.*; import junit.framework.*; import org.junit.*; import org.mockito.*; | [
"java.io",
"java.net",
"java.util",
"junit.framework",
"org.junit",
"org.mockito"
] | java.io; java.net; java.util; junit.framework; org.junit; org.mockito; | 462,114 |
/**
 * Assigns the player this command targets.
 *
 * @param player the player to associate with this command
 */
public void setPlayer( Player player ) {
    this.player = player;
} | void function( Player player ) { this.player = player; } | /**
* Sets the player.
* @param player
*/ | Sets the player | setPlayer | {
"repo_name": "Salaboy/drools-game-engine",
"path": "drools-game-engine-core-api/src/main/java/org/drools/game/core/api/BaseCommand.java",
"license": "apache-2.0",
"size": 1291
} | [
"org.drools.game.model.api.Player"
] | import org.drools.game.model.api.Player; | import org.drools.game.model.api.*; | [
"org.drools.game"
] | org.drools.game; | 214,002 |
/**
 * Applies a debit or credit to the actual-expenditure amount of a sufficient funds
 * balance record. Credits reduce the expended amount; debits and budget entries
 * increase it. Any other debit/credit code leaves the balance unchanged.
 *
 * @param debitCreditCode code indicating whether the amount is a credit, debit, or budget entry
 * @param bal the sufficient funds balance record to update in place
 * @param amount the amount to apply
 */
protected void updateExpendedAmount(String debitCreditCode, SufficientFundBalances bal, KualiDecimal amount) {
    if (KFSConstants.GL_CREDIT_CODE.equals(debitCreditCode)) {
        // Credits decrease the expended total.
        bal.setAccountActualExpenditureAmt(bal.getAccountActualExpenditureAmt().subtract(amount));
    } else if (KFSConstants.GL_DEBIT_CODE.equals(debitCreditCode)
            || KFSConstants.GL_BUDGET_CODE.equals(debitCreditCode)) {
        // Debits and budget entries increase the expended total.
        bal.setAccountActualExpenditureAmt(bal.getAccountActualExpenditureAmt().add(amount));
    }
} | void function(String debitCreditCode, SufficientFundBalances bal, KualiDecimal amount) { if (KFSConstants.GL_CREDIT_CODE.equals(debitCreditCode)) { bal.setAccountActualExpenditureAmt(bal.getAccountActualExpenditureAmt().subtract(amount)); } else if (KFSConstants.GL_DEBIT_CODE.equals(debitCreditCode) KFSConstants.GL_BUDGET_CODE.equals(debitCreditCode)) { bal.setAccountActualExpenditureAmt(bal.getAccountActualExpenditureAmt().add(amount)); } } | /**
* Updates the expenditure amount of a given sufficient funds balance record
*
* @param debitCreditCode whether the the amount should be debited or credited to the SF balance
* @param bal a sufficient funds balance to update
* @param amount the amount to debit or credit
*/ | Updates the expenditure amount of a given sufficient funds balance record | updateExpendedAmount | {
"repo_name": "quikkian-ua-devops/will-financials",
"path": "kfs-core/src/main/java/org/kuali/kfs/gl/batch/service/impl/PostSufficientFundBalances.java",
"license": "agpl-3.0",
"size": 13249
} | [
"org.kuali.kfs.gl.businessobject.SufficientFundBalances",
"org.kuali.kfs.sys.KFSConstants",
"org.kuali.rice.core.api.util.type.KualiDecimal"
] | import org.kuali.kfs.gl.businessobject.SufficientFundBalances; import org.kuali.kfs.sys.KFSConstants; import org.kuali.rice.core.api.util.type.KualiDecimal; | import org.kuali.kfs.gl.businessobject.*; import org.kuali.kfs.sys.*; import org.kuali.rice.core.api.util.type.*; | [
"org.kuali.kfs",
"org.kuali.rice"
] | org.kuali.kfs; org.kuali.rice; | 463,741 |
/**
 * Walks the direct children of the given element, appending the value of every text
 * node to the internal text buffer and recursively visiting every child element via
 * the visitor callback; other node types are skipped.
 *
 * @param ele the ODF element whose child text should be collected
 */
protected void appendElementText(OdfElement ele) {
    for (Node child = ele.getFirstChild(); child != null; child = child.getNextSibling()) {
        switch (child.getNodeType()) {
            case Node.TEXT_NODE:
                mTextBuilder.append(child.getNodeValue());
                break;
            case Node.ELEMENT_NODE:
                // Recurse into child elements through the visitor.
                ((OdfElement) child).accept(this);
                break;
            default:
                // Other node kinds (comments, processing instructions, ...) carry no text.
                break;
        }
    }
} | void function(OdfElement ele) { Node node = ele.getFirstChild(); while (node != null) { if (node.getNodeType() == Node.TEXT_NODE) { mTextBuilder.append(node.getNodeValue()); } else if (node.getNodeType() == Node.ELEMENT_NODE) { OdfElement element = (OdfElement) node; element.accept(this); } node = node.getNextSibling(); } } | /**
* Append the text content of this element to string buffer.
*
* @param ele
* the ODF element whose text will be appended.
*/ | Append the text content of this element to string buffer | appendElementText | {
"repo_name": "jbjonesjr/geoproponis",
"path": "external/simple-odf-0.8.1-incubating-sources/org/odftoolkit/simple/common/TextExtractor.java",
"license": "gpl-2.0",
"size": 8189
} | [
"org.odftoolkit.odfdom.pkg.OdfElement",
"org.w3c.dom.Node"
] | import org.odftoolkit.odfdom.pkg.OdfElement; import org.w3c.dom.Node; | import org.odftoolkit.odfdom.pkg.*; import org.w3c.dom.*; | [
"org.odftoolkit.odfdom",
"org.w3c.dom"
] | org.odftoolkit.odfdom; org.w3c.dom; | 267,105 |
/**
 * Refreshes an existing customer authentication ticket from its refresh token.
 *
 * @param refreshToken token used to obtain a new authentication ticket after the access token expires
 * @param responseFields restricts which fields are included in the response
 * @return the refreshed customer authentication ticket
 * @throws Exception if the remote request fails
 */
public com.mozu.api.contracts.customer.CustomerAuthTicket refreshUserAuthTicket(String refreshToken, String responseFields) throws Exception
{
    final MozuClient<com.mozu.api.contracts.customer.CustomerAuthTicket> mozuClient =
            com.mozu.api.clients.commerce.customer.CustomerAuthTicketClient.refreshUserAuthTicketClient( refreshToken, responseFields);
    mozuClient.setContext(_apiContext);
    mozuClient.executeRequest();
    return mozuClient.getResult();
} | com.mozu.api.contracts.customer.CustomerAuthTicket function(String refreshToken, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.customer.CustomerAuthTicket> client = com.mozu.api.clients.commerce.customer.CustomerAuthTicketClient.refreshUserAuthTicketClient( refreshToken, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); } | /**
* Refreshes an existing authentication ticket for a customer account by providing the refresh token string.
* <p><pre><code>
* CustomerAuthTicket customerauthticket = new CustomerAuthTicket();
* CustomerAuthTicket customerAuthTicket = customerauthticket.refreshUserAuthTicket( refreshToken, responseFields);
* </code></pre></p>
* @param refreshToken Alphanumeric string used for access tokens. This token refreshes access for accounts by generating a new developer or application account authentication ticket after an access token expires.
* @param responseFields Use this field to include those fields which are not included by default.
* @return com.mozu.api.contracts.customer.CustomerAuthTicket
* @see com.mozu.api.contracts.customer.CustomerAuthTicket
*/ | Refreshes an existing authentication ticket for a customer account by providing the refresh token string. <code><code> CustomerAuthTicket customerauthticket = new CustomerAuthTicket(); CustomerAuthTicket customerAuthTicket = customerauthticket.refreshUserAuthTicket( refreshToken, responseFields); </code></code> | refreshUserAuthTicket | {
"repo_name": "bhewett/mozu-java",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/resources/commerce/customer/CustomerAuthTicketResource.java",
"license": "mit",
"size": 13490
} | [
"com.mozu.api.MozuClient"
] | import com.mozu.api.MozuClient; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 2,135,627 |
/**
 * Adds a simple (scalar-typed) property to the query being built. Properties without
 * persistence are skipped. A single-valued property is added in place; a
 * collection-valued property is added as a separate branch.
 *
 * @param ctx query builder context
 * @param propHandler handler of the simple property to add
 */
private static void addSimpleProperty(final QueryBuilderContext ctx,
        final SimplePropertyHandler propHandler) {

    // Skip properties that have no persistence.
    final ResourcePropertyPersistence propPersistence =
            getPropertyPersistence(ctx, propHandler);
    if (propPersistence == null)
        return;

    if (propHandler.isSingleValued()) {
        // Single-valued: add directly.
        addSingleSimpleProperty(ctx, propHandler, propPersistence);
    } else {
        // Collection-valued: build a dedicated branch for it.
        addBranch(ctx,
                createCollectionSimplePropertyBranch(ctx, propHandler, propPersistence),
                ctx.isSelected(ctx.getPropertyPath(propHandler.getName()), propHandler));
    }
} | static void function(final QueryBuilderContext ctx, final SimplePropertyHandler propHandler) { final ResourcePropertyPersistence propPersistence = getPropertyPersistence(ctx, propHandler); if (propPersistence == null) return; if (propHandler.isSingleValued()) addSingleSimpleProperty(ctx, propHandler, propPersistence); else addBranch(ctx, createCollectionSimplePropertyBranch(ctx, propHandler, propPersistence), ctx.isSelected(ctx.getPropertyPath(propHandler.getName()), propHandler)); } | /**
* Add simple property.
*
* @param ctx Query builder context.
* @param propHandler Property handler.
*/ | Add simple property | addSimpleProperty | {
"repo_name": "boylesoftware/thymes2",
"path": "src/main/java/org/bsworks/x2/services/persistence/impl/jdbc/QueryBuilder.java",
"license": "apache-2.0",
"size": 109833
} | [
"org.bsworks.x2.resource.ResourcePropertyPersistence",
"org.bsworks.x2.resource.SimplePropertyHandler"
] | import org.bsworks.x2.resource.ResourcePropertyPersistence; import org.bsworks.x2.resource.SimplePropertyHandler; | import org.bsworks.x2.resource.*; | [
"org.bsworks.x2"
] | org.bsworks.x2; | 1,280,993 |
/**
 * Opens the keystore as a stream. {@code keyStorePath} is first treated as a
 * filesystem path; if no such file exists it is looked up as a classpath resource.
 *
 * @return input stream over the keystore, or {@code null} if it was found neither
 *         on disk nor on the classpath.
 * @throws IOException if the keystore exists but cannot be opened.
 */
private InputStream keyStoreFile() throws IOException {
    final File onDisk = new File(keyStorePath);

    if (onDisk.exists())
        return new FileInputStream(onDisk);

    // Fall back to resolving the path against the classpath.
    final URL resource = KeystoreEncryptionSpi.class.getClassLoader().getResource(keyStorePath);

    return resource == null ? null : resource.openStream();
} | InputStream function() throws IOException { File abs = new File(keyStorePath); if (abs.exists()) return new FileInputStream(abs); URL clsPthRes = KeystoreEncryptionSpi.class.getClassLoader().getResource(keyStorePath); if (clsPthRes != null) return clsPthRes.openStream(); return null; } | /**
* {@code keyStorePath} could be absolute path or path to classpath resource.
*
* @return File for {@code keyStorePath}.
*/ | keyStorePath could be absolute path or path to classpath resource | keyStoreFile | {
"repo_name": "nizhikov/ignite",
"path": "modules/core/src/main/java/org/apache/ignite/spi/encryption/keystore/KeystoreEncryptionSpi.java",
"license": "apache-2.0",
"size": 14479
} | [
"java.io.File",
"java.io.FileInputStream",
"java.io.IOException",
"java.io.InputStream"
] | import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; | import java.io.*; | [
"java.io"
] | java.io; | 1,371,163 |
/**
 * Issues the internal GET {@code health} request against the system web service.
 *
 * @return the parsed health response
 */
public HealthResponse health() {
    GetRequest request = new GetRequest(path("health"));
    return call(request, HealthResponse.parser());
} | HealthResponse function() { return call( new GetRequest(path(STR)), HealthResponse.parser()); } | /**
*
* This is part of the internal API.
* This is a GET request.
* @see <a href="https://next.sonarqube.com/sonarqube/web_api/api/system/health">Further information about this action online (including a response example)</a>
* @since 6.6
*/ | This is part of the internal API. This is a GET request | health | {
"repo_name": "Godin/sonar",
"path": "sonar-ws/src/main/java/org/sonarqube/ws/client/system/SystemService.java",
"license": "lgpl-3.0",
"size": 5811
} | [
"org.sonarqube.ws.System",
"org.sonarqube.ws.client.GetRequest"
] | import org.sonarqube.ws.System; import org.sonarqube.ws.client.GetRequest; | import org.sonarqube.ws.*; import org.sonarqube.ws.client.*; | [
"org.sonarqube.ws"
] | org.sonarqube.ws; | 613,294 |
public static void removeReferencedOutsideClosure(HollowReadStateEngine stateEngine, Map<String, BitSet> matches) {
List<HollowSchema> orderedSchemas = HollowSchemaSorter.dependencyOrderedSchemaList(stateEngine);
Collections.reverse(orderedSchemas);
for(HollowSchema referencedSchema : orderedSchemas) {
if(matches.containsKey(referencedSchema.getName())) {
for(HollowSchema referencerSchema : orderedSchemas) {
if(referencerSchema == referencedSchema)
break;
if(matches.containsKey(referencedSchema.getName()) && matches.get(referencedSchema.getName()).cardinality() > 0)
traverseReferencesOutsideClosure(stateEngine, referencerSchema.getName(), referencedSchema.getName(), matches, REMOVE_REFERENCED_OUTSIDE_CLOSURE);
}
}
}
} | static void function(HollowReadStateEngine stateEngine, Map<String, BitSet> matches) { List<HollowSchema> orderedSchemas = HollowSchemaSorter.dependencyOrderedSchemaList(stateEngine); Collections.reverse(orderedSchemas); for(HollowSchema referencedSchema : orderedSchemas) { if(matches.containsKey(referencedSchema.getName())) { for(HollowSchema referencerSchema : orderedSchemas) { if(referencerSchema == referencedSchema) break; if(matches.containsKey(referencedSchema.getName()) && matches.get(referencedSchema.getName()).cardinality() > 0) traverseReferencesOutsideClosure(stateEngine, referencerSchema.getName(), referencedSchema.getName(), matches, REMOVE_REFERENCED_OUTSIDE_CLOSURE); } } } } | /**
* Remove any records from the given selection which are referenced by other records not in the selection.
* @param stateEngine the state engine
* @param matches the matches
*/ | Remove any records from the given selection which are referenced by other records not in the selection | removeReferencedOutsideClosure | {
"repo_name": "Netflix/hollow",
"path": "hollow/src/main/java/com/netflix/hollow/tools/traverse/TransitiveSetTraverser.java",
"license": "apache-2.0",
"size": 18451
} | [
"com.netflix.hollow.core.read.engine.HollowReadStateEngine",
"com.netflix.hollow.core.schema.HollowSchema",
"com.netflix.hollow.core.schema.HollowSchemaSorter",
"java.util.BitSet",
"java.util.Collections",
"java.util.List",
"java.util.Map"
] | import com.netflix.hollow.core.read.engine.HollowReadStateEngine; import com.netflix.hollow.core.schema.HollowSchema; import com.netflix.hollow.core.schema.HollowSchemaSorter; import java.util.BitSet; import java.util.Collections; import java.util.List; import java.util.Map; | import com.netflix.hollow.core.read.engine.*; import com.netflix.hollow.core.schema.*; import java.util.*; | [
"com.netflix.hollow",
"java.util"
] | com.netflix.hollow; java.util; | 2,029,080 |
private Component initComponent(String name, boolean autoCreateComponents) {
Component component = null;
if (autoCreateComponents) {
try {
if (log.isDebugEnabled()) {
log.debug("Using ComponentResolver: {} to resolve component with name: {}", getComponentResolver(), name);
}
// Mark the component as being created so we can detect circular
// requests.
//
// In spring apps, the component resolver may trigger a new getComponent
// because of the underlying bean factory and as the endpoints are
// registered as singleton, the spring factory creates the bean
// and then check the type so the getComponent is always triggered.
//
// Simple circular dependency:
//
// <camelContext id="camel" xmlns="http://camel.apache.org/schema/spring">
// <route>
// <from id="twitter" uri="twitter://timeline/home?type=polling"/>
// <log message="Got ${body}"/>
// </route>
// </camelContext>
//
// Complex circular dependency:
//
// <camelContext id="camel" xmlns="http://camel.apache.org/schema/spring">
// <route>
// <from id="log" uri="seda:test"/>
// <to id="seda" uri="log:test"/>
// </route>
// </camelContext>
//
// This would freeze the app (lock or infinite loop).
//
// See https://issues.apache.org/jira/browse/CAMEL-11225
componentsInCreation.get().add(name);
component = getComponentResolver().resolveComponent(name, this);
if (component != null) {
component.setCamelContext(this);
postInitComponent(name, component);
}
} catch (Exception e) {
throw new RuntimeCamelException("Cannot auto create component: " + name, e);
}
}
return component;
} | Component function(String name, boolean autoCreateComponents) { Component component = null; if (autoCreateComponents) { try { if (log.isDebugEnabled()) { log.debug(STR, getComponentResolver(), name); } componentsInCreation.get().add(name); component = getComponentResolver().resolveComponent(name, this); if (component != null) { component.setCamelContext(this); postInitComponent(name, component); } } catch (Exception e) { throw new RuntimeCamelException(STR + name, e); } } return component; } | /**
* Function to initialize a component and auto start. Returns null if the autoCreateComponents is disabled
*/ | Function to initialize a component and auto start. Returns null if the autoCreateComponents is disabled | initComponent | {
"repo_name": "punkhorn/camel-upstream",
"path": "core/camel-core/src/main/java/org/apache/camel/impl/AbstractCamelContext.java",
"license": "apache-2.0",
"size": 188124
} | [
"org.apache.camel.Component",
"org.apache.camel.RuntimeCamelException"
] | import org.apache.camel.Component; import org.apache.camel.RuntimeCamelException; | import org.apache.camel.*; | [
"org.apache.camel"
] | org.apache.camel; | 72,464 |
private void createXMLDoc (Document doc) {
int loc;
String kwd;
String type;
String val;
Element item;
Element elem;
Element query = doc.getDocumentElement();
// create query header (object attributes)
query.appendChild(queryHeader.toXML(doc));
// create query attributes
XML.add(query, "queryResultModeId", resultModeId);
XML.add(query, "queryPropogationType", propogationType);
XML.add(query, "queryPropogationLevels", propogationLevels);
XML.add(query, "queryMimeAccept", mimeAccept);
XML.add(query, "queryMaxResults", String.valueOf(maxResults));
XML.add(query, "queryResults", String.valueOf(numResults));
XML.add(query, "queryKWQString", kwqString);
// create and load queryStatistics
elem = doc.createElement("queryStatistics");
query.appendChild(elem);
for (Iterator i = statistics.iterator(); i.hasNext();) {
Statistic s = (Statistic) i.next();
elem.appendChild(s.toXML(doc));
}
// create and load querySelectSet
elem = doc.createElement("querySelectSet");
query.appendChild(elem);
for (Iterator i = selectElementSet.iterator(); i.hasNext();) {
QueryElement queryElement = (QueryElement) i.next();
elem.appendChild(queryElement.toXML(doc));
}
// create and load queryFromSet
elem = doc.createElement("queryFromSet");
query.appendChild(elem);
for (Iterator i = fromElementSet.iterator(); i.hasNext();) {
QueryElement queryElement = (QueryElement) i.next();
elem.appendChild(queryElement.toXML(doc));
}
// create and load queryWhereSet
elem = doc.createElement("queryWhereSet");
query.appendChild(elem);
for (Iterator i = whereElementSet.iterator(); i.hasNext();) {
QueryElement queryElement = (QueryElement) i.next();
elem.appendChild(queryElement.toXML(doc));
}
query.appendChild(result.toXML(doc));
} | void function (Document doc) { int loc; String kwd; String type; String val; Element item; Element elem; Element query = doc.getDocumentElement(); query.appendChild(queryHeader.toXML(doc)); XML.add(query, STR, resultModeId); XML.add(query, STR, propogationType); XML.add(query, STR, propogationLevels); XML.add(query, STR, mimeAccept); XML.add(query, STR, String.valueOf(maxResults)); XML.add(query, STR, String.valueOf(numResults)); XML.add(query, STR, kwqString); elem = doc.createElement(STR); query.appendChild(elem); for (Iterator i = statistics.iterator(); i.hasNext();) { Statistic s = (Statistic) i.next(); elem.appendChild(s.toXML(doc)); } elem = doc.createElement(STR); query.appendChild(elem); for (Iterator i = selectElementSet.iterator(); i.hasNext();) { QueryElement queryElement = (QueryElement) i.next(); elem.appendChild(queryElement.toXML(doc)); } elem = doc.createElement(STR); query.appendChild(elem); for (Iterator i = fromElementSet.iterator(); i.hasNext();) { QueryElement queryElement = (QueryElement) i.next(); elem.appendChild(queryElement.toXML(doc)); } elem = doc.createElement(STR); query.appendChild(elem); for (Iterator i = whereElementSet.iterator(); i.hasNext();) { QueryElement queryElement = (QueryElement) i.next(); elem.appendChild(queryElement.toXML(doc)); } query.appendChild(result.toXML(doc)); } | /**
* Creates a XML DOM structure from a keyword query string.
* Set up the basic structure then calls the query parser.
*/ | Creates a XML DOM structure from a keyword query string. Set up the basic structure then calls the query parser | createXMLDoc | {
"repo_name": "OSBI/oodt",
"path": "xmlquery/src/main/java/org/apache/oodt/xmlquery/XMLQuery.java",
"license": "apache-2.0",
"size": 32092
} | [
"java.util.Iterator",
"org.apache.oodt.commons.util.XML",
"org.w3c.dom.Document",
"org.w3c.dom.Element"
] | import java.util.Iterator; import org.apache.oodt.commons.util.XML; import org.w3c.dom.Document; import org.w3c.dom.Element; | import java.util.*; import org.apache.oodt.commons.util.*; import org.w3c.dom.*; | [
"java.util",
"org.apache.oodt",
"org.w3c.dom"
] | java.util; org.apache.oodt; org.w3c.dom; | 1,358,780 |
Reader getCharacterStream(String columnLabel) throws SQLException; | Reader getCharacterStream(String columnLabel) throws SQLException; | /**
* Retrieves the value of the designated column in the current row
* of this <code>ResultSet</code> object as a
* <code>java.io.Reader</code> object.
*
* @param columnLabel the label for the column specified with the SQL AS clause. If the SQL AS clause was not specified, then the label is the name of the column
* @return a <code>java.io.Reader</code> object that contains the column
* value; if the value is SQL <code>NULL</code>, the value returned is
* <code>null</code> in the Java programming language
* @exception SQLException if the columnLabel is not valid;
* if a database access error occurs or this method is
* called on a closed result set
* @since 1.2
*/ | Retrieves the value of the designated column in the current row of this <code>ResultSet</code> object as a <code>java.io.Reader</code> object | getCharacterStream | {
"repo_name": "wangsongpeng/jdk-src",
"path": "src/main/java/java/sql/ResultSet.java",
"license": "apache-2.0",
"size": 207055
} | [
"java.io.Reader"
] | import java.io.Reader; | import java.io.*; | [
"java.io"
] | java.io; | 2,618,536 |
public static java.util.Set extractFollowUpNoteSet(ims.domain.ILightweightDomainFactory domainFactory, ims.RefMan.vo.FollowUpNoteVoCollection voCollection)
{
return extractFollowUpNoteSet(domainFactory, voCollection, null, new HashMap());
}
| static java.util.Set function(ims.domain.ILightweightDomainFactory domainFactory, ims.RefMan.vo.FollowUpNoteVoCollection voCollection) { return extractFollowUpNoteSet(domainFactory, voCollection, null, new HashMap()); } | /**
* Create the ims.RefMan.domain.objects.FollowUpNote set from the value object collection.
* @param domainFactory - used to create existing (persistent) domain objects.
* @param voCollection - the collection of value objects
*/ | Create the ims.RefMan.domain.objects.FollowUpNote set from the value object collection | extractFollowUpNoteSet | {
"repo_name": "FreudianNM/openMAXIMS",
"path": "Source Library/openmaxims_workspace/ValueObjects/src/ims/RefMan/vo/domain/FollowUpNoteVoAssembler.java",
"license": "agpl-3.0",
"size": 20747
} | [
"java.util.HashMap"
] | import java.util.HashMap; | import java.util.*; | [
"java.util"
] | java.util; | 1,650,780 |
int layoutHistoryCategory(ResearchMainCategory mcat, int column) {
List<ResearchType> prods = commons.player().productionHistory.get(mcat);
if (prods == null) {
prods = Collections.emptyList();
}
Set<ResearchType> currProds = commons.player().productionLineTypes(mcat);
if (currProds == null) {
currProds = Collections.emptySet();
}
while (historyLines.size() <= column) {
historyLines.add(new ArrayList<ProductionHistoryLine>());
}
boolean changed = false;
int w = 0;
List<ProductionHistoryLine> list = historyLines.get(column);
int i = 0;
for (ResearchType prod : prods) {
// skip running production
if (currProds != null && currProds.contains(prod)) {
continue;
}
if (prod.nobuild) {
continue;
}
ProductionHistoryLine psl;
if (list.size() > i) {
psl = list.get(i);
} else {
// add necessary new lines
psl = new ProductionHistoryLine();
list.add(psl);
add(psl);
changed = true;
}
psl.rt = prod;
w = Math.max(w, psl.update());
i++;
}
// remove unnecessary lines
for (int j = list.size() - 1; j >= i; j--) {
ProductionHistoryLine psl = list.remove(j);
this.components.remove(psl);
changed = true;
}
if (changed) {
commons.control().moveMouse();
}
return w;
}
| int layoutHistoryCategory(ResearchMainCategory mcat, int column) { List<ResearchType> prods = commons.player().productionHistory.get(mcat); if (prods == null) { prods = Collections.emptyList(); } Set<ResearchType> currProds = commons.player().productionLineTypes(mcat); if (currProds == null) { currProds = Collections.emptySet(); } while (historyLines.size() <= column) { historyLines.add(new ArrayList<ProductionHistoryLine>()); } boolean changed = false; int w = 0; List<ProductionHistoryLine> list = historyLines.get(column); int i = 0; for (ResearchType prod : prods) { if (currProds != null && currProds.contains(prod)) { continue; } if (prod.nobuild) { continue; } ProductionHistoryLine psl; if (list.size() > i) { psl = list.get(i); } else { psl = new ProductionHistoryLine(); list.add(psl); add(psl); changed = true; } psl.rt = prod; w = Math.max(w, psl.update()); i++; } for (int j = list.size() - 1; j >= i; j--) { ProductionHistoryLine psl = list.remove(j); this.components.remove(psl); changed = true; } if (changed) { commons.control().moveMouse(); } return w; } | /**
* Add/remove lines of process.
* @param mcat the main category
* @param column the column
* @return the maximum width of the column based on the contents
*/ | Add/remove lines of process | layoutHistoryCategory | {
"repo_name": "p-smith/open-ig",
"path": "src/hu/openig/screen/panels/QuickProductionPanel.java",
"license": "lgpl-3.0",
"size": 22323
} | [
"hu.openig.model.ResearchMainCategory",
"hu.openig.model.ResearchType",
"java.util.ArrayList",
"java.util.Collections",
"java.util.List",
"java.util.Set"
] | import hu.openig.model.ResearchMainCategory; import hu.openig.model.ResearchType; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; | import hu.openig.model.*; import java.util.*; | [
"hu.openig.model",
"java.util"
] | hu.openig.model; java.util; | 2,275,670 |
public String generateNamedAttributeJspFragment(Node.NamedAttribute n,
String tagHandlerVar) throws JasperException {
String varName = n.getTemporaryVariableName();
out.printin("javax.servlet.jsp.tagext.JspFragment " + varName
+ " = ");
generateJspFragment(n, tagHandlerVar);
out.println(";");
return varName;
}
} | String function(Node.NamedAttribute n, String tagHandlerVar) throws JasperException { String varName = n.getTemporaryVariableName(); out.printin(STR + varName + STR); generateJspFragment(n, tagHandlerVar); out.println(";"); return varName; } } | /**
* Similar to generateNamedAttributeValue, but create a JspFragment
* instead.
*
* @param n
* The parent node of the named attribute
* @param tagHandlerVar
* The variable the tag handler is stored in, so the fragment
* knows its parent tag.
* @return The name of the temporary variable the fragment is stored in.
*/ | Similar to generateNamedAttributeValue, but create a JspFragment instead | generateNamedAttributeJspFragment | {
"repo_name": "barreiro/jastow",
"path": "src/main/java/org/apache/jasper/compiler/Generator.java",
"license": "apache-2.0",
"size": 173664
} | [
"org.apache.jasper.JasperException",
"org.apache.jasper.compiler.Node"
] | import org.apache.jasper.JasperException; import org.apache.jasper.compiler.Node; | import org.apache.jasper.*; import org.apache.jasper.compiler.*; | [
"org.apache.jasper"
] | org.apache.jasper; | 2,718,211 |
public void createAttributeField( AttributeField attributeField )
{
if ( ( attributeField != null ) && ( attributeField.getAttribute( ) != null ) )
{
int nId = AttributeFieldHome.create( attributeField );
attributeField.setIdField( nId );
}
} | void function( AttributeField attributeField ) { if ( ( attributeField != null ) && ( attributeField.getAttribute( ) != null ) ) { int nId = AttributeFieldHome.create( attributeField ); attributeField.setIdField( nId ); } } | /**
* Create a new attribute field
*
* @param attributeField
* the attribute field
*/ | Create a new attribute field | createAttributeField | {
"repo_name": "lutece-platform/lutece-core",
"path": "src/java/fr/paris/lutece/portal/service/user/attribute/AttributeFieldService.java",
"license": "bsd-3-clause",
"size": 4098
} | [
"fr.paris.lutece.portal.business.user.attribute.AttributeField",
"fr.paris.lutece.portal.business.user.attribute.AttributeFieldHome"
] | import fr.paris.lutece.portal.business.user.attribute.AttributeField; import fr.paris.lutece.portal.business.user.attribute.AttributeFieldHome; | import fr.paris.lutece.portal.business.user.attribute.*; | [
"fr.paris.lutece"
] | fr.paris.lutece; | 1,187,757 |
public static String getLocalClassPath(ClassLoader loader)
{
for (; loader != null; loader = loader.getParent()) {
if (loader instanceof EnvironmentClassLoader) {
return ((EnvironmentClassLoader) loader).getLocalClassPath();
}
}
return CauchoSystem.getClassPath();
} | static String function(ClassLoader loader) { for (; loader != null; loader = loader.getParent()) { if (loader instanceof EnvironmentClassLoader) { return ((EnvironmentClassLoader) loader).getLocalClassPath(); } } return CauchoSystem.getClassPath(); } | /**
* Returns the classpath for the environment level.
*/ | Returns the classpath for the environment level | getLocalClassPath | {
"repo_name": "dwango/quercus",
"path": "src/main/java/com/caucho/loader/Environment.java",
"license": "gpl-2.0",
"size": 25883
} | [
"com.caucho.server.util.CauchoSystem"
] | import com.caucho.server.util.CauchoSystem; | import com.caucho.server.util.*; | [
"com.caucho.server"
] | com.caucho.server; | 2,852,987 |
private void removeListeners() {
if (myListeners.size() > 0 && myInstance != null) {
Set<Entry<Object[], Method>> listeners = myListeners.entrySet();
Method[] methods = myInstance.getClass().getMethods();
for (Entry<Object[], Method> entry : listeners) {
try {
String methodName = entry.getValue().getName().replace("add", "remove");
for (int i = 0; i < methods.length; i++) {
if (methods[i].getName().equals(methodName)
&& methods[i].getParameterTypes().length == entry.getKey().length) {
try {
methods[i].invoke(myInstance, entry.getKey());
} catch (Exception e) {
LOG.log(Level.SEVERE, "Cannot remove listener", e);
}
}
}
} catch (Exception e) {
LOG.log(Level.SEVERE, "Cannot enumerate methods to remove listeners", e);
}
}
}
}
}
private static final Logger LOG = Logger.getLogger(Settings.class.getName());
private static PluginManager myPluginManager;
private Settings() {
myProperties = new Properties();
myOverrideProperties = new Properties();
try {
File preferencesFile = getPreferencesFile();
if (preferencesFile.exists())
myProperties.loadFromXML(new FileInputStream(preferencesFile));
} catch (Exception e) {
LOG.log(Level.SEVERE, "Could not load the (existing) preferences-file", e);
e.printStackTrace();
}
}
private static Settings instance; | void function() { if (myListeners.size() > 0 && myInstance != null) { Set<Entry<Object[], Method>> listeners = myListeners.entrySet(); Method[] methods = myInstance.getClass().getMethods(); for (Entry<Object[], Method> entry : listeners) { try { String methodName = entry.getValue().getName().replace("add", STR); for (int i = 0; i < methods.length; i++) { if (methods[i].getName().equals(methodName) && methods[i].getParameterTypes().length == entry.getKey().length) { try { methods[i].invoke(myInstance, entry.getKey()); } catch (Exception e) { LOG.log(Level.SEVERE, STR, e); } } } } catch (Exception e) { LOG.log(Level.SEVERE, STR, e); } } } } } private static final Logger LOG = Logger.getLogger(Settings.class.getName()); private static PluginManager myPluginManager; private Settings() { myProperties = new Properties(); myOverrideProperties = new Properties(); try { File preferencesFile = getPreferencesFile(); if (preferencesFile.exists()) myProperties.loadFromXML(new FileInputStream(preferencesFile)); } catch (Exception e) { LOG.log(Level.SEVERE, STR, e); e.printStackTrace(); } } private static Settings instance; | /**
* The plugin to use has changed.
* Try our best to remove all known listeners from
* the old instance.
*/ | The plugin to use has changed. Try our best to remove all known listeners from the old instance | removeListeners | {
"repo_name": "xafero/travelingsales",
"path": "libosm/src/main/java/org/openstreetmap/osm/Settings.java",
"license": "gpl-3.0",
"size": 39466
} | [
"java.io.File",
"java.io.FileInputStream",
"java.lang.reflect.Method",
"java.util.Map",
"java.util.Properties",
"java.util.Set",
"java.util.logging.Level",
"java.util.logging.Logger",
"org.java.plugin.PluginManager"
] | import java.io.File; import java.io.FileInputStream; import java.lang.reflect.Method; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.logging.Level; import java.util.logging.Logger; import org.java.plugin.PluginManager; | import java.io.*; import java.lang.reflect.*; import java.util.*; import java.util.logging.*; import org.java.plugin.*; | [
"java.io",
"java.lang",
"java.util",
"org.java.plugin"
] | java.io; java.lang; java.util; org.java.plugin; | 2,315,831 |
Iterable<String> head() throws IOException; | Iterable<String> head() throws IOException; | /**
* All lines above the body.
* @return List of lines
* @throws IOException If something goes wrong
*/ | All lines above the body | head | {
"repo_name": "ekondrashev/takes",
"path": "src/main/java/org/takes/Request.java",
"license": "mit",
"size": 2243
} | [
"java.io.IOException"
] | import java.io.IOException; | import java.io.*; | [
"java.io"
] | java.io; | 974,022 |
public static CommonIEsProvideCapabilities fromPerUnaligned(byte[] encodedBytes) {
CommonIEsProvideCapabilities result = new CommonIEsProvideCapabilities();
result.decodePerUnaligned(new BitStreamReader(encodedBytes));
return result;
} | static CommonIEsProvideCapabilities function(byte[] encodedBytes) { CommonIEsProvideCapabilities result = new CommonIEsProvideCapabilities(); result.decodePerUnaligned(new BitStreamReader(encodedBytes)); return result; } | /**
* Creates a new CommonIEsProvideCapabilities from encoded stream.
*/ | Creates a new CommonIEsProvideCapabilities from encoded stream | fromPerUnaligned | {
"repo_name": "google/supl-client",
"path": "src/main/java/com/google/location/suplclient/asn1/supl2/lpp/CommonIEsProvideCapabilities.java",
"license": "apache-2.0",
"size": 4632
} | [
"com.google.location.suplclient.asn1.base.BitStreamReader"
] | import com.google.location.suplclient.asn1.base.BitStreamReader; | import com.google.location.suplclient.asn1.base.*; | [
"com.google.location"
] | com.google.location; | 2,550,932 |
@Test
public void literals() {
System.out.println("[UNITTEST] ***** TEST: Statements with literals *****");
IOntologyBackend oro = new OpenRobotsOntology(conf);
Statement tmp;
//First, create statements with valid literals
try {
tmp = oro.createStatement("oro:fish oro:isFemale true");
assertTrue("The datatype has not been recognized!", tmp.getBoolean());
tmp = oro.createStatement("oro:fish oro:isFemale true^^xsd:boolean");
assertTrue("The datatype has not been recognized!", tmp.getBoolean());
tmp = oro.createStatement("oro:fish oro:isFemale \"true\"^^xsd:boolean");
assertTrue("The datatype has not been recognized!", tmp.getBoolean());
tmp = oro.createStatement("oro:fish oro:isFemale 'true'^^xsd:boolean");
assertTrue("The datatype has not been recognized!", tmp.getBoolean());
tmp = oro.createStatement("oro:fish oro:isFemale 1^^xsd:boolean");
assertTrue("The datatype has not been recognized!", tmp.getBoolean());
tmp = oro.createStatement("oro:fish oro:age 150");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Integer.class);
tmp = oro.createStatement("oro:fish oro:age \"150\"^^xsd:int");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Integer.class);
tmp = oro.createStatement("oro:fish oro:age 150^^xsd:int");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Integer.class);
tmp = oro.createStatement("oro:fish oro:age \"150\"^^<http://www.w3.org/2001/XMLSchema#int>");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Integer.class);
tmp = oro.createStatement("oro:fish oro:age 150^^xsd:integer");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == java.math.BigInteger.class);
tmp = oro.createStatement("oro:fish oro:age 150.0");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Double.class);
tmp = oro.createStatement("oro:fish oro:age \"150\"^^xsd:double");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Double.class);
tmp = oro.createStatement("oro:fish oro:age 150^^xsd:double");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Double.class);
tmp = oro.createStatement("oro:fish oro:age \"150\"^^xsd:float");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == Float.class);
tmp = oro.createStatement("oro:fish oro:name Dudule");
assertTrue("Dudule should'nt be recognized as a string here, but as an instance.", tmp.getObject().isResource());
tmp = oro.createStatement("oro:fish oro:name 'Dudule'");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == String.class);
tmp = oro.createStatement("oro:fish oro:name \"Dudule\"");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == String.class);
tmp = oro.createStatement("oro:fish oro:name Dudule^^xsd:string");
assertTrue("The datatype has not been recognized!", tmp.getLiteral().getDatatype().getJavaClass() == String.class);
} catch (IllegalStatementException e) {
fail("Error while creating statements with valid literals!");
}
//Then, create statements with invalid literals
try {
tmp = oro.createStatement("oro:fish oro:isFemale true^^xsd:int");
fail("Statements with invalid literals have been created!");
} catch (IllegalStatementException e) {
}
try {
tmp = oro.createStatement("oro:fish oro:age 150.0^^xsd:int");
fail("Statements with invalid literals have been created!");
} catch (IllegalStatementException e) {
}
System.out.println("[UNITTEST] ***** Test successful *****");
}
| void function() { System.out.println(STR); IOntologyBackend oro = new OpenRobotsOntology(conf); Statement tmp; try { tmp = oro.createStatement(STR); assertTrue(STR, tmp.getBoolean()); tmp = oro.createStatement(STR); assertTrue(STR, tmp.getBoolean()); tmp = oro.createStatement(STRtrue\STR); assertTrue(STR, tmp.getBoolean()); tmp = oro.createStatement(STR); assertTrue(STR, tmp.getBoolean()); tmp = oro.createStatement(STR); assertTrue(STR, tmp.getBoolean()); tmp = oro.createStatement(STR); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Integer.class); tmp = oro.createStatement(STR150\STR); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Integer.class); tmp = oro.createStatement(STR); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Integer.class); tmp = oro.createStatement(STR150\"^^<http: assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Integer.class); tmp = oro.createStatement("oro:fish oro:age 150^^xsd:integer"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == java.math.BigInteger.class); tmp = oro.createStatement("oro:fish oro:age 150.0"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Double.class); tmp = oro.createStatement(STR150\"^^xsd:double"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Double.class); tmp = oro.createStatement("oro:fish oro:age 150^^xsd:double"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Double.class); tmp = oro.createStatement(STR150\"^^xsd:float"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == Float.class); tmp = oro.createStatement("oro:fish oro:name DuduleSTRDudule should'nt be recognized as a string here, but as an instance.STRoro:fish oro:name 'Dudule'"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == String.class); tmp = oro.createStatement("oro:fish oro:name \STR"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == String.class); tmp = 
oro.createStatement("oro:fish oro:name Dudule^^xsd:string"); assertTrue(STR, tmp.getLiteral().getDatatype().getJavaClass() == String.class); } catch (IllegalStatementException e) { fail("Error while creating statements with valid literals!STRoro:fish oro:isFemale true^^xsd:intSTRStatements with invalid literals have been created!STRoro:fish oro:age 150.0^^xsd:intSTRStatements with invalid literals have been created!STR[UNITTEST] ***** Test successful *****"); } | /**
* This test try to create statements with various types of literals.
*/ | This test try to create statements with various types of literals | literals | {
"repo_name": "severin-lemaignan/oro-server",
"path": "src/laas/openrobots/ontology/tests/OpenRobotsOntologyTest.java",
"license": "isc",
"size": 91339
} | [
"com.hp.hpl.jena.rdf.model.Statement",
"org.junit.Assert",
"org.junit.Test"
] | import com.hp.hpl.jena.rdf.model.Statement; import org.junit.Assert; import org.junit.Test; | import com.hp.hpl.jena.rdf.model.*; import org.junit.*; | [
"com.hp.hpl",
"org.junit"
] | com.hp.hpl; org.junit; | 1,756,920 |
public RuleSetBuilder<K, T> rule() {
return new RuleSetBuilder<K, T>(this, new HashMap<K, Set<Condition>>());
} | RuleSetBuilder<K, T> function() { return new RuleSetBuilder<K, T>(this, new HashMap<K, Set<Condition>>()); } | /**
* Create a new rule in the rule builder.
*
* @return A rule set builder to specify a rule for the rule map.
*/ | Create a new rule in the rule builder | rule | {
"repo_name": "defunct/winnow",
"path": "src/main/java/com/goodworkalan/winnow/RuleMapBuilder.java",
"license": "gpl-3.0",
"size": 4210
} | [
"java.util.HashMap",
"java.util.Set"
] | import java.util.HashMap; import java.util.Set; | import java.util.*; | [
"java.util"
] | java.util; | 133,107 |
default Bounds<Double> getBounds(int index) {
DoubleSolution solution = this;
Double lowerBound = solution.getLowerBound(index);
Double upperBound = solution.getUpperBound(index);
return Bounds.create(lowerBound, upperBound);
} | default Bounds<Double> getBounds(int index) { DoubleSolution solution = this; Double lowerBound = solution.getLowerBound(index); Double upperBound = solution.getUpperBound(index); return Bounds.create(lowerBound, upperBound); } | /**
* It is often the case that we use both bounds together. Searching twice the
* same index may be counter productive in this case. This methods allows to
* offer this optimization, although its default implementation just delegates
* to the separate methods.
*/ | It is often the case that we use both bounds together. Searching twice the same index may be counter productive in this case. This methods allows to offer this optimization, although its default implementation just delegates to the separate methods | getBounds | {
"repo_name": "matthieu-vergne/jMetal",
"path": "jmetal-core/src/main/java/org/uma/jmetal/solution/doublesolution/DoubleSolution.java",
"license": "mit",
"size": 1192
} | [
"org.uma.jmetal.util.bounds.Bounds"
] | import org.uma.jmetal.util.bounds.Bounds; | import org.uma.jmetal.util.bounds.*; | [
"org.uma.jmetal"
] | org.uma.jmetal; | 2,306,011 |
/*
 * Decides the next tile to move to in order to reach a spot from which a
 * destructible wall can be blown up (original doc, in French: "La methode
 * qui precise la tile prochaine pour aller a une bonus").
 */
private void detruir() throws StopRequestException
{
checkInterruption();
// Candidate targets: destructible tiles, narrowed to those with a clear
// neighbour, then to tiles suitable for destruction, then aligned ones.
List<AiTile> a= dangerZone.findDesctructibleTiles();
List<AiTile> b= findClearNeighbors(a);
List<AiTile> dst= dangerZone.findTilesForDestruct(b);
List<AiTile> dest=aligner(dst);
AiTile tile = null;
AiTile kare=null;
Iterator<AiTile> it =dest.iterator();
if (it.hasNext())
{
// Only pick a new target when the hero has not yet reached the current one.
if(!hero.getTile().equals(targetTile))
kare=it.next();
else
kare=null;
if(kare!=null){
// Run A* from the hero's position to the chosen target tile.
as=new Astar (dangerZone,hero.getCol(),hero.getLine(),kare.getCol(),kare.getLine());
if (as.findSecurePath())
{
Deque<Integer> deque = as.getPath();
findTargetTile(deque);
// Re-read the path and pop the first (col, line) pair as the next step.
Deque<Integer> deq = as.getPath();
if(!deq.isEmpty()) {
int tempx = deq.poll(), tempy = deq.poll();
tile= zone.getTile(tempy,tempx );
}
// If the first step is the hero's own tile, skip ahead to the next pair.
// NOTE(review): this compares tiles with == rather than equals(); it only
// works if the zone hands out singleton tile instances — confirm.
if (tile ==hero.getTile()){
if (!deq.isEmpty()) {
int tempx = deq.poll(); int tempy = deq.poll();
tile= zone.getTile(tempy,tempx );
}}}
nextTile=tile;
}}
else
// No candidate at all: stay on the current tile.
nextTile=currentTile;
}
| void function() throws StopRequestException { checkInterruption(); List<AiTile> a= dangerZone.findDesctructibleTiles(); List<AiTile> b= findClearNeighbors(a); List<AiTile> dst= dangerZone.findTilesForDestruct(b); List<AiTile> dest=aligner(dst); AiTile tile = null; AiTile kare=null; Iterator<AiTile> it =dest.iterator(); if (it.hasNext()) { if(!hero.getTile().equals(targetTile)) kare=it.next(); else kare=null; if(kare!=null){ as=new Astar (dangerZone,hero.getCol(),hero.getLine(),kare.getCol(),kare.getLine()); if (as.findSecurePath()) { Deque<Integer> deque = as.getPath(); findTargetTile(deque); Deque<Integer> deq = as.getPath(); if(!deq.isEmpty()) { int tempx = deq.poll(), tempy = deq.poll(); tile= zone.getTile(tempy,tempx ); } if (tile ==hero.getTile()){ if (!deq.isEmpty()) { int tempx = deq.poll(); int tempy = deq.poll(); tile= zone.getTile(tempy,tempx ); }}} nextTile=tile; }} else nextTile=currentTile; } | /*****************************************************************
* La methode qui precise la tile prochaine pour aller a une bonus
* @throws StopRequestException
* Description manquante !
*/ | La methode qui precise la tile prochaine pour aller a une bonus | detruir | {
"repo_name": "vlabatut/totalboumboum",
"path": "resources/ai/org/totalboumboum/ai/v200910/ais/bektasmazilyah/v5/BektasMazilyah.java",
"license": "gpl-2.0",
"size": 27016
} | [
"java.util.Deque",
"java.util.Iterator",
"java.util.List",
"org.totalboumboum.ai.v200910.adapter.communication.StopRequestException",
"org.totalboumboum.ai.v200910.adapter.data.AiTile"
] | import java.util.Deque; import java.util.Iterator; import java.util.List; import org.totalboumboum.ai.v200910.adapter.communication.StopRequestException; import org.totalboumboum.ai.v200910.adapter.data.AiTile; | import java.util.*; import org.totalboumboum.ai.v200910.adapter.communication.*; import org.totalboumboum.ai.v200910.adapter.data.*; | [
"java.util",
"org.totalboumboum.ai"
] | java.util; org.totalboumboum.ai; | 987,908 |
// NOTE(review): the method signature for this block is truncated in this
// record; the statements below make the given Field accessible and writable.
field.setAccessible(true);
// Reflectively open up Field.modifiers so the final flag can be cleared.
Field modifiersField = Field.class.getDeclaredField("modifiers");
modifiersField.setAccessible(true);
// 0xFFFFFFEF clears bit 0x10, i.e. Modifier.FINAL, leaving every other
// modifier bit of the field intact.
modifiersField.setInt(field, field.getModifiers() & 0xFFFFFFEF);
return field;
}
* Sets the field accessible and removes final modifiers
*
* @param field Field to set accessible
* @return the Field
* @throws ReflectiveOperationException (usually never)
*/ | Sets the field accessible and removes final modifiers | setAccessible | {
"repo_name": "tastybento/greenhouses",
"path": "src/org/inventivetalent/reflection/util/AccessUtil.java",
"license": "gpl-2.0",
"size": 3162
} | [
"java.lang.reflect.Field"
] | import java.lang.reflect.Field; | import java.lang.reflect.*; | [
"java.lang"
] | java.lang; | 1,647,465 |
/**
 * Copies the content of a single tool from a source site to a destination
 * site.
 *
 * @param sessionid the id of a valid session
 * @param sourcesiteid the id of the source site
 * @param destinationsiteid the id of the destination site
 * @param toolid the tool id for which content should be copied
 * @return "success", or the exception class name and message on failure
 */
@WebMethod
@Path("/copySiteContentForTool")
@Produces("text/plain")
@GET
public String copySiteContentForTool(
        @WebParam(name = "sessionid", partName = "sessionid") @QueryParam("sessionid") String sessionid,
        @WebParam(name = "sourcesiteid", partName = "sourcesiteid") @QueryParam("sourcesiteid") String sourcesiteid,
        @WebParam(name = "destinationsiteid", partName = "destinationsiteid") @QueryParam("destinationsiteid") String destinationsiteid,
        @WebParam(name = "toolid", partName = "toolid") @QueryParam("toolid") String toolid) {
    Session session = establishSession(sessionid);
    try {
        //check if both sites exist
        Site site = siteService.getSite(sourcesiteid);
        site = siteService.getSite(destinationsiteid);
        // If not admin, check maintainer membership in the source site
        // NOTE(review): at this point `site` refers to the destination site
        // (it was reassigned above), while the comment and log message talk
        // about the source site — confirm which site this check should target.
        if (!securityService.isSuperUser(session.getUserId()) && !securityService.unlock(SiteService.SECURE_UPDATE_SITE, site.getReference())) {
            LOG.warn("WS copyResources(): Permission denied. Must be super user to copy a site in which you are not a maintainer.");
            throw new RuntimeException("WS copyResources(): Permission denied. Must be super user to copy a site in which you are not a maintainer.");
        }
        //transfer content
        transferCopyEntities(
                toolid,
                contentHostingService.getSiteCollection(sourcesiteid),
                contentHostingService.getSiteCollection(destinationsiteid));
    } catch (Exception e) {
        LOG.error("WS copySiteContentForTool(): " + e.getClass().getName() + " : " + e.getMessage(), e);
        return e.getClass().getName() + " : " + e.getMessage();
    }
    return "success";
}
* Copy the content from a site to another site for only the content of the specified tool
*
* @param sessionid the id of a valid session
* @param sourcesiteid the id of the source site
* @param destinationsiteid the id of the destiny site
* @param toolid the tool id for which content should be copied
* @return success or exception message
*/ | Copy the content from a site to another site for only the content of the specified tool | copySiteContentForTool | {
"repo_name": "duke-compsci290-spring2016/sakai",
"path": "webservices/cxf/src/java/org/sakaiproject/webservices/SakaiScript.java",
"license": "apache-2.0",
"size": 213301
} | [
"javax.jws.WebParam",
"javax.ws.rs.Path",
"javax.ws.rs.Produces",
"javax.ws.rs.QueryParam",
"org.sakaiproject.site.api.Site",
"org.sakaiproject.site.api.SiteService",
"org.sakaiproject.tool.api.Session"
] | import javax.jws.WebParam; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import org.sakaiproject.site.api.Site; import org.sakaiproject.site.api.SiteService; import org.sakaiproject.tool.api.Session; | import javax.jws.*; import javax.ws.rs.*; import org.sakaiproject.site.api.*; import org.sakaiproject.tool.api.*; | [
"javax.jws",
"javax.ws",
"org.sakaiproject.site",
"org.sakaiproject.tool"
] | javax.jws; javax.ws; org.sakaiproject.site; org.sakaiproject.tool; | 1,087,362 |
/**
 * Begins creating or updating a private link service in the specified
 * resource group as a long-running operation.
 *
 * @param resourceGroupName The name of the resource group.
 * @param serviceName The name of the private link service.
 * @param parameters Parameters supplied to the create or update private link service operation.
 * @return the {@link PollerFlux} for polling of the private link service resource.
 */
@ServiceMethod(returns = ReturnType.LONG_RUNNING_OPERATION)
PollerFlux<PollResult<PrivateLinkServiceInner>, PrivateLinkServiceInner> beginCreateOrUpdateAsync(
    String resourceGroupName, String serviceName, PrivateLinkServiceInner parameters);
* Creates or updates an private link service in the specified resource group.
*
* @param resourceGroupName The name of the resource group.
* @param serviceName The name of the private link service.
* @param parameters Parameters supplied to the create or update private link service operation.
* @throws IllegalArgumentException thrown if parameters fail the validation.
* @throws com.azure.resourcemanager.network.models.ErrorException thrown if the request is rejected by server.
* @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent.
* @return the {@link PollerFlux} for polling of private link service resource.
*/ | Creates or updates an private link service in the specified resource group | beginCreateOrUpdateAsync | {
"repo_name": "Azure/azure-sdk-for-java",
"path": "sdk/resourcemanager/azure-resourcemanager-network/src/main/java/com/azure/resourcemanager/network/fluent/PrivateLinkServicesClient.java",
"license": "mit",
"size": 60022
} | [
"com.azure.core.annotation.ReturnType",
"com.azure.core.annotation.ServiceMethod",
"com.azure.core.management.polling.PollResult",
"com.azure.core.util.polling.PollerFlux",
"com.azure.resourcemanager.network.fluent.models.PrivateLinkServiceInner"
] | import com.azure.core.annotation.ReturnType; import com.azure.core.annotation.ServiceMethod; import com.azure.core.management.polling.PollResult; import com.azure.core.util.polling.PollerFlux; import com.azure.resourcemanager.network.fluent.models.PrivateLinkServiceInner; | import com.azure.core.annotation.*; import com.azure.core.management.polling.*; import com.azure.core.util.polling.*; import com.azure.resourcemanager.network.fluent.models.*; | [
"com.azure.core",
"com.azure.resourcemanager"
] | com.azure.core; com.azure.resourcemanager; | 236,196 |
protected void getScriptsAt(List<Script> scripts, String scriptRoot, String relativeLocation) {
File currentLocation = getScriptFile(scriptRoot + "/" + relativeLocation);
if (currentLocation.isFile() && isScriptFile(currentLocation)) {
Script script = createScript(currentLocation, relativeLocation);
scripts.add(script);
return;
}
// recursively scan sub folders for script files
if (currentLocation.isDirectory()) {
for (File subLocation : currentLocation.listFiles()) {
getScriptsAt(scripts, scriptRoot,
"".equals(relativeLocation) ? subLocation.getName()
: relativeLocation + "/" + subLocation.getName());
}
}
} | void function(List<Script> scripts, String scriptRoot, String relativeLocation) { File currentLocation = getScriptFile(scriptRoot + "/" + relativeLocation); if (currentLocation.isFile() && isScriptFile(currentLocation)) { Script script = createScript(currentLocation, relativeLocation); scripts.add(script); return; } if (currentLocation.isDirectory()) { for (File subLocation : currentLocation.listFiles()) { getScriptsAt(scripts, scriptRoot, STR/" + subLocation.getName()); } } } | /**
* Adds all scripts available in the given directory or one of its subdirectories to the
* given List of files
*
* @param relativeLocation The current script location, not null
* @param scriptRoot The indexes of the current parent folders, not null
* @param scripts The list to which the available script have to be added
*/ | Adds all scripts available in the given directory or one of its subdirectories to the given List of files | getScriptsAt | {
"repo_name": "ferronrsmith/easyrec",
"path": "easyrec-testutils/src/main/java/org/easyrec/test/unitils/SpringResourceScriptSource.java",
"license": "gpl-3.0",
"size": 5913
} | [
"java.io.File",
"java.util.List",
"org.unitils.dbmaintainer.script.Script"
] | import java.io.File; import java.util.List; import org.unitils.dbmaintainer.script.Script; | import java.io.*; import java.util.*; import org.unitils.dbmaintainer.script.*; | [
"java.io",
"java.util",
"org.unitils.dbmaintainer"
] | java.io; java.util; org.unitils.dbmaintainer; | 1,953,546 |
/**
 * Re-adds a state to the waitlist; the state must already be contained in
 * this reached set.
 *
 * @param s the abstract state to re-add
 */
public void reAddToWaitlist(AbstractState s);
* Re-add a state to the waitlist which is already contained in the reached set.
*/ | Re-add a state to the waitlist which is already contained in the reached set | reAddToWaitlist | {
"repo_name": "TommesDee/cpachecker",
"path": "src/org/sosy_lab/cpachecker/core/reachedset/ReachedSet.java",
"license": "apache-2.0",
"size": 2701
} | [
"org.sosy_lab.cpachecker.core.interfaces.AbstractState"
] | import org.sosy_lab.cpachecker.core.interfaces.AbstractState; | import org.sosy_lab.cpachecker.core.interfaces.*; | [
"org.sosy_lab.cpachecker"
] | org.sosy_lab.cpachecker; | 27,344 |
/**
 * Returns the PIDSourceType currently used by the internal PID controller.
 *
 * @return the current {@code PIDSourceType}
 */
@Override
public PIDSourceType getPIDSourceType() {
	return this.pidSourceType;
}
| PIDSourceType function() { return pidSourceType; } | /**
* Gets the internal PID Controller's current PIDSourceType
* @return the internal PID Controller's current PIDSourceType
*/ | Gets the internal PID Controller's current PIDSourceType | getPIDSourceType | {
"repo_name": "KHS-Robotics/Demonator6",
"path": "src/org/usfirst/frc/team4342/robot/subsystems/TankDrive.java",
"license": "mit",
"size": 10489
} | [
"edu.wpi.first.wpilibj.PIDSourceType"
] | import edu.wpi.first.wpilibj.PIDSourceType; | import edu.wpi.first.wpilibj.*; | [
"edu.wpi.first"
] | edu.wpi.first; | 2,318,007 |
public final StoredClassCatalog getClassCatalog() {
return javaCatalog;
} | final StoredClassCatalog function() { return javaCatalog; } | /**
* Return the class catalog.
*/ | Return the class catalog | getClassCatalog | {
"repo_name": "racker/omnibus",
"path": "source/db-5.0.26.NC/examples_java/src/collections/ship/entity/SampleDatabase.java",
"license": "apache-2.0",
"size": 12180
} | [
"com.sleepycat.bind.serial.StoredClassCatalog"
] | import com.sleepycat.bind.serial.StoredClassCatalog; | import com.sleepycat.bind.serial.*; | [
"com.sleepycat.bind"
] | com.sleepycat.bind; | 226,479 |
/**
 * Initializes the incomingLinks references table.
 *
 * @param settings settings for the incomingLinks ReferencesTable
 */
public void initIncomingLinks(ReferencesTableSettings settings);
* Init the incomingLinks
* @param settings settings for the incomingLinks ReferencesTable
*/ | Init the incomingLinks | initIncomingLinks | {
"repo_name": "prabushi/devstudio-tooling-esb",
"path": "plugins/org.wso2.developerstudio.eclipse.gmf.esb.edit/src-gen/org/wso2/developerstudio/eclipse/gmf/esb/parts/FaultMediatorInputConnectorPropertiesEditionPart.java",
"license": "apache-2.0",
"size": 1636
} | [
"org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings"
] | import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings; | import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.*; | [
"org.eclipse.emf"
] | org.eclipse.emf; | 2,350,627 |
/**
 * Retrieves a page of entities from the named entity list.
 *
 * @param entityListFullName the full name of the entity list, in name@nameSpace format
 * @param pageSize the number of results per page
 * @param startIndex the zero-based offset into the complete result set
 * @param filter filter expressions narrowing the query
 * @param sortBy the element and direction used to sort the results
 * @param responseFields filtering syntax controlling how much data is returned
 * @return the matching entity collection
 * @throws Exception if the request fails
 */
public com.mozu.api.contracts.mzdb.EntityCollection getEntities(String entityListFullName, Integer pageSize, Integer startIndex, String filter, String sortBy, String responseFields) throws Exception
{
	MozuClient<com.mozu.api.contracts.mzdb.EntityCollection> mozuClient =
			com.mozu.api.clients.platform.entitylists.EntityClient.getEntitiesClient(entityListFullName, pageSize, startIndex, filter, sortBy, responseFields);
	mozuClient.setContext(_apiContext);
	mozuClient.executeRequest();
	return mozuClient.getResult();
}
| com.mozu.api.contracts.mzdb.EntityCollection function(String entityListFullName, Integer pageSize, Integer startIndex, String filter, String sortBy, String responseFields) throws Exception { MozuClient<com.mozu.api.contracts.mzdb.EntityCollection> client = com.mozu.api.clients.platform.entitylists.EntityClient.getEntitiesClient( entityListFullName, pageSize, startIndex, filter, sortBy, responseFields); client.setContext(_apiContext); client.executeRequest(); return client.getResult(); } | /**
*
* <p><pre><code>
* Entity entity = new Entity();
* EntityCollection entityCollection = entity.getEntities( entityListFullName, pageSize, startIndex, filter, sortBy, responseFields);
* </code></pre></p>
* @param entityListFullName The full name of the EntityList including namespace in name@nameSpace format
* @param filter A set of filter expressions representing the search parameters for a query. This parameter is optional. Refer to [Sorting and Filtering](../../../../Developer/api-guides/sorting-filtering.htm) for a list of supported filters.
* @param pageSize When creating paged results from a query, this value indicates the zero-based offset in the complete result set where the returned entities begin. For example, with this parameter set to 25, to get the 51st through the 75th items, set startIndex to 50.
* @param responseFields Filtering syntax appended to an API call to increase or decrease the amount of data returned inside a JSON object. This parameter should only be used to retrieve data. Attempting to update data using this parameter may cause data loss.
* @param sortBy The element to sort the results by and the channel in which the results appear. Either ascending (a-z) or descending (z-a) channel. Optional. Refer to [Sorting and Filtering](../../../../Developer/api-guides/sorting-filtering.htm) for more information.
* @param startIndex When creating paged results from a query, this value indicates the zero-based offset in the complete result set where the returned entities begin. For example, with pageSize set to 25, to get the 51st through the 75th items, set this parameter to 50.
* @return com.mozu.api.contracts.mzdb.EntityCollection
* @see com.mozu.api.contracts.mzdb.EntityCollection
*/ | <code><code> Entity entity = new Entity(); EntityCollection entityCollection = entity.getEntities( entityListFullName, pageSize, startIndex, filter, sortBy, responseFields); </code></code> | getEntities | {
"repo_name": "Mozu/mozu-java",
"path": "mozu-javaasync-core/src/main/java/com/mozu/api/resources/platform/entitylists/EntityResource.java",
"license": "mit",
"size": 19274
} | [
"com.mozu.api.MozuClient"
] | import com.mozu.api.MozuClient; | import com.mozu.api.*; | [
"com.mozu.api"
] | com.mozu.api; | 2,237,725 |
public void testInvalidClassName() {
String metadataFile = PATH + "eclipselink-oxm.xml";
InputStream iStream = getClass().getClassLoader().getResourceAsStream(metadataFile);
if (iStream == null) {
fail("Couldn't load metadata file [" + metadataFile + "]");
}
HashMap<String, Source> metadataSourceMap = new HashMap<String, Source>();
metadataSourceMap.put(CONTEXT_PATH, new StreamSource(iStream));
Map<String, Map<String, Source>> properties = new HashMap<String, Map<String, Source>>();
properties.put(JAXBContextFactory.ECLIPSELINK_OXM_XML_KEY, metadataSourceMap);
try {
JAXBContextFactory.createContext(CONTEXT_PATH, getClass().getClassLoader(), properties);
} catch (JAXBException e) {
return;
} catch (Exception x) {
}
fail("The expected JAXBException was not thrown.");
} | void function() { String metadataFile = PATH + STR; InputStream iStream = getClass().getClassLoader().getResourceAsStream(metadataFile); if (iStream == null) { fail(STR + metadataFile + "]"); } HashMap<String, Source> metadataSourceMap = new HashMap<String, Source>(); metadataSourceMap.put(CONTEXT_PATH, new StreamSource(iStream)); Map<String, Map<String, Source>> properties = new HashMap<String, Map<String, Source>>(); properties.put(JAXBContextFactory.ECLIPSELINK_OXM_XML_KEY, metadataSourceMap); try { JAXBContextFactory.createContext(CONTEXT_PATH, getClass().getClassLoader(), properties); } catch (JAXBException e) { return; } catch (Exception x) { } fail(STR); } | /**
* Tests declaration of a non-existent class via eclipselink-oxm.xml
*
* Negative test.
*/ | Tests declaration of a non-existent class via eclipselink-oxm.xml Negative test | testInvalidClassName | {
"repo_name": "RallySoftware/eclipselink.runtime",
"path": "moxy/eclipselink.moxy.test/src/org/eclipse/persistence/testing/jaxb/externalizedmetadata/exceptions/contextfactory/ExceptionHandlingTestCases.java",
"license": "epl-1.0",
"size": 11167
} | [
"java.io.InputStream",
"java.util.HashMap",
"java.util.Map",
"javax.xml.transform.Source",
"javax.xml.transform.stream.StreamSource",
"org.eclipse.persistence.exceptions.JAXBException",
"org.eclipse.persistence.jaxb.JAXBContextFactory"
] | import java.io.InputStream; import java.util.HashMap; import java.util.Map; import javax.xml.transform.Source; import javax.xml.transform.stream.StreamSource; import org.eclipse.persistence.exceptions.JAXBException; import org.eclipse.persistence.jaxb.JAXBContextFactory; | import java.io.*; import java.util.*; import javax.xml.transform.*; import javax.xml.transform.stream.*; import org.eclipse.persistence.exceptions.*; import org.eclipse.persistence.jaxb.*; | [
"java.io",
"java.util",
"javax.xml",
"org.eclipse.persistence"
] | java.io; java.util; javax.xml; org.eclipse.persistence; | 84,803 |
/**
 * Tests the usage of FileOutputFormatWrap by initializing the Node module
 * of the same name and delegating to its JavaScript-based test function.
 *
 * @throws Exception if anything goes wrong
 */
@Test
public void testFileOutputFormat() throws Exception {
    final String moduleName = "HadoopOutputTest-testFileOutputFormat";
    final LembosNodeEnvironment env = new LembosNodeEnvironment(moduleName,
                                                                new File(TestUtils.getModulePath(moduleName)),
                                                                null);
    env.initialize();
    // Call JavaScript-based Tests
    env.callFunctionSync((Function)env.getModule().get("testFileOutputFormat", env.getModule()), new Object[0]);
}
* Tests the usage of {@link FileOutputFormatWrap}.
*
* @throws Exception if anything goes wrong
*/ | Tests the usage of <code>FileOutputFormatWrap</code> | testFileOutputFormat | {
"repo_name": "apigee/lembos",
"path": "src/test/java/io/apigee/lembos/node/modules/HadoopOutputTest.java",
"license": "apache-2.0",
"size": 4464
} | [
"io.apigee.lembos.mapreduce.LembosNodeEnvironment",
"io.apigee.lembos.utils.TestUtils",
"java.io.File",
"org.mozilla.javascript.Function"
] | import io.apigee.lembos.mapreduce.LembosNodeEnvironment; import io.apigee.lembos.utils.TestUtils; import java.io.File; import org.mozilla.javascript.Function; | import io.apigee.lembos.mapreduce.*; import io.apigee.lembos.utils.*; import java.io.*; import org.mozilla.javascript.*; | [
"io.apigee.lembos",
"java.io",
"org.mozilla.javascript"
] | io.apigee.lembos; java.io; org.mozilla.javascript; | 1,835,385 |
/**
 * Returns the current contents of this stream decoded as a String using the
 * named character encoding.
 *
 * @param enc the name of a supported character encoding
 * @return the buffer contents decoded with {@code enc}
 * @throws UnsupportedEncodingException if {@code enc} is not supported
 */
public String toString(String enc) throws UnsupportedEncodingException {
    return new String(buf, 0, count, enc);
}
* Returns the contents of this ByteArrayOutputStream as a string converted
* according to the encoding declared in {@code enc}.
*
* @param enc
* a string representing the encoding to use when translating
* this stream to a string.
* @return this stream's current contents as an encoded string.
* @throws UnsupportedEncodingException
* if the provided encoding is not supported.
*/ | Returns the contents of this ByteArrayOutputStream as a string converted according to the encoding declared in enc | toString | {
"repo_name": "PaytmLabs/cassandra",
"path": "src/java/org/apache/cassandra/io/util/FastByteArrayOutputStream.java",
"license": "apache-2.0",
"size": 8966
} | [
"java.io.UnsupportedEncodingException"
] | import java.io.UnsupportedEncodingException; | import java.io.*; | [
"java.io"
] | java.io; | 892,571 |
/**
 * Returns the component used as this renderer's title.
 *
 * @return the title panel component
 */
@Override
public JComponent getTitleComponent() {
    return panTitle;
}
* DOCUMENT ME!
*
* @return DOCUMENT ME!
*/ | DOCUMENT ME | getTitleComponent | {
"repo_name": "cismet/cids-custom-wuppertal",
"path": "src/main/java/de/cismet/cids/custom/objectrenderer/wunda_blau/AlkisPointRenderer.java",
"license": "lgpl-3.0",
"size": 118383
} | [
"javax.swing.JComponent"
] | import javax.swing.JComponent; | import javax.swing.*; | [
"javax.swing"
] | javax.swing; | 509,282 |
/**
 * Forwards character content to the registered SAX1 and SAX2 handlers.
 *
 * @param text The content.
 * @param augs Additional information that may include infoset augmentations
 *
 * @throws XNIException Thrown by handler to signal an error.
 */
public void characters(XMLString text, Augmentations augs) throws XNIException {

    // if type is union (XML Schema) it is possible that we receive
    // character call with empty data
    if (text.length == 0) {
        return;
    }

    try {
        // SAX1
        if (fDocumentHandler != null) {
            // REVISIT: should we support schema-normalized-value for SAX1 events
            //
            fDocumentHandler.characters(text.ch, text.offset, text.length);
        }

        // SAX2
        if (fContentHandler != null) {
            fContentHandler.characters(text.ch, text.offset, text.length);
        }
    }
    catch (SAXException e) {
        // Wrap SAX failures so they propagate through the XNI pipeline.
        throw new XNIException(e);
    }

} // characters(XMLString)
* Character content.
*
* @param text The content.
* @param augs Additional information that may include infoset augmentations
*
* @throws XNIException Thrown by handler to signal an error.
*/ | Character content | characters | {
"repo_name": "BIORIMP/biorimp",
"path": "BIO-RIMP/test_data/code/xerces/src/org/apache/xerces/parsers/AbstractSAXParser.java",
"license": "gpl-2.0",
"size": 91799
} | [
"org.apache.xerces.xni.Augmentations",
"org.apache.xerces.xni.XMLString",
"org.apache.xerces.xni.XNIException",
"org.xml.sax.SAXException"
] | import org.apache.xerces.xni.Augmentations; import org.apache.xerces.xni.XMLString; import org.apache.xerces.xni.XNIException; import org.xml.sax.SAXException; | import org.apache.xerces.xni.*; import org.xml.sax.*; | [
"org.apache.xerces",
"org.xml.sax"
] | org.apache.xerces; org.xml.sax; | 852,023 |
@Nullable
public String getMovieOverview() {
String res = getStringOrNull(MovieColumns.OVERVIEW);
return res;
} | String function() { String res = getStringOrNull(MovieColumns.OVERVIEW); return res; } | /**
* Get the {@code overview} value.
* Can be {@code null}.
*/ | Get the overview value. Can be null | getMovieOverview | {
"repo_name": "AhmadElMelegy/Popular-Movies-App",
"path": "app/src/main/java/com/melegy/movies/moviesapp/provider/trailer/TrailerCursor.java",
"license": "gpl-3.0",
"size": 4776
} | [
"com.melegy.movies.moviesapp.provider.movie.MovieColumns"
] | import com.melegy.movies.moviesapp.provider.movie.MovieColumns; | import com.melegy.movies.moviesapp.provider.movie.*; | [
"com.melegy.movies"
] | com.melegy.movies; | 476,992 |
/**
 * Sets the sort order operation to apply to the data set.
 *
 * @param columnId the id of the column to sort by
 * @param sortOrder the sort order to apply
 */
void sort(String columnId, SortOrder sortOrder);
* Set the sort order operation to apply to the data set.
*
* @param columnId The name of the column to sort.
* @param sortOrder The sort order.
*/ | Set the sort order operation to apply to the data set | sort | {
"repo_name": "porcelli-forks/dashbuilder",
"path": "dashbuilder-client/dashbuilder-displayer-client/src/main/java/org/dashbuilder/displayer/client/DataSetHandler.java",
"license": "apache-2.0",
"size": 5541
} | [
"org.dashbuilder.dataset.sort.SortOrder"
] | import org.dashbuilder.dataset.sort.SortOrder; | import org.dashbuilder.dataset.sort.*; | [
"org.dashbuilder.dataset"
] | org.dashbuilder.dataset; | 521,795 |
public static Trades adaptTrades(List<VirtExTrade> virtExTrades, CurrencyPair currencyPair) {
List<Trade> tradesList = new ArrayList<Trade>();
for (VirtExTrade virtexTrade : virtExTrades) {
tradesList.add(adaptTrade(virtexTrade, currencyPair));
}
return new Trades(tradesList, TradeSortType.SortByID);
} | static Trades function(List<VirtExTrade> virtExTrades, CurrencyPair currencyPair) { List<Trade> tradesList = new ArrayList<Trade>(); for (VirtExTrade virtexTrade : virtExTrades) { tradesList.add(adaptTrade(virtexTrade, currencyPair)); } return new Trades(tradesList, TradeSortType.SortByID); } | /**
* Adapts a VirtExTrade[] to a Trades Object
*
* @param virtExTrades The VirtEx trade data
* @return The trades
*/ | Adapts a VirtExTrade[] to a Trades Object | adaptTrades | {
"repo_name": "SovietBear/xchange-clean",
"path": "xchange-cavirtex/src/main/java/com/xeiam/xchange/virtex/v2/VirtExAdapters.java",
"license": "mit",
"size": 3486
} | [
"com.xeiam.xchange.currency.CurrencyPair",
"com.xeiam.xchange.dto.marketdata.Trade",
"com.xeiam.xchange.dto.marketdata.Trades",
"com.xeiam.xchange.virtex.v2.dto.marketdata.VirtExTrade",
"java.util.ArrayList",
"java.util.List"
] | import com.xeiam.xchange.currency.CurrencyPair; import com.xeiam.xchange.dto.marketdata.Trade; import com.xeiam.xchange.dto.marketdata.Trades; import com.xeiam.xchange.virtex.v2.dto.marketdata.VirtExTrade; import java.util.ArrayList; import java.util.List; | import com.xeiam.xchange.currency.*; import com.xeiam.xchange.dto.marketdata.*; import com.xeiam.xchange.virtex.v2.dto.marketdata.*; import java.util.*; | [
"com.xeiam.xchange",
"java.util"
] | com.xeiam.xchange; java.util; | 2,890,915 |
/**
 * Registers a callback to be invoked when the current account has changed.
 *
 * @param onAccountChangeListener the callback that will run
 */
public void setOnAccountChangeListener(OnAccountChangeListener onAccountChangeListener) {
    mOnAccountChangeListener = onAccountChangeListener;
}
* Registers a callback to be invoked when the current account has changed.
*
* @param onAccountChangeListener The callback that will run.
*/ | Registers a callback to be invoked when the current account has changed | setOnAccountChangeListener | {
"repo_name": "flaviojmendes/hefesto",
"path": "matlib/src/main/java/com/blunderer/materialdesignlibrary/views/ANavigationDrawerAccountsLayout.java",
"license": "gpl-3.0",
"size": 8741
} | [
"com.blunderer.materialdesignlibrary.listeners.OnAccountChangeListener"
] | import com.blunderer.materialdesignlibrary.listeners.OnAccountChangeListener; | import com.blunderer.materialdesignlibrary.listeners.*; | [
"com.blunderer.materialdesignlibrary"
] | com.blunderer.materialdesignlibrary; | 2,802,771 |
/**
 * Retrieves the channel metadata for the specified pixels set.
 *
 * @param ctx The security context.
 * @param pixelsID The id of the pixels set.
 * @return A list of channels.
 * @throws DSOutOfServiceException If the connection is broken.
 * @throws DSAccessException If an error occurred while trying to
 *                           retrieve data from the OMERO service.
 */
public List<ChannelData> getChannelsMetadata(SecurityContext ctx,
        long pixelsID)
    throws DSOutOfServiceException, DSAccessException;
| List<ChannelData> function(SecurityContext ctx, long pixelsID) throws DSOutOfServiceException, DSAccessException; | /**
* Retrieves the channel data for the specified pixels set.
*
* @param ctx The security context.
* @param pixelsID The id of pixels set.
* @return A list of channels.
* @throws DSOutOfServiceException If the connection is broken, or logged in
* @throws DSAccessException If an error occurred while trying to
* retrieve data from OMERO service.
*/ | Retrieves the channel data for the specified pixels set | getChannelsMetadata | {
"repo_name": "jballanc/openmicroscopy",
"path": "components/insight/SRC/org/openmicroscopy/shoola/env/data/OmeroMetadataService.java",
"license": "gpl-2.0",
"size": 30267
} | [
"java.util.List",
"org.openmicroscopy.shoola.env.data.util.SecurityContext"
] | import java.util.List; import org.openmicroscopy.shoola.env.data.util.SecurityContext; | import java.util.*; import org.openmicroscopy.shoola.env.data.util.*; | [
"java.util",
"org.openmicroscopy.shoola"
] | java.util; org.openmicroscopy.shoola; | 1,572,827 |
/**
 * Runs the given SQL query and returns the values of the selected column as
 * a list of Long.
 *
 * @param sql the query to execute
 * @param args the positional query arguments
 * @return the resulting Long values
 */
List<Long> queryForLongs(String sql, Object...args);
| List<Long> queryForLongs(String sql, Object...args); | /**
* Find Long list by select column.
*/ | Find Long list by select column | queryForLongs | {
"repo_name": "ineunetOS/knife",
"path": "knife-dao/src/main/java/com/ineunet/knife/persist/dao/IJdbcDao.java",
"license": "apache-2.0",
"size": 3743
} | [
"java.util.List"
] | import java.util.List; | import java.util.*; | [
"java.util"
] | java.util; | 141,399 |
/**
 * Returns the Context for this fragment: its hosting Activity.
 *
 * @return the context
 * @see Context
 */
@Override
public Context getContext() {
    return getActivity();
}
* Method which returns the Context
*
* @return The context
* @see Context
*/ | Method which returns the Context | getContext | {
"repo_name": "JosBarranquero/AssistApp",
"path": "app/src/main/java/com/bitbits/assistapp/fragments/Messaging_Fragment.java",
"license": "gpl-2.0",
"size": 5512
} | [
"android.content.Context"
] | import android.content.Context; | import android.content.*; | [
"android.content"
] | android.content; | 1,050,765 |
public void unbindProxy(ServiceProxy proxy)
{
// meant to be overridden
} | void function(ServiceProxy proxy) { } | /**
* Implementing subclasses should use this method to release the proxy.
* @param proxy the proxy to release.
*/ | Implementing subclasses should use this method to release the proxy | unbindProxy | {
"repo_name": "hieupham007/Titanium_Mobile",
"path": "android/titanium/src/java/org/appcelerator/titanium/TiBaseService.java",
"license": "apache-2.0",
"size": 2241
} | [
"org.appcelerator.titanium.proxy.ServiceProxy"
] | import org.appcelerator.titanium.proxy.ServiceProxy; | import org.appcelerator.titanium.proxy.*; | [
"org.appcelerator.titanium"
] | org.appcelerator.titanium; | 40,274 |
public static TextFieldTrait get(PropertyDeclaration property) {
return find(property).orElseThrow(NoSuchElementException::new);
} | static TextFieldTrait function(PropertyDeclaration property) { return find(property).orElseThrow(NoSuchElementException::new); } | /**
* Returns the trait for the property.
* @param property the property
* @return the trait
*/ | Returns the trait for the property | get | {
"repo_name": "ashigeru/asakusafw",
"path": "directio-project/asakusa-directio-dmdl/src/main/java/com/asakusafw/dmdl/directio/text/TextFieldTrait.java",
"license": "apache-2.0",
"size": 6070
} | [
"com.asakusafw.dmdl.semantics.PropertyDeclaration",
"java.util.NoSuchElementException"
] | import com.asakusafw.dmdl.semantics.PropertyDeclaration; import java.util.NoSuchElementException; | import com.asakusafw.dmdl.semantics.*; import java.util.*; | [
"com.asakusafw.dmdl",
"java.util"
] | com.asakusafw.dmdl; java.util; | 1,786,521 |
// ===========================================================================
public void _glue(CommandContext ci)
{
gluer.glue(getIds(ci));
} | void function(CommandContext ci) { gluer.glue(getIds(ci)); } | /**
* Requests the framework to glue one or more services.
* It requires 1 parameter:
*
* - a partial class name. All classes matching the given name will be glued.
*
* @param ci The current context.
*/ | Requests the framework to glue one or more services. It requires 1 parameter: - a partial class name. All classes matching the given name will be glued | _glue | {
"repo_name": "gevaerts/Gluewine",
"path": "imp/src/java/org/gluewine/console/impl/SystemCommandProvider.java",
"license": "apache-2.0",
"size": 27950
} | [
"org.gluewine.console.CommandContext"
] | import org.gluewine.console.CommandContext; | import org.gluewine.console.*; | [
"org.gluewine.console"
] | org.gluewine.console; | 674,530 |
@Override
public void writeToNBT(NBTTagCompound tCompound) {
super.writeToNBT(tCompound);
for (int i = 0; i < 20; i++) {
if (allInventories[i] != null) {
NBTTagCompound tc = new NBTTagCompound();
allInventories[i].writeToNBT(tc);
tCompound.setTag("inventory" + i, tc);
}
}
} | void function(NBTTagCompound tCompound) { super.writeToNBT(tCompound); for (int i = 0; i < 20; i++) { if (allInventories[i] != null) { NBTTagCompound tc = new NBTTagCompound(); allInventories[i].writeToNBT(tc); tCompound.setTag(STR + i, tc); } } } | /**
* This function gets called whenever the world/chunk is saved
*/ | This function gets called whenever the world/chunk is saved | writeToNBT | {
"repo_name": "raulsmail/GlowPower",
"path": "src/main/java/com/bluepowermod/tileentities/tier1/TileBuffer.java",
"license": "gpl-3.0",
"size": 4966
} | [
"net.minecraft.nbt.NBTTagCompound"
] | import net.minecraft.nbt.NBTTagCompound; | import net.minecraft.nbt.*; | [
"net.minecraft.nbt"
] | net.minecraft.nbt; | 243,425 |
public Klass getKlass(int suiteID) {
Assert.that(suiteID < classes.length);
return classes[suiteID];
} | Klass function(int suiteID) { Assert.that(suiteID < classes.length); return classes[suiteID]; } | /**
* Gets the class in this suite corresponding to a given class number.
*
* @param suiteID the class number of the class to retrieve
* @return the class corresponding to <code>suiteID</code>
*/ | Gets the class in this suite corresponding to a given class number | getKlass | {
"repo_name": "squawk-mirror/squawk",
"path": "cldc/src/com/sun/squawk/Suite.java",
"license": "gpl-2.0",
"size": 50789
} | [
"com.sun.squawk.util.Assert"
] | import com.sun.squawk.util.Assert; | import com.sun.squawk.util.*; | [
"com.sun.squawk"
] | com.sun.squawk; | 281,598 |
protected ClientApplicationInfo getClientApplicationInfo() {
return clientApplicationInfo;
} | ClientApplicationInfo function() { return clientApplicationInfo; } | /**
* Get the client application info.
*
* @return the client application info
*/ | Get the client application info | getClientApplicationInfo | {
"repo_name": "geomajas/geomajas-project-deskmanager",
"path": "gwt/src/main/java/org/geomajas/plugin/deskmanager/client/gwt/geodesk/AbstractGwtUserApplication.java",
"license": "agpl-3.0",
"size": 3272
} | [
"org.geomajas.configuration.client.ClientApplicationInfo"
] | import org.geomajas.configuration.client.ClientApplicationInfo; | import org.geomajas.configuration.client.*; | [
"org.geomajas.configuration"
] | org.geomajas.configuration; | 1,374,593 |
@Override
public Map<Bytes, Map<Column, Bytes>> get(Collection<Bytes> rows, Set<Column> columns) {
Map<Bytes, Map<Column, Bytes>> rowColVal = txb.get(rows, columns);
for (Map.Entry<Bytes, Map<Column, Bytes>> rowEntry : rowColVal.entrySet()) {
for (Map.Entry<Column, Bytes> colEntry : rowEntry.getValue().entrySet()) {
txLog.filteredAdd(
LogEntry.newGet(rowEntry.getKey(), colEntry.getKey(), colEntry.getValue()), filter);
}
}
return rowColVal;
} | Map<Bytes, Map<Column, Bytes>> function(Collection<Bytes> rows, Set<Column> columns) { Map<Bytes, Map<Column, Bytes>> rowColVal = txb.get(rows, columns); for (Map.Entry<Bytes, Map<Column, Bytes>> rowEntry : rowColVal.entrySet()) { for (Map.Entry<Column, Bytes> colEntry : rowEntry.getValue().entrySet()) { txLog.filteredAdd( LogEntry.newGet(rowEntry.getKey(), colEntry.getKey(), colEntry.getValue()), filter); } } return rowColVal; } | /**
* Logs GETs for returned Row/Columns. Requests that return no data will not be logged.
*/ | Logs GETs for returned Row/Columns. Requests that return no data will not be logged | get | {
"repo_name": "mikewalch/fluo-recipes",
"path": "modules/core/src/main/java/org/apache/fluo/recipes/transaction/RecordingTransactionBase.java",
"license": "apache-2.0",
"size": 8443
} | [
"java.util.Collection",
"java.util.Map",
"java.util.Set",
"org.apache.fluo.api.data.Bytes",
"org.apache.fluo.api.data.Column"
] | import java.util.Collection; import java.util.Map; import java.util.Set; import org.apache.fluo.api.data.Bytes; import org.apache.fluo.api.data.Column; | import java.util.*; import org.apache.fluo.api.data.*; | [
"java.util",
"org.apache.fluo"
] | java.util; org.apache.fluo; | 2,183,427 |
java.util.Vector list = new java.util.Vector();
for (int i = 0; i < fGrammars.length; i++) {
for (Entry entry = fGrammars[i] ; entry != null ; entry = entry.next) {
if (entry.desc.getGrammarType().equals(XMLGrammarDescription.XML_SCHEMA))
list.addElement(entry.grammar);
}
}
int size = list.size();
if (size == 0)
return null;
SchemaGrammar[] gs = new SchemaGrammar[size];
for (int i = 0; i < size; i++)
gs[i] = (SchemaGrammar)list.elementAt(i);
return new XSModelImpl(gs);
} | java.util.Vector list = new java.util.Vector(); for (int i = 0; i < fGrammars.length; i++) { for (Entry entry = fGrammars[i] ; entry != null ; entry = entry.next) { if (entry.desc.getGrammarType().equals(XMLGrammarDescription.XML_SCHEMA)) list.addElement(entry.grammar); } } int size = list.size(); if (size == 0) return null; SchemaGrammar[] gs = new SchemaGrammar[size]; for (int i = 0; i < size; i++) gs[i] = (SchemaGrammar)list.elementAt(i); return new XSModelImpl(gs); } | /**
* Return an <code>XSModel</code> that represents components in
* the schema grammars in this pool implementation.
*
* @return an <code>XSModel</code> representing this schema grammar
*/ | Return an <code>XSModel</code> that represents components in the schema grammars in this pool implementation | toXSModel | {
"repo_name": "TheTypoMaster/Scaper",
"path": "openjdk/jaxp/drop_included/jaxp_src/src/com/sun/org/apache/xerces/internal/impl/xs/util/XSGrammarPool.java",
"license": "gpl-2.0",
"size": 2202
} | [
"com.sun.org.apache.xerces.internal.impl.xs.SchemaGrammar",
"com.sun.org.apache.xerces.internal.impl.xs.XSModelImpl",
"com.sun.org.apache.xerces.internal.xni.grammars.XMLGrammarDescription"
] | import com.sun.org.apache.xerces.internal.impl.xs.SchemaGrammar; import com.sun.org.apache.xerces.internal.impl.xs.XSModelImpl; import com.sun.org.apache.xerces.internal.xni.grammars.XMLGrammarDescription; | import com.sun.org.apache.xerces.internal.impl.xs.*; import com.sun.org.apache.xerces.internal.xni.grammars.*; | [
"com.sun.org"
] | com.sun.org; | 867,358 |
protected void setStringImpl(String value) throws SQLException {
init(new StreamSource(new StringReader(value)));
} | void function(String value) throws SQLException { init(new StreamSource(new StringReader(value))); } | /**
* Sets the XML value designated by this SQLXML instance using the given
* String representation. <p>
*
* @param value the XML value
* @throws SQLException if there is an error processing the XML value.
*/ | Sets the XML value designated by this SQLXML instance using the given String representation. | setStringImpl | {
"repo_name": "RabadanLab/Pegasus",
"path": "resources/hsqldb-2.2.7/hsqldb/src/org/hsqldb/jdbc/JDBCSQLXML.java",
"license": "mit",
"size": 120115
} | [
"java.io.StringReader",
"java.sql.SQLException",
"javax.xml.transform.stream.StreamSource"
] | import java.io.StringReader; import java.sql.SQLException; import javax.xml.transform.stream.StreamSource; | import java.io.*; import java.sql.*; import javax.xml.transform.stream.*; | [
"java.io",
"java.sql",
"javax.xml"
] | java.io; java.sql; javax.xml; | 2,379,458 |
public boolean deleteOnExit(Path f) throws AccessControlException,
IOException {
if (!this.util().exists(f)) {
return false;
}
synchronized (DELETE_ON_EXIT) {
if (DELETE_ON_EXIT.isEmpty() && !FINALIZER.isAlive()) {
Runtime.getRuntime().addShutdownHook(FINALIZER);
}
Set<Path> set = DELETE_ON_EXIT.get(this);
if (set == null) {
set = new TreeSet<Path>();
DELETE_ON_EXIT.put(this, set);
}
set.add(f);
}
return true;
} | boolean function(Path f) throws AccessControlException, IOException { if (!this.util().exists(f)) { return false; } synchronized (DELETE_ON_EXIT) { if (DELETE_ON_EXIT.isEmpty() && !FINALIZER.isAlive()) { Runtime.getRuntime().addShutdownHook(FINALIZER); } Set<Path> set = DELETE_ON_EXIT.get(this); if (set == null) { set = new TreeSet<Path>(); DELETE_ON_EXIT.put(this, set); } set.add(f); } return true; } | /**
* Mark a path to be deleted on JVM shutdown.
*
* @param f the existing path to delete.
*
* @return true if deleteOnExit is successful, otherwise false.
*
* @throws AccessControlException If access is denied
* @throws UnsupportedFileSystemException If file system for <code>f</code> is
* not supported
* @throws IOException If an I/O error occurred
*
* Exceptions applicable to file systems accessed over RPC:
* @throws RpcClientException If an exception occurred in the RPC client
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
*/ | Mark a path to be deleted on JVM shutdown | deleteOnExit | {
"repo_name": "dotunolafunmiloye/hadoop-common",
"path": "src/java/org/apache/hadoop/fs/FileContext.java",
"license": "apache-2.0",
"size": 90172
} | [
"java.io.IOException",
"java.util.Set",
"java.util.TreeSet",
"org.apache.hadoop.security.AccessControlException"
] | import java.io.IOException; import java.util.Set; import java.util.TreeSet; import org.apache.hadoop.security.AccessControlException; | import java.io.*; import java.util.*; import org.apache.hadoop.security.*; | [
"java.io",
"java.util",
"org.apache.hadoop"
] | java.io; java.util; org.apache.hadoop; | 413,194 |
public double calc_rmsd(Atom[] pro1, Atom[] pro2, int strLen, boolean storeTransform) throws StructureException {
Atom[] cod1 = getAtoms(pro1, strLen,false);
Atom[] cod2 = getAtoms(pro2, strLen,true);
assert(cod1.length == cod2.length);
SVDSuperimposer svd = new SVDSuperimposer(cod1, cod2);
Matrix matrix = svd.getRotation();
Atom shift = svd.getTranslation();
if ( storeTransform) {
r=matrix;
t = shift;
}
for (Atom a : cod2){
Calc.rotate(a.getGroup(), matrix);
Calc.shift(a.getGroup(), shift);
}
return SVDSuperimposer.getRMS(cod1, cod2);
} | double function(Atom[] pro1, Atom[] pro2, int strLen, boolean storeTransform) throws StructureException { Atom[] cod1 = getAtoms(pro1, strLen,false); Atom[] cod2 = getAtoms(pro2, strLen,true); assert(cod1.length == cod2.length); SVDSuperimposer svd = new SVDSuperimposer(cod1, cod2); Matrix matrix = svd.getRotation(); Atom shift = svd.getTranslation(); if ( storeTransform) { r=matrix; t = shift; } for (Atom a : cod2){ Calc.rotate(a.getGroup(), matrix); Calc.shift(a.getGroup(), shift); } return SVDSuperimposer.getRMS(cod1, cod2); } | /** superimpose and get rmsd
*
* @param pro1
* @param pro2
* @param strLen Number of atoms from pro1 and pro2 to use
* @param storeTransform Store rotation and shift matrices locally
* @return RMSD
* @throws StructureException
*/ | superimpose and get rmsd | calc_rmsd | {
"repo_name": "JolantaWojcik/biojavaOwn",
"path": "biojava3-structure/src/main/java/org/biojava/bio/structure/align/ce/CECalculator.java",
"license": "lgpl-2.1",
"size": 62416
} | [
"org.biojava.bio.structure.Atom",
"org.biojava.bio.structure.Calc",
"org.biojava.bio.structure.SVDSuperimposer",
"org.biojava.bio.structure.StructureException",
"org.biojava.bio.structure.jama.Matrix"
] | import org.biojava.bio.structure.Atom; import org.biojava.bio.structure.Calc; import org.biojava.bio.structure.SVDSuperimposer; import org.biojava.bio.structure.StructureException; import org.biojava.bio.structure.jama.Matrix; | import org.biojava.bio.structure.*; import org.biojava.bio.structure.jama.*; | [
"org.biojava.bio"
] | org.biojava.bio; | 46,579 |
public void setCreationDateTime(Timestamp created) {
this.creationDateTime = created;
} | void function(Timestamp created) { this.creationDateTime = created; } | /**
* Sets the creation date of this Notification entry
* @param created the creation date of this Notification entry
*/ | Sets the creation date of this Notification entry | setCreationDateTime | {
"repo_name": "sbower/kuali-rice-1",
"path": "impl/src/main/java/org/kuali/rice/kcb/bo/Message.java",
"license": "apache-2.0",
"size": 8018
} | [
"java.sql.Timestamp"
] | import java.sql.Timestamp; | import java.sql.*; | [
"java.sql"
] | java.sql; | 2,046,312 |
public static Proxy getCurrentProxyConfiguration(URI uri) throws Exception
{
if (!sSetupCalled && gContext == null)
throw new RuntimeException("you need to call setup() first");
Proxy proxyConfig;
if (deviceVersion >= 12) // Honeycomb 3.1
{
proxyConfig = getProxySelectorConfiguration(uri);
}
else
{
proxyConfig = getGlobalProxy();
}
if (proxyConfig == null)
{
proxyConfig = Proxy.NO_PROXY;
}
// ConnectivityManager connManager = (ConnectivityManager) gContext.getSystemService(Context.CONNECTIVITY_SERVICE);
// NetworkInfo activeNetInfo = connManager.getActiveNetworkInfo();
//// proxyConfig.currentNetworkInfo = activeNetInfo;
//
// if (activeNetInfo != null)
// {
// switch (activeNetInfo.getType())
// {
// case ConnectivityManager.TYPE_WIFI:
// WifiManager wifiManager = (WifiManager) gContext.getSystemService(Context.WIFI_SERVICE);
// WifiInfo wifiInfo = wifiManager.getConnectionInfo();
// List<WifiConfiguration> wifiConfigurations = wifiManager.getConfiguredNetworks();
// for (WifiConfiguration wc : wifiConfigurations)
// {
// if (wc.networkId == wifiInfo.getNetworkId())
// {
// proxyConfig.ap = new AccessPoint(wc);
// break;
// }
// }
// break;
// case ConnectivityManager.TYPE_MOBILE:
// break;
// default:
// throw new UnsupportedOperationException("Not yet implemented support for" + activeNetInfo.getTypeName() + " network type");
// }
// }
return proxyConfig;
} | static Proxy function(URI uri) throws Exception { if (!sSetupCalled && gContext == null) throw new RuntimeException(STR); Proxy proxyConfig; if (deviceVersion >= 12) { proxyConfig = getProxySelectorConfiguration(uri); } else { proxyConfig = getGlobalProxy(); } if (proxyConfig == null) { proxyConfig = Proxy.NO_PROXY; } return proxyConfig; } | /**
* Main entry point to access the proxy settings
*/ | Main entry point to access the proxy settings | getCurrentProxyConfiguration | {
"repo_name": "shouldit/android-proxy",
"path": "android-proxy-library/src/main/java/be/shouldit/proxy/lib/APL.java",
"license": "apache-2.0",
"size": 23533
} | [
"java.net.Proxy"
] | import java.net.Proxy; | import java.net.*; | [
"java.net"
] | java.net; | 1,938,297 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.