code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
---|---|
public class class_name {
public static void applyTransform(GrayU8 input , int transform[] , GrayU8 output ) {
output.reshape(input.width,input.height);
if( BoofConcurrency.USE_CONCURRENT ) {
ImplEnhanceHistogram_MT.applyTransform(input, transform, output);
} else {
ImplEnhanceHistogram.applyTransform(input, transform, output);
}
} } | public class class_name {
public static void applyTransform(GrayU8 input , int transform[] , GrayU8 output ) {
output.reshape(input.width,input.height);
if( BoofConcurrency.USE_CONCURRENT ) {
ImplEnhanceHistogram_MT.applyTransform(input, transform, output); // depends on control dependency: [if], data = [none]
} else {
ImplEnhanceHistogram.applyTransform(input, transform, output); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public void writeInferredDistributions(InstanceList instances,
File distributionsFile,
int numIterations, int thinning, int burnIn,
double threshold, int max) throws IOException {
PrintWriter out = new PrintWriter(distributionsFile);
out.print ("#doc source topic proportion ...\n");
IDSorter[] sortedTopics = new IDSorter[ numTopics ];
for (int topic = 0; topic < numTopics; topic++) {
// Initialize the sorters with dummy values
sortedTopics[topic] = new IDSorter(topic, topic);
}
if (max < 0 || max > numTopics) {
max = numTopics;
}
int doc = 0;
for (Instance instance: instances) {
double[] topicDistribution =
getSampledDistribution(instance, numIterations,
thinning, burnIn);
out.print (doc); out.print (' ');
// Print the Source field of the instance
if (instance.getSource() != null) {
out.print (instance.getSource());
}
else {
out.print ("null-source");
}
out.print (' ');
for (int topic = 0; topic < numTopics; topic++) {
sortedTopics[topic].set(topic, topicDistribution[topic]);
}
Arrays.sort(sortedTopics);
for (int i = 0; i < max; i++) {
if (sortedTopics[i].getWeight() < threshold) { break; }
out.print (sortedTopics[i].getID() + " " +
sortedTopics[i].getWeight() + " ");
}
out.print (" \n");
doc++;
}
out.close();
} } | public class class_name {
public void writeInferredDistributions(InstanceList instances,
File distributionsFile,
int numIterations, int thinning, int burnIn,
double threshold, int max) throws IOException {
PrintWriter out = new PrintWriter(distributionsFile);
out.print ("#doc source topic proportion ...\n");
IDSorter[] sortedTopics = new IDSorter[ numTopics ];
for (int topic = 0; topic < numTopics; topic++) {
// Initialize the sorters with dummy values
sortedTopics[topic] = new IDSorter(topic, topic);
}
if (max < 0 || max > numTopics) {
max = numTopics;
}
int doc = 0;
for (Instance instance: instances) {
double[] topicDistribution =
getSampledDistribution(instance, numIterations,
thinning, burnIn);
out.print (doc); out.print (' ');
// Print the Source field of the instance
if (instance.getSource() != null) {
out.print (instance.getSource()); // depends on control dependency: [if], data = [(instance.getSource()]
}
else {
out.print ("null-source"); // depends on control dependency: [if], data = [none]
}
out.print (' ');
for (int topic = 0; topic < numTopics; topic++) {
sortedTopics[topic].set(topic, topicDistribution[topic]); // depends on control dependency: [for], data = [topic]
}
Arrays.sort(sortedTopics);
for (int i = 0; i < max; i++) {
if (sortedTopics[i].getWeight() < threshold) { break; }
out.print (sortedTopics[i].getID() + " " +
sortedTopics[i].getWeight() + " "); // depends on control dependency: [for], data = [i]
}
out.print (" \n");
doc++;
}
out.close();
} } |
public class class_name {
public float getQ() {
String q = this.getParameter(Q_PARAM_NAME);
if(q != null) { // This is how the TCK expects to parse it. See AddressingServlet in TCK spec tests.
return Float.parseFloat(q);
} else { // I think this is not needed.
return ((SipURI)address.getURI()).getParameter(Q_PARAM_NAME) == null ? (float) -1.0 :
Float.parseFloat(((SipURI)address.getURI()).getParameter(Q_PARAM_NAME));
}
} } | public class class_name {
public float getQ() {
String q = this.getParameter(Q_PARAM_NAME);
if(q != null) { // This is how the TCK expects to parse it. See AddressingServlet in TCK spec tests.
return Float.parseFloat(q);
// depends on control dependency: [if], data = [(q]
} else { // I think this is not needed.
return ((SipURI)address.getURI()).getParameter(Q_PARAM_NAME) == null ? (float) -1.0 :
Float.parseFloat(((SipURI)address.getURI()).getParameter(Q_PARAM_NAME));
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public Map<String, Object> getHeaders() {
Map<String, Object> map = new HashMap<>();
map.put("Content-length", "0");
map.put(CONTENT_TYPE, contentType);
map.put("Accept", "application/json");
for (Map.Entry<String, Object> entry : extraHeaders.entrySet()) {
map.put(entry.getKey(), entry.getValue());
}
return map;
} } | public class class_name {
public Map<String, Object> getHeaders() {
Map<String, Object> map = new HashMap<>();
map.put("Content-length", "0");
map.put(CONTENT_TYPE, contentType);
map.put("Accept", "application/json");
for (Map.Entry<String, Object> entry : extraHeaders.entrySet()) {
map.put(entry.getKey(), entry.getValue()); // depends on control dependency: [for], data = [entry]
}
return map;
} } |
public class class_name {
private void complete(Symbol sym) throws CompletionFailure {
if (sym.kind == TYP) {
ClassSymbol c = (ClassSymbol)sym;
c.members_field = new Scope.ErrorScope(c); // make sure it's always defined
annotate.enterStart();
try {
completeOwners(c.owner);
completeEnclosing(c);
} finally {
// The flush needs to happen only after annotations
// are filled in.
annotate.enterDoneWithoutFlush();
}
fillIn(c);
} else if (sym.kind == PCK) {
PackageSymbol p = (PackageSymbol)sym;
try {
fillIn(p);
} catch (IOException ex) {
throw new CompletionFailure(sym, ex.getLocalizedMessage()).initCause(ex);
}
}
if (!filling)
annotate.flush(); // finish attaching annotations
} } | public class class_name {
private void complete(Symbol sym) throws CompletionFailure {
if (sym.kind == TYP) {
ClassSymbol c = (ClassSymbol)sym;
c.members_field = new Scope.ErrorScope(c); // make sure it's always defined
annotate.enterStart();
try {
completeOwners(c.owner); // depends on control dependency: [try], data = [none]
completeEnclosing(c); // depends on control dependency: [try], data = [none]
} finally {
// The flush needs to happen only after annotations
// are filled in.
annotate.enterDoneWithoutFlush();
}
fillIn(c);
} else if (sym.kind == PCK) {
PackageSymbol p = (PackageSymbol)sym;
try {
fillIn(p); // depends on control dependency: [try], data = [none]
} catch (IOException ex) {
throw new CompletionFailure(sym, ex.getLocalizedMessage()).initCause(ex);
} // depends on control dependency: [catch], data = [none]
}
if (!filling)
annotate.flush(); // finish attaching annotations
} } |
public class class_name {
private static int getTrimmedXStart(BufferedImage img) {
int height = img.getHeight();
int width = img.getWidth();
int xStart = width;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
if (img.getRGB(j, i) != Color.WHITE.getRGB() && j < xStart) {
xStart = j;
break;
}
}
}
return xStart;
} } | public class class_name {
private static int getTrimmedXStart(BufferedImage img) {
int height = img.getHeight();
int width = img.getWidth();
int xStart = width;
for (int i = 0; i < height; i++) {
for (int j = 0; j < width; j++) {
if (img.getRGB(j, i) != Color.WHITE.getRGB() && j < xStart) {
xStart = j; // depends on control dependency: [if], data = [none]
break;
}
}
}
return xStart;
} } |
public class class_name {
public String resolveEndpointUri(Message message, String defaultUri) {
Map<String, Object> headers = message.getHeaders();
String requestUri;
if (headers.containsKey(ENDPOINT_URI_HEADER_NAME)) {
requestUri = headers.get(ENDPOINT_URI_HEADER_NAME).toString();
} else if (StringUtils.hasText(defaultUri)) {
requestUri = defaultUri;
} else {
requestUri = defaultEndpointUri;
}
if (requestUri == null) {
throw new CitrusRuntimeException("Unable to resolve dynamic endpoint uri! Neither header entry '" +
ENDPOINT_URI_HEADER_NAME + "' nor default endpoint uri is set");
}
requestUri = appendRequestPath(requestUri, headers);
requestUri = appendQueryParams(requestUri, headers);
return requestUri;
} } | public class class_name {
public String resolveEndpointUri(Message message, String defaultUri) {
Map<String, Object> headers = message.getHeaders();
String requestUri;
if (headers.containsKey(ENDPOINT_URI_HEADER_NAME)) {
requestUri = headers.get(ENDPOINT_URI_HEADER_NAME).toString(); // depends on control dependency: [if], data = [none]
} else if (StringUtils.hasText(defaultUri)) {
requestUri = defaultUri; // depends on control dependency: [if], data = [none]
} else {
requestUri = defaultEndpointUri; // depends on control dependency: [if], data = [none]
}
if (requestUri == null) {
throw new CitrusRuntimeException("Unable to resolve dynamic endpoint uri! Neither header entry '" +
ENDPOINT_URI_HEADER_NAME + "' nor default endpoint uri is set");
}
requestUri = appendRequestPath(requestUri, headers);
requestUri = appendQueryParams(requestUri, headers);
return requestUri;
} } |
public class class_name {
public int[] asArray() {
int arr[] = new int[size()];
OrdinalIterator iter = iterator();
int ordinal = iter.nextOrdinal();
int i = 0;
while(ordinal != NO_MORE_ORDINALS) {
arr[i++] = ordinal;
ordinal = iter.nextOrdinal();
}
return arr;
} } | public class class_name {
public int[] asArray() {
int arr[] = new int[size()];
OrdinalIterator iter = iterator();
int ordinal = iter.nextOrdinal();
int i = 0;
while(ordinal != NO_MORE_ORDINALS) {
arr[i++] = ordinal; // depends on control dependency: [while], data = [none]
ordinal = iter.nextOrdinal(); // depends on control dependency: [while], data = [none]
}
return arr;
} } |
public class class_name {
public RawData as(final String mimeOrExtension) {
if (mimeOrExtension.contains(StringPool.SLASH)) {
this.mimeType = mimeOrExtension;
}
else {
this.mimeType = MimeTypes.getMimeType(mimeOrExtension);
}
return this;
} } | public class class_name {
public RawData as(final String mimeOrExtension) {
if (mimeOrExtension.contains(StringPool.SLASH)) {
this.mimeType = mimeOrExtension; // depends on control dependency: [if], data = [none]
}
else {
this.mimeType = MimeTypes.getMimeType(mimeOrExtension); // depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
public static <A, B> ImmutableMultiset<B> transformedCopy(Multiset<A> ms,
Function<A, B> func) {
final ImmutableMultiset.Builder<B> ret = ImmutableMultiset.builder();
for (final Multiset.Entry<A> entry : ms.entrySet()) {
final B transformedElement = func.apply(entry.getElement());
ret.addCopies(transformedElement, entry.getCount());
}
return ret.build();
} } | public class class_name {
public static <A, B> ImmutableMultiset<B> transformedCopy(Multiset<A> ms,
Function<A, B> func) {
final ImmutableMultiset.Builder<B> ret = ImmutableMultiset.builder();
for (final Multiset.Entry<A> entry : ms.entrySet()) {
final B transformedElement = func.apply(entry.getElement());
ret.addCopies(transformedElement, entry.getCount()); // depends on control dependency: [for], data = [entry]
}
return ret.build();
} } |
public class class_name {
public Object compactValue(String activeProperty, Map<String, Object> value) {
// 1)
int numberMembers = value.size();
// 2)
if (value.containsKey(JsonLdConsts.INDEX)
&& JsonLdConsts.INDEX.equals(this.getContainer(activeProperty))) {
numberMembers--;
}
// 3)
if (numberMembers > 2) {
return value;
}
// 4)
final String typeMapping = getTypeMapping(activeProperty);
final String languageMapping = getLanguageMapping(activeProperty);
if (value.containsKey(JsonLdConsts.ID)) {
// 4.1)
if (numberMembers == 1 && JsonLdConsts.ID.equals(typeMapping)) {
return compactIri((String) value.get(JsonLdConsts.ID));
}
// 4.2)
if (numberMembers == 1 && JsonLdConsts.VOCAB.equals(typeMapping)) {
return compactIri((String) value.get(JsonLdConsts.ID), true);
}
// 4.3)
return value;
}
final Object valueValue = value.get(JsonLdConsts.VALUE);
// 5)
if (value.containsKey(JsonLdConsts.TYPE)
&& Obj.equals(value.get(JsonLdConsts.TYPE), typeMapping)) {
return valueValue;
}
// 6)
if (value.containsKey(JsonLdConsts.LANGUAGE)) {
// TODO: SPEC: doesn't specify to check default language as well
if (Obj.equals(value.get(JsonLdConsts.LANGUAGE), languageMapping) || Obj
.equals(value.get(JsonLdConsts.LANGUAGE), this.get(JsonLdConsts.LANGUAGE))) {
return valueValue;
}
}
// 7)
if (numberMembers == 1 && (!(valueValue instanceof String)
|| !this.containsKey(JsonLdConsts.LANGUAGE)
|| (termDefinitions.containsKey(activeProperty)
&& getTermDefinition(activeProperty).containsKey(JsonLdConsts.LANGUAGE)
&& languageMapping == null))) {
return valueValue;
}
// 8)
return value;
} } | public class class_name {
public Object compactValue(String activeProperty, Map<String, Object> value) {
// 1)
int numberMembers = value.size();
// 2)
if (value.containsKey(JsonLdConsts.INDEX)
&& JsonLdConsts.INDEX.equals(this.getContainer(activeProperty))) {
numberMembers--; // depends on control dependency: [if], data = [none]
}
// 3)
if (numberMembers > 2) {
return value; // depends on control dependency: [if], data = [none]
}
// 4)
final String typeMapping = getTypeMapping(activeProperty);
final String languageMapping = getLanguageMapping(activeProperty);
if (value.containsKey(JsonLdConsts.ID)) {
// 4.1)
if (numberMembers == 1 && JsonLdConsts.ID.equals(typeMapping)) {
return compactIri((String) value.get(JsonLdConsts.ID)); // depends on control dependency: [if], data = [none]
}
// 4.2)
if (numberMembers == 1 && JsonLdConsts.VOCAB.equals(typeMapping)) {
return compactIri((String) value.get(JsonLdConsts.ID), true); // depends on control dependency: [if], data = [none]
}
// 4.3)
return value; // depends on control dependency: [if], data = [none]
}
final Object valueValue = value.get(JsonLdConsts.VALUE);
// 5)
if (value.containsKey(JsonLdConsts.TYPE)
&& Obj.equals(value.get(JsonLdConsts.TYPE), typeMapping)) {
return valueValue; // depends on control dependency: [if], data = [none]
}
// 6)
if (value.containsKey(JsonLdConsts.LANGUAGE)) {
// TODO: SPEC: doesn't specify to check default language as well
if (Obj.equals(value.get(JsonLdConsts.LANGUAGE), languageMapping) || Obj
.equals(value.get(JsonLdConsts.LANGUAGE), this.get(JsonLdConsts.LANGUAGE))) {
return valueValue; // depends on control dependency: [if], data = [none]
}
}
// 7)
if (numberMembers == 1 && (!(valueValue instanceof String)
|| !this.containsKey(JsonLdConsts.LANGUAGE)
|| (termDefinitions.containsKey(activeProperty)
&& getTermDefinition(activeProperty).containsKey(JsonLdConsts.LANGUAGE)
&& languageMapping == null))) {
return valueValue; // depends on control dependency: [if], data = [none]
}
// 8)
return value;
} } |
public class class_name {
public ListWebACLsResult withWebACLs(WebACLSummary... webACLs) {
if (this.webACLs == null) {
setWebACLs(new java.util.ArrayList<WebACLSummary>(webACLs.length));
}
for (WebACLSummary ele : webACLs) {
this.webACLs.add(ele);
}
return this;
} } | public class class_name {
public ListWebACLsResult withWebACLs(WebACLSummary... webACLs) {
if (this.webACLs == null) {
setWebACLs(new java.util.ArrayList<WebACLSummary>(webACLs.length)); // depends on control dependency: [if], data = [none]
}
for (WebACLSummary ele : webACLs) {
this.webACLs.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
private static boolean[] createSafeOctets(String safeChars) {
int maxChar = -1;
char[] safeCharArray = safeChars.toCharArray();
for (char c : safeCharArray) {
maxChar = Math.max(c, maxChar);
}
boolean[] octets = new boolean[maxChar + 1];
for (char c : safeCharArray) {
octets[c] = true;
}
return octets;
} } | public class class_name {
private static boolean[] createSafeOctets(String safeChars) {
int maxChar = -1;
char[] safeCharArray = safeChars.toCharArray();
for (char c : safeCharArray) {
maxChar = Math.max(c, maxChar); // depends on control dependency: [for], data = [c]
}
boolean[] octets = new boolean[maxChar + 1];
for (char c : safeCharArray) {
octets[c] = true; // depends on control dependency: [for], data = [c]
}
return octets;
} } |
public class class_name {
public static void checkClassExists(String className, String msg) {
try {
getDefaultClassLoader().loadClass(className);
} catch (ClassNotFoundException e) {
throw new WatcherDependencyNotFoundException(msg);
}
} } | public class class_name {
public static void checkClassExists(String className, String msg) {
try {
getDefaultClassLoader().loadClass(className); // depends on control dependency: [try], data = [none]
} catch (ClassNotFoundException e) {
throw new WatcherDependencyNotFoundException(msg);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected void checkForCyclicDependencies(AccessControlGroup group, List<AccessControlGroup> groupList) {
for (AccessControlGroup inheritedGroup : group.getInherits()) {
if (groupList.contains(inheritedGroup)) {
StringBuilder sb = new StringBuilder("A cyclic dependency of access control groups has been detected: \n");
for (int i = groupList.size() - 1; i >= 0; i--) {
AccessControlGroup node = groupList.get(i);
sb.append(node);
if (i > 0) {
sb.append("-->");
}
}
throw new IllegalStateException(sb.toString());
}
groupList.add(inheritedGroup);
checkForCyclicDependencies(inheritedGroup, groupList);
AccessControlGroup removed = groupList.remove(groupList.size() - 1);
assert (removed == inheritedGroup);
}
} } | public class class_name {
protected void checkForCyclicDependencies(AccessControlGroup group, List<AccessControlGroup> groupList) {
for (AccessControlGroup inheritedGroup : group.getInherits()) {
if (groupList.contains(inheritedGroup)) {
StringBuilder sb = new StringBuilder("A cyclic dependency of access control groups has been detected: \n");
for (int i = groupList.size() - 1; i >= 0; i--) {
AccessControlGroup node = groupList.get(i);
sb.append(node); // depends on control dependency: [for], data = [none]
if (i > 0) {
sb.append("-->"); // depends on control dependency: [if], data = [none]
}
}
throw new IllegalStateException(sb.toString());
}
groupList.add(inheritedGroup); // depends on control dependency: [for], data = [inheritedGroup]
checkForCyclicDependencies(inheritedGroup, groupList); // depends on control dependency: [for], data = [inheritedGroup]
AccessControlGroup removed = groupList.remove(groupList.size() - 1);
assert (removed == inheritedGroup); // depends on control dependency: [for], data = [inheritedGroup]
}
} } |
public class class_name {
public void marshall(StartChannelRequest startChannelRequest, ProtocolMarshaller protocolMarshaller) {
if (startChannelRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(startChannelRequest.getChannelId(), CHANNELID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(StartChannelRequest startChannelRequest, ProtocolMarshaller protocolMarshaller) {
if (startChannelRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(startChannelRequest.getChannelId(), CHANNELID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public static void drawInliers(BufferedImage left, BufferedImage right, CameraPinholeBrown intrinsic,
List<AssociatedPair> normalized) {
Point2Transform2_F64 n_to_p = LensDistortionFactory.narrow(intrinsic).distort_F64(false,true);
List<AssociatedPair> pixels = new ArrayList<>();
for (AssociatedPair n : normalized) {
AssociatedPair p = new AssociatedPair();
n_to_p.compute(n.p1.x, n.p1.y, p.p1);
n_to_p.compute(n.p2.x, n.p2.y, p.p2);
pixels.add(p);
}
// display the results
AssociationPanel panel = new AssociationPanel(20);
panel.setAssociation(pixels);
panel.setImages(left, right);
ShowImages.showWindow(panel, "Inlier Features", true);
} } | public class class_name {
public static void drawInliers(BufferedImage left, BufferedImage right, CameraPinholeBrown intrinsic,
List<AssociatedPair> normalized) {
Point2Transform2_F64 n_to_p = LensDistortionFactory.narrow(intrinsic).distort_F64(false,true);
List<AssociatedPair> pixels = new ArrayList<>();
for (AssociatedPair n : normalized) {
AssociatedPair p = new AssociatedPair();
n_to_p.compute(n.p1.x, n.p1.y, p.p1); // depends on control dependency: [for], data = [n]
n_to_p.compute(n.p2.x, n.p2.y, p.p2); // depends on control dependency: [for], data = [n]
pixels.add(p); // depends on control dependency: [for], data = [none]
}
// display the results
AssociationPanel panel = new AssociationPanel(20);
panel.setAssociation(pixels);
panel.setImages(left, right);
ShowImages.showWindow(panel, "Inlier Features", true);
} } |
public class class_name {
public boolean write(DataOutputStream daOut, boolean bFixedLength)
{
Object data = this.getData();
if (data == null)
data = Constants.BLANK;
ObjectOutputStream p = null;
try {
p = new ObjectOutputStream(daOut);
p.writeObject(data);
p.flush();
p.close(); // Unlink ObjectOutputStream
} catch (IOException ex) {
ex.printStackTrace(); // Never
return false;
}
return true; // Success
} } | public class class_name {
public boolean write(DataOutputStream daOut, boolean bFixedLength)
{
Object data = this.getData();
if (data == null)
data = Constants.BLANK;
ObjectOutputStream p = null;
try {
p = new ObjectOutputStream(daOut); // depends on control dependency: [try], data = [none]
p.writeObject(data); // depends on control dependency: [try], data = [none]
p.flush(); // depends on control dependency: [try], data = [none]
p.close(); // Unlink ObjectOutputStream // depends on control dependency: [try], data = [none]
} catch (IOException ex) {
ex.printStackTrace(); // Never
return false;
} // depends on control dependency: [catch], data = [none]
return true; // Success
} } |
public class class_name {
final Table SYSTEM_ALLTYPEINFO() {
Table t = sysTables[SYSTEM_ALLTYPEINFO];
if (t == null) {
t = createBlankTable(sysTableHsqlNames[SYSTEM_ALLTYPEINFO]);
//-------------------------------------------
// same as SYSTEM_TYPEINFO:
// ------------------------------------------
addColumn(t, "TYPE_NAME", SQL_IDENTIFIER);
addColumn(t, "DATA_TYPE", Type.SQL_SMALLINT);
addColumn(t, "PRECISION", Type.SQL_INTEGER);
addColumn(t, "LITERAL_PREFIX", CHARACTER_DATA);
addColumn(t, "LITERAL_SUFFIX", CHARACTER_DATA);
addColumn(t, "CREATE_PARAMS", CHARACTER_DATA);
addColumn(t, "NULLABLE", Type.SQL_SMALLINT);
addColumn(t, "CASE_SENSITIVE", Type.SQL_BOOLEAN);
addColumn(t, "SEARCHABLE", Type.SQL_SMALLINT);
addColumn(t, "UNSIGNED_ATTRIBUTE", Type.SQL_BOOLEAN);
addColumn(t, "FIXED_PREC_SCALE", Type.SQL_BOOLEAN);
addColumn(t, "AUTO_INCREMENT", Type.SQL_BOOLEAN);
addColumn(t, "LOCAL_TYPE_NAME", SQL_IDENTIFIER);
addColumn(t, "MINIMUM_SCALE", Type.SQL_SMALLINT);
addColumn(t, "MAXIMUM_SCALE", Type.SQL_SMALLINT);
addColumn(t, "SQL_DATA_TYPE", Type.SQL_INTEGER);
addColumn(t, "SQL_DATETIME_SUB", Type.SQL_INTEGER);
addColumn(t, "NUM_PREC_RADIX", Type.SQL_INTEGER);
//-------------------------------------------
// SQL CLI / ODBC - not in JDBC spec
// ------------------------------------------
addColumn(t, "INTERVAL_PRECISION", Type.SQL_INTEGER);
//-------------------------------------------
// extended:
//-------------------------------------------
// level of support
//-------------------------------------------
addColumn(t, "AS_TAB_COL", Type.SQL_BOOLEAN);
// for instance, some executable methods take Connection
// or return non-serializable Object such as ResultSet, neither
// of which maps to a supported table column type but which
// we show as JAVA_OBJECT in SYSTEM_PROCEDURECOLUMNS.
// Also, triggers take Object[] row, which we show as ARRAY
// presently, although STRUCT would probably be better in the
// future, as the row can actually contain mixed data types.
addColumn(t, "AS_PROC_COL", Type.SQL_BOOLEAN);
//-------------------------------------------
// actual values for attributes that cannot be represented
// within the limitations of the SQL CLI / JDBC interface
//-------------------------------------------
addColumn(t, "MAX_PREC_ACT", Type.SQL_BIGINT);
addColumn(t, "MIN_SCALE_ACT", Type.SQL_INTEGER);
addColumn(t, "MAX_SCALE_ACT", Type.SQL_INTEGER);
//-------------------------------------------
// how do we store this internally as a column value?
//-------------------------------------------
addColumn(t, "COL_ST_CLS_NAME", SQL_IDENTIFIER);
addColumn(t, "COL_ST_IS_SUP", Type.SQL_BOOLEAN);
//-------------------------------------------
// what is the standard Java mapping for the type?
//-------------------------------------------
addColumn(t, "STD_MAP_CLS_NAME", SQL_IDENTIFIER);
addColumn(t, "STD_MAP_IS_SUP", Type.SQL_BOOLEAN);
//-------------------------------------------
// what, if any, custom mapping do we provide?
// (under the current build options and hosting VM)
//-------------------------------------------
addColumn(t, "CST_MAP_CLS_NAME", SQL_IDENTIFIER);
addColumn(t, "CST_MAP_IS_SUP", Type.SQL_BOOLEAN);
//-------------------------------------------
// what is the max representable and actual
// character octet length, if applicable?
//-------------------------------------------
addColumn(t, "MCOL_JDBC", Type.SQL_INTEGER);
addColumn(t, "MCOL_ACT", Type.SQL_BIGINT);
//-------------------------------------------
// what is the default or fixed scale, if applicable?
//-------------------------------------------
addColumn(t, "DEF_OR_FIXED_SCALE", Type.SQL_INTEGER);
//-------------------------------------------
// Any type-specific, localized remarks can go here
//-------------------------------------------
addColumn(t, "REMARKS", CHARACTER_DATA);
//-------------------------------------------
// required for JDBC sort contract:
//-------------------------------------------
addColumn(t, "TYPE_SUB", Type.SQL_INTEGER);
// order: DATA_TYPE, TYPE_SUB
// true primary key
HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
sysTableHsqlNames[SYSTEM_ALLTYPEINFO].name, false,
SchemaObject.INDEX);
t.createPrimaryKey(name, new int[] {
1, 34
}, true);
return t;
}
PersistentStore store = database.persistentStoreCollection.getStore(t);
Object[] row;
int type;
DITypeInfo ti;
//-----------------------------------------
// Same as SYSTEM_TYPEINFO
//-----------------------------------------
final int itype_name = 0;
final int idata_type = 1;
final int iprecision = 2;
final int iliteral_prefix = 3;
final int iliteral_suffix = 4;
final int icreate_params = 5;
final int inullable = 6;
final int icase_sensitive = 7;
final int isearchable = 8;
final int iunsigned_attribute = 9;
final int ifixed_prec_scale = 10;
final int iauto_increment = 11;
final int ilocal_type_name = 12;
final int iminimum_scale = 13;
final int imaximum_scale = 14;
final int isql_data_type = 15;
final int isql_datetime_sub = 16;
final int inum_prec_radix = 17;
//------------------------------------------
// Extensions
//------------------------------------------
// not in JDBC, but in SQL CLI SQLDA / ODBC
//------------------------------------------
final int iinterval_precision = 18;
//------------------------------------------
// HSQLDB/Java-specific:
//------------------------------------------
final int iis_sup_as_tcol = 19;
final int iis_sup_as_pcol = 20;
//------------------------------------------
final int imax_prec_or_len_act = 21;
final int imin_scale_actual = 22;
final int imax_scale_actual = 23;
//------------------------------------------
final int ics_cls_name = 24;
final int ics_cls_is_supported = 25;
//------------------------------------------
final int ism_cls_name = 26;
final int ism_cls_is_supported = 27;
//------------------------------------------
final int icm_cls_name = 28;
final int icm_cls_is_supported = 29;
//------------------------------------------
final int imax_char_oct_len_jdbc = 30;
final int imax_char_oct_len_act = 31;
//------------------------------------------
final int idef_or_fixed_scale = 32;
//------------------------------------------
final int iremarks = 33;
//------------------------------------------
final int itype_sub = 34;
ti = new DITypeInfo();
for (int i = 0; i < Types.ALL_TYPES.length; i++) {
ti.setTypeCode(Types.ALL_TYPES[i][0]);
ti.setTypeSub(Types.ALL_TYPES[i][1]);
row = t.getEmptyRowData();
row[itype_name] = ti.getTypeName();
row[idata_type] = ti.getDataType();
row[iprecision] = ti.getPrecision();
row[iliteral_prefix] = ti.getLiteralPrefix();
row[iliteral_suffix] = ti.getLiteralSuffix();
row[icreate_params] = ti.getCreateParams();
row[inullable] = ti.getNullability();
row[icase_sensitive] = ti.isCaseSensitive();
row[isearchable] = ti.getSearchability();
row[iunsigned_attribute] = ti.isUnsignedAttribute();
row[ifixed_prec_scale] = ti.isFixedPrecisionScale();
row[iauto_increment] = ti.isAutoIncrement();
row[ilocal_type_name] = ti.getLocalName();
row[iminimum_scale] = ti.getMinScale();
row[imaximum_scale] = ti.getMaxScale();
row[isql_data_type] = ti.getSqlDataType();
row[isql_datetime_sub] = ti.getSqlDateTimeSub();
row[inum_prec_radix] = ti.getNumPrecRadix();
//------------------------------------------
row[iinterval_precision] = ti.getIntervalPrecision();
//------------------------------------------
row[iis_sup_as_tcol] = ti.isSupportedAsTCol();
row[iis_sup_as_pcol] = ti.isSupportedAsPCol();
//------------------------------------------
row[imax_prec_or_len_act] = ti.getPrecisionAct();
row[imin_scale_actual] = ti.getMinScaleAct();
row[imax_scale_actual] = ti.getMaxScaleAct();
//------------------------------------------
row[ics_cls_name] = ti.getColStClsName();
row[ics_cls_is_supported] = ti.isColStClsSupported();
//------------------------------------------
row[ism_cls_name] = ti.getStdMapClsName();
row[ism_cls_is_supported] = ti.isStdMapClsSupported();
//------------------------------------------
row[icm_cls_name] = ti.getCstMapClsName();
try {
if (row[icm_cls_name] != null) {
ns.classForName((String) row[icm_cls_name]);
row[icm_cls_is_supported] = Boolean.TRUE;
}
} catch (Exception e) {
row[icm_cls_is_supported] = Boolean.FALSE;
}
//------------------------------------------
row[imax_char_oct_len_jdbc] = ti.getCharOctLen();
row[imax_char_oct_len_act] = ti.getCharOctLenAct();
//------------------------------------------
row[idef_or_fixed_scale] = ti.getDefaultScale();
//------------------------------------------
row[iremarks] = ti.getRemarks();
//------------------------------------------
row[itype_sub] = ti.getDataTypeSub();
t.insertSys(store, row);
}
return t;
} } | public class class_name {
final Table SYSTEM_ALLTYPEINFO() {
Table t = sysTables[SYSTEM_ALLTYPEINFO];
if (t == null) {
t = createBlankTable(sysTableHsqlNames[SYSTEM_ALLTYPEINFO]); // depends on control dependency: [if], data = [none]
//-------------------------------------------
// same as SYSTEM_TYPEINFO:
// ------------------------------------------
addColumn(t, "TYPE_NAME", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
addColumn(t, "DATA_TYPE", Type.SQL_SMALLINT); // depends on control dependency: [if], data = [(t]
addColumn(t, "PRECISION", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
addColumn(t, "LITERAL_PREFIX", CHARACTER_DATA); // depends on control dependency: [if], data = [(t]
addColumn(t, "LITERAL_SUFFIX", CHARACTER_DATA); // depends on control dependency: [if], data = [(t]
addColumn(t, "CREATE_PARAMS", CHARACTER_DATA); // depends on control dependency: [if], data = [(t]
addColumn(t, "NULLABLE", Type.SQL_SMALLINT); // depends on control dependency: [if], data = [(t]
addColumn(t, "CASE_SENSITIVE", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
addColumn(t, "SEARCHABLE", Type.SQL_SMALLINT); // depends on control dependency: [if], data = [(t]
addColumn(t, "UNSIGNED_ATTRIBUTE", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
addColumn(t, "FIXED_PREC_SCALE", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
addColumn(t, "AUTO_INCREMENT", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
addColumn(t, "LOCAL_TYPE_NAME", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
addColumn(t, "MINIMUM_SCALE", Type.SQL_SMALLINT); // depends on control dependency: [if], data = [(t]
addColumn(t, "MAXIMUM_SCALE", Type.SQL_SMALLINT); // depends on control dependency: [if], data = [(t]
addColumn(t, "SQL_DATA_TYPE", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
addColumn(t, "SQL_DATETIME_SUB", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
addColumn(t, "NUM_PREC_RADIX", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// SQL CLI / ODBC - not in JDBC spec
// ------------------------------------------
addColumn(t, "INTERVAL_PRECISION", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// extended:
//-------------------------------------------
// level of support
//-------------------------------------------
addColumn(t, "AS_TAB_COL", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
// for instance, some executable methods take Connection
// or return non-serializable Object such as ResultSet, neither
// of which maps to a supported table column type but which
// we show as JAVA_OBJECT in SYSTEM_PROCEDURECOLUMNS.
// Also, triggers take Object[] row, which we show as ARRAY
// presently, although STRUCT would probably be better in the
// future, as the row can actually contain mixed data types.
addColumn(t, "AS_PROC_COL", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// actual values for attributes that cannot be represented
// within the limitations of the SQL CLI / JDBC interface
//-------------------------------------------
addColumn(t, "MAX_PREC_ACT", Type.SQL_BIGINT); // depends on control dependency: [if], data = [(t]
addColumn(t, "MIN_SCALE_ACT", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
addColumn(t, "MAX_SCALE_ACT", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// how do we store this internally as a column value?
//-------------------------------------------
addColumn(t, "COL_ST_CLS_NAME", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
addColumn(t, "COL_ST_IS_SUP", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// what is the standard Java mapping for the type?
//-------------------------------------------
addColumn(t, "STD_MAP_CLS_NAME", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
addColumn(t, "STD_MAP_IS_SUP", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// what, if any, custom mapping do we provide?
// (under the current build options and hosting VM)
//-------------------------------------------
addColumn(t, "CST_MAP_CLS_NAME", SQL_IDENTIFIER); // depends on control dependency: [if], data = [(t]
addColumn(t, "CST_MAP_IS_SUP", Type.SQL_BOOLEAN); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// what is the max representable and actual
// character octet length, if applicable?
//-------------------------------------------
addColumn(t, "MCOL_JDBC", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
addColumn(t, "MCOL_ACT", Type.SQL_BIGINT); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// what is the default or fixed scale, if applicable?
//-------------------------------------------
addColumn(t, "DEF_OR_FIXED_SCALE", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// Any type-specific, localized remarks can go here
//-------------------------------------------
addColumn(t, "REMARKS", CHARACTER_DATA); // depends on control dependency: [if], data = [(t]
//-------------------------------------------
// required for JDBC sort contract:
//-------------------------------------------
addColumn(t, "TYPE_SUB", Type.SQL_INTEGER); // depends on control dependency: [if], data = [(t]
// order: DATA_TYPE, TYPE_SUB
// true primary key
HsqlName name = HsqlNameManager.newInfoSchemaObjectName(
sysTableHsqlNames[SYSTEM_ALLTYPEINFO].name, false,
SchemaObject.INDEX);
t.createPrimaryKey(name, new int[] {
1, 34
}, true); // depends on control dependency: [if], data = [none]
return t; // depends on control dependency: [if], data = [none]
}
PersistentStore store = database.persistentStoreCollection.getStore(t);
Object[] row;
int type;
DITypeInfo ti;
//-----------------------------------------
// Same as SYSTEM_TYPEINFO
//-----------------------------------------
final int itype_name = 0;
final int idata_type = 1;
final int iprecision = 2;
final int iliteral_prefix = 3;
final int iliteral_suffix = 4;
final int icreate_params = 5;
final int inullable = 6;
final int icase_sensitive = 7;
final int isearchable = 8;
final int iunsigned_attribute = 9;
final int ifixed_prec_scale = 10;
final int iauto_increment = 11;
final int ilocal_type_name = 12;
final int iminimum_scale = 13;
final int imaximum_scale = 14;
final int isql_data_type = 15;
final int isql_datetime_sub = 16;
final int inum_prec_radix = 17;
//------------------------------------------
// Extensions
//------------------------------------------
// not in JDBC, but in SQL CLI SQLDA / ODBC
//------------------------------------------
final int iinterval_precision = 18;
//------------------------------------------
// HSQLDB/Java-specific:
//------------------------------------------
final int iis_sup_as_tcol = 19;
final int iis_sup_as_pcol = 20;
//------------------------------------------
final int imax_prec_or_len_act = 21;
final int imin_scale_actual = 22;
final int imax_scale_actual = 23;
//------------------------------------------
final int ics_cls_name = 24;
final int ics_cls_is_supported = 25;
//------------------------------------------
final int ism_cls_name = 26;
final int ism_cls_is_supported = 27;
//------------------------------------------
final int icm_cls_name = 28;
final int icm_cls_is_supported = 29;
//------------------------------------------
final int imax_char_oct_len_jdbc = 30;
final int imax_char_oct_len_act = 31;
//------------------------------------------
final int idef_or_fixed_scale = 32;
//------------------------------------------
final int iremarks = 33;
//------------------------------------------
final int itype_sub = 34;
ti = new DITypeInfo();
for (int i = 0; i < Types.ALL_TYPES.length; i++) {
ti.setTypeCode(Types.ALL_TYPES[i][0]); // depends on control dependency: [for], data = [i]
ti.setTypeSub(Types.ALL_TYPES[i][1]); // depends on control dependency: [for], data = [i]
row = t.getEmptyRowData(); // depends on control dependency: [for], data = [none]
row[itype_name] = ti.getTypeName(); // depends on control dependency: [for], data = [none]
row[idata_type] = ti.getDataType(); // depends on control dependency: [for], data = [none]
row[iprecision] = ti.getPrecision(); // depends on control dependency: [for], data = [none]
row[iliteral_prefix] = ti.getLiteralPrefix(); // depends on control dependency: [for], data = [none]
row[iliteral_suffix] = ti.getLiteralSuffix(); // depends on control dependency: [for], data = [none]
row[icreate_params] = ti.getCreateParams(); // depends on control dependency: [for], data = [none]
row[inullable] = ti.getNullability(); // depends on control dependency: [for], data = [none]
row[icase_sensitive] = ti.isCaseSensitive(); // depends on control dependency: [for], data = [none]
row[isearchable] = ti.getSearchability(); // depends on control dependency: [for], data = [none]
row[iunsigned_attribute] = ti.isUnsignedAttribute(); // depends on control dependency: [for], data = [none]
row[ifixed_prec_scale] = ti.isFixedPrecisionScale(); // depends on control dependency: [for], data = [none]
row[iauto_increment] = ti.isAutoIncrement(); // depends on control dependency: [for], data = [none]
row[ilocal_type_name] = ti.getLocalName(); // depends on control dependency: [for], data = [none]
row[iminimum_scale] = ti.getMinScale(); // depends on control dependency: [for], data = [none]
row[imaximum_scale] = ti.getMaxScale(); // depends on control dependency: [for], data = [none]
row[isql_data_type] = ti.getSqlDataType(); // depends on control dependency: [for], data = [none]
row[isql_datetime_sub] = ti.getSqlDateTimeSub(); // depends on control dependency: [for], data = [none]
row[inum_prec_radix] = ti.getNumPrecRadix(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[iinterval_precision] = ti.getIntervalPrecision(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[iis_sup_as_tcol] = ti.isSupportedAsTCol(); // depends on control dependency: [for], data = [none]
row[iis_sup_as_pcol] = ti.isSupportedAsPCol(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[imax_prec_or_len_act] = ti.getPrecisionAct(); // depends on control dependency: [for], data = [none]
row[imin_scale_actual] = ti.getMinScaleAct(); // depends on control dependency: [for], data = [none]
row[imax_scale_actual] = ti.getMaxScaleAct(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[ics_cls_name] = ti.getColStClsName(); // depends on control dependency: [for], data = [none]
row[ics_cls_is_supported] = ti.isColStClsSupported(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[ism_cls_name] = ti.getStdMapClsName(); // depends on control dependency: [for], data = [none]
row[ism_cls_is_supported] = ti.isStdMapClsSupported(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[icm_cls_name] = ti.getCstMapClsName(); // depends on control dependency: [for], data = [none]
try {
if (row[icm_cls_name] != null) {
ns.classForName((String) row[icm_cls_name]); // depends on control dependency: [if], data = [none]
row[icm_cls_is_supported] = Boolean.TRUE; // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
row[icm_cls_is_supported] = Boolean.FALSE;
} // depends on control dependency: [catch], data = [none]
//------------------------------------------
row[imax_char_oct_len_jdbc] = ti.getCharOctLen(); // depends on control dependency: [for], data = [none]
row[imax_char_oct_len_act] = ti.getCharOctLenAct(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[idef_or_fixed_scale] = ti.getDefaultScale(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[iremarks] = ti.getRemarks(); // depends on control dependency: [for], data = [none]
//------------------------------------------
row[itype_sub] = ti.getDataTypeSub(); // depends on control dependency: [for], data = [none]
t.insertSys(store, row); // depends on control dependency: [for], data = [none]
}
return t;
} } |
public class class_name {
public void flushQueue() {
/*
Basically we just want to form GridOp and pass it to native executioner
But since we don't have GridOp interface yet, we'll send everything to underlying CudaExecutioner.
*/
// logger.info("Non-Blocking flush");
// TODO: proper implementation for GridOp creation required here
/*
Deque<OpDescriptor> currentQueue = deviceQueues.get();
if (currentQueue == null)
return;
OpDescriptor op = currentQueue.pollFirst();
while (op != null) {
pushToGrid(op, false);
op = currentQueue.pollFirst();
}
*/
// we need to check,
OpDescriptor op = lastOp.get();
if (op != null) {
if (!experimental.get()) {
//if (!nativeOps.isExperimentalEnabled()) {
// it might be only pairwise transform here for now
// logger.info("Flushing existing lastOp");
lastOp.remove();
dequeueOp(op);
pushToGrid(op, false);
} else {
throw new UnsupportedOperationException("Experimental flush isn't supported yet");
}
} else {
// logger.info("Queue is empty");
}
} } | public class class_name {
public void flushQueue() {
/*
Basically we just want to form GridOp and pass it to native executioner
But since we don't have GridOp interface yet, we'll send everything to underlying CudaExecutioner.
*/
// logger.info("Non-Blocking flush");
// TODO: proper implementation for GridOp creation required here
/*
Deque<OpDescriptor> currentQueue = deviceQueues.get();
if (currentQueue == null)
return;
OpDescriptor op = currentQueue.pollFirst();
while (op != null) {
pushToGrid(op, false);
op = currentQueue.pollFirst();
}
*/
// we need to check,
OpDescriptor op = lastOp.get();
if (op != null) {
if (!experimental.get()) {
//if (!nativeOps.isExperimentalEnabled()) {
// it might be only pairwise transform here for now
// logger.info("Flushing existing lastOp");
lastOp.remove(); // depends on control dependency: [if], data = [none]
dequeueOp(op); // depends on control dependency: [if], data = [none]
pushToGrid(op, false); // depends on control dependency: [if], data = [none]
} else {
throw new UnsupportedOperationException("Experimental flush isn't supported yet");
}
} else {
// logger.info("Queue is empty");
}
} } |
public class class_name {
public void savePNGKMNumRisk(String fileName) {
if (fileName.startsWith("null") || fileName.startsWith("Null") || fileName.startsWith("NULL")) {
return;
}
this.fileName = fileName;
NumbersAtRiskPanel numbersAtRiskPanel = new NumbersAtRiskPanel();
numbersAtRiskPanel.setKaplanMeierFigure(this);
numbersAtRiskPanel.setSize(this.getWidth(), numbersAtRiskPanel.getHeight());
BufferedImage imageKM = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D graphics2D = imageKM.createGraphics();
this.paint(graphics2D);
BufferedImage imageNumRisk = new BufferedImage(numbersAtRiskPanel.getWidth(), numbersAtRiskPanel.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D graphics2DNumRisk = imageNumRisk.createGraphics();
numbersAtRiskPanel.paint(graphics2DNumRisk);
BufferedImage image = new BufferedImage(numbersAtRiskPanel.getWidth(), numbersAtRiskPanel.getHeight() + this.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D g = image.createGraphics();
g.drawImage(imageKM, 0, 0, null);
g.drawImage(imageNumRisk, 0, this.getHeight(), null);
try {
ImageIO.write(image, "png", new File(fileName));
} catch (Exception ex) {
ex.printStackTrace();
}
} } | public class class_name {
public void savePNGKMNumRisk(String fileName) {
if (fileName.startsWith("null") || fileName.startsWith("Null") || fileName.startsWith("NULL")) {
return; // depends on control dependency: [if], data = [none]
}
this.fileName = fileName;
NumbersAtRiskPanel numbersAtRiskPanel = new NumbersAtRiskPanel();
numbersAtRiskPanel.setKaplanMeierFigure(this);
numbersAtRiskPanel.setSize(this.getWidth(), numbersAtRiskPanel.getHeight());
BufferedImage imageKM = new BufferedImage(this.getWidth(), this.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D graphics2D = imageKM.createGraphics();
this.paint(graphics2D);
BufferedImage imageNumRisk = new BufferedImage(numbersAtRiskPanel.getWidth(), numbersAtRiskPanel.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D graphics2DNumRisk = imageNumRisk.createGraphics();
numbersAtRiskPanel.paint(graphics2DNumRisk);
BufferedImage image = new BufferedImage(numbersAtRiskPanel.getWidth(), numbersAtRiskPanel.getHeight() + this.getHeight(), BufferedImage.TYPE_INT_RGB);
Graphics2D g = image.createGraphics();
g.drawImage(imageKM, 0, 0, null);
g.drawImage(imageNumRisk, 0, this.getHeight(), null);
try {
ImageIO.write(image, "png", new File(fileName)); // depends on control dependency: [try], data = [none]
} catch (Exception ex) {
ex.printStackTrace();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
public void handleRequest(final HttpServerExchange exchange) throws Exception {
// parse the body to map or list if content type is application/json
String contentType = exchange.getRequestHeaders().getFirst(Headers.CONTENT_TYPE);
if (contentType != null) {
if (exchange.isInIoThread()) {
exchange.dispatch(this);
return;
}
exchange.startBlocking();
try {
if (contentType.startsWith("application/json")) {
InputStream inputStream = exchange.getInputStream();
String unparsedRequestBody = StringUtils.inputStreamToString(inputStream, StandardCharsets.UTF_8);
// attach the unparsed request body into exchange if the cacheRequestBody is enabled in body.yml
if (config.isCacheRequestBody()) {
exchange.putAttachment(REQUEST_BODY_STRING, unparsedRequestBody);
}
// attach the parsed request body into exchange if the body parser is enabled
attachJsonBody(exchange, unparsedRequestBody);
} else if (contentType.startsWith("multipart/form-data") || contentType.startsWith("application/x-www-form-urlencoded")) {
// attach the parsed request body into exchange if the body parser is enabled
attachFormDataBody(exchange);
}
} catch (IOException e) {
logger.error("IOException: ", e);
setExchangeStatus(exchange, CONTENT_TYPE_MISMATCH, contentType);
return;
}
}
Handler.next(exchange, next);
} } | public class class_name {
@Override
public void handleRequest(final HttpServerExchange exchange) throws Exception {
// parse the body to map or list if content type is application/json
String contentType = exchange.getRequestHeaders().getFirst(Headers.CONTENT_TYPE);
if (contentType != null) {
if (exchange.isInIoThread()) {
exchange.dispatch(this);
return;
}
exchange.startBlocking();
try {
if (contentType.startsWith("application/json")) {
InputStream inputStream = exchange.getInputStream();
String unparsedRequestBody = StringUtils.inputStreamToString(inputStream, StandardCharsets.UTF_8);
// attach the unparsed request body into exchange if the cacheRequestBody is enabled in body.yml
if (config.isCacheRequestBody()) {
exchange.putAttachment(REQUEST_BODY_STRING, unparsedRequestBody); // depends on control dependency: [if], data = [none]
}
// attach the parsed request body into exchange if the body parser is enabled
attachJsonBody(exchange, unparsedRequestBody); // depends on control dependency: [if], data = [none]
} else if (contentType.startsWith("multipart/form-data") || contentType.startsWith("application/x-www-form-urlencoded")) {
// attach the parsed request body into exchange if the body parser is enabled
attachFormDataBody(exchange); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
logger.error("IOException: ", e);
setExchangeStatus(exchange, CONTENT_TYPE_MISMATCH, contentType);
return;
}
}
Handler.next(exchange, next);
} } |
public class class_name {
public BaseMessageFilter linkRemoteSession(Object remoteSession)
{
if (remoteSession instanceof RemoteSession)
if (remoteSession instanceof RecordOwner) // Always
if (m_source == null)
{
String strTableName = (String)this.getProperties().get(TABLE_NAME);
Record record = (Record)((RecordOwner)remoteSession).getRecord(strTableName);
if (record != null)
{
record.addListener(new SyncRecordMessageFilterHandler(this, true));
m_source = record;
}
}
return super.linkRemoteSession(remoteSession);
} } | public class class_name {
public BaseMessageFilter linkRemoteSession(Object remoteSession)
{
if (remoteSession instanceof RemoteSession)
if (remoteSession instanceof RecordOwner) // Always
if (m_source == null)
{
String strTableName = (String)this.getProperties().get(TABLE_NAME);
Record record = (Record)((RecordOwner)remoteSession).getRecord(strTableName);
if (record != null)
{
record.addListener(new SyncRecordMessageFilterHandler(this, true)); // depends on control dependency: [if], data = [none]
m_source = record; // depends on control dependency: [if], data = [none]
}
}
return super.linkRemoteSession(remoteSession);
} } |
public class class_name {
public static List<Element> toElementList(NodeList nodeList) {
List<Element> elements = new ArrayList<Element>();
for (int i = 0; i < nodeList.getLength(); i++) {
Node node = nodeList.item(i);
if (node instanceof Element) {
elements.add((Element) node);
}
}
return elements;
} } | public class class_name {
public static List<Element> toElementList(NodeList nodeList) {
List<Element> elements = new ArrayList<Element>();
for (int i = 0; i < nodeList.getLength(); i++) {
Node node = nodeList.item(i);
if (node instanceof Element) {
elements.add((Element) node); // depends on control dependency: [if], data = [none]
}
}
return elements;
} } |
public class class_name {
public void setProperties( Map<String,String> properties ) {
Map<String,String> userProperties = systemSession.getUserProperties();
if ( userProperties == null ) {
userProperties = new HashMap<String,String>();
}
userProperties.putAll( properties );
log.debug( "Session userProperties: {}", userProperties );
systemSession = (MavenRepositorySystemSession)systemSession.setUserProperties( userProperties );
} } | public class class_name {
public void setProperties( Map<String,String> properties ) {
Map<String,String> userProperties = systemSession.getUserProperties();
if ( userProperties == null ) {
userProperties = new HashMap<String,String>(); // depends on control dependency: [if], data = [none]
}
userProperties.putAll( properties );
log.debug( "Session userProperties: {}", userProperties );
systemSession = (MavenRepositorySystemSession)systemSession.setUserProperties( userProperties );
} } |
public class class_name {
public DatastreamVersion addVersion(Date createdDate) {
int n = versions.size();
while (hasVersion(id + "." + n)) {
n++;
}
DatastreamVersion dsv = new DatastreamVersion(id + "." + n,
createdDate);
versions.add(dsv);
return dsv;
} } | public class class_name {
public DatastreamVersion addVersion(Date createdDate) {
int n = versions.size();
while (hasVersion(id + "." + n)) {
n++; // depends on control dependency: [while], data = [none]
}
DatastreamVersion dsv = new DatastreamVersion(id + "." + n,
createdDate);
versions.add(dsv);
return dsv;
} } |
public class class_name {
public LibraryComponent getComponentByID(LibraryID id) {
// get from repository
LibraryComponent component = componentRepository.getComponentByID(id);
if (component == null) {
// not found in repository, get it from deployable unit
component = deployableUnit.getLibraryComponents().get(id);
}
return component;
} } | public class class_name {
public LibraryComponent getComponentByID(LibraryID id) {
// get from repository
LibraryComponent component = componentRepository.getComponentByID(id);
if (component == null) {
// not found in repository, get it from deployable unit
component = deployableUnit.getLibraryComponents().get(id); // depends on control dependency: [if], data = [none]
}
return component;
} } |
public class class_name {
public List<CmsGroup> getGroupsOfUser(
CmsDbContext dbc,
String username,
String ouFqn,
boolean includeChildOus,
boolean readRoles,
boolean directGroupsOnly,
String remoteAddress)
throws CmsException {
CmsUser user = readUser(dbc, username);
String prefix = ouFqn + "_" + includeChildOus + "_" + directGroupsOnly + "_" + readRoles + "_" + remoteAddress;
String cacheKey = m_keyGenerator.getCacheKeyForUserGroups(prefix, dbc, user);
List<CmsGroup> groups = m_monitor.getCachedUserGroups(cacheKey);
if (groups == null) {
// get all groups of the user
List<CmsGroup> directGroups = getUserDriver(dbc).readGroupsOfUser(
dbc,
user.getId(),
readRoles ? "" : ouFqn,
readRoles ? true : includeChildOus,
remoteAddress,
readRoles);
Set<CmsGroup> allGroups = new HashSet<CmsGroup>();
if (!readRoles) {
allGroups.addAll(directGroups);
}
if (!directGroupsOnly) {
if (!readRoles) {
// now get all parents of the groups
for (int i = 0; i < directGroups.size(); i++) {
CmsGroup parent = getParent(dbc, directGroups.get(i).getName());
while ((parent != null) && (!allGroups.contains(parent))) {
if (parent.getOuFqn().startsWith(ouFqn)) {
allGroups.add(parent);
}
// read next parent group
parent = getParent(dbc, parent.getName());
}
}
}
}
if (readRoles) {
// for each for role
for (int i = 0; i < directGroups.size(); i++) {
CmsGroup group = directGroups.get(i);
CmsRole role = CmsRole.valueOf(group);
if (!includeChildOus && role.getOuFqn().equals(ouFqn)) {
allGroups.add(group);
}
if (includeChildOus && role.getOuFqn().startsWith(ouFqn)) {
allGroups.add(group);
}
if (directGroupsOnly || (!includeChildOus && !role.getOuFqn().equals(ouFqn))) {
// if roles of child OUs are not requested and the role does not belong to the requested OU don't include the role children
continue;
}
CmsOrganizationalUnit currentOu = readOrganizationalUnit(dbc, group.getOuFqn());
boolean readChildRoleGroups = true;
if (currentOu.hasFlagWebuser() && role.forOrgUnit(null).equals(CmsRole.ACCOUNT_MANAGER)) {
readChildRoleGroups = false;
}
if (readChildRoleGroups) {
// get the child roles
Iterator<CmsRole> itChildRoles = role.getChildren(true).iterator();
while (itChildRoles.hasNext()) {
CmsRole childRole = itChildRoles.next();
if (childRole.isSystemRole()) {
if (canReadRoleInOu(currentOu, childRole)) {
// include system roles only
try {
allGroups.add(readGroup(dbc, childRole.getGroupName()));
} catch (CmsDataAccessException e) {
// should not happen, log error if it does
LOG.error(e.getLocalizedMessage(), e);
}
}
}
}
} else {
LOG.info("Skipping child role group check for web user OU " + currentOu.getName());
}
if (includeChildOus) {
// if needed include the roles of child ous
Iterator<CmsOrganizationalUnit> itSubOus = getOrganizationalUnits(
dbc,
readOrganizationalUnit(dbc, group.getOuFqn()),
true).iterator();
while (itSubOus.hasNext()) {
CmsOrganizationalUnit subOu = itSubOus.next();
// add role in child ou
try {
if (canReadRoleInOu(subOu, role)) {
allGroups.add(readGroup(dbc, role.forOrgUnit(subOu.getName()).getGroupName()));
}
} catch (CmsDbEntryNotFoundException e) {
// ignore, this may happen while deleting an orgunit
if (LOG.isDebugEnabled()) {
LOG.debug(e.getLocalizedMessage(), e);
}
}
// add child roles in child ous
Iterator<CmsRole> itChildRoles = role.getChildren(true).iterator();
while (itChildRoles.hasNext()) {
CmsRole childRole = itChildRoles.next();
try {
if (canReadRoleInOu(subOu, childRole)) {
allGroups.add(
readGroup(dbc, childRole.forOrgUnit(subOu.getName()).getGroupName()));
}
} catch (CmsDbEntryNotFoundException e) {
// ignore, this may happen while deleting an orgunit
if (LOG.isDebugEnabled()) {
LOG.debug(e.getLocalizedMessage(), e);
}
}
}
}
}
}
}
// make group list unmodifiable for caching
groups = Collections.unmodifiableList(new ArrayList<CmsGroup>(allGroups));
if (dbc.getProjectId().isNullUUID()) {
m_monitor.cacheUserGroups(cacheKey, groups);
}
}
return groups;
} } | public class class_name {
public List<CmsGroup> getGroupsOfUser(
CmsDbContext dbc,
String username,
String ouFqn,
boolean includeChildOus,
boolean readRoles,
boolean directGroupsOnly,
String remoteAddress)
throws CmsException {
CmsUser user = readUser(dbc, username);
String prefix = ouFqn + "_" + includeChildOus + "_" + directGroupsOnly + "_" + readRoles + "_" + remoteAddress;
String cacheKey = m_keyGenerator.getCacheKeyForUserGroups(prefix, dbc, user);
List<CmsGroup> groups = m_monitor.getCachedUserGroups(cacheKey);
if (groups == null) {
// get all groups of the user
List<CmsGroup> directGroups = getUserDriver(dbc).readGroupsOfUser(
dbc,
user.getId(),
readRoles ? "" : ouFqn,
readRoles ? true : includeChildOus,
remoteAddress,
readRoles);
Set<CmsGroup> allGroups = new HashSet<CmsGroup>();
if (!readRoles) {
allGroups.addAll(directGroups);
}
if (!directGroupsOnly) {
if (!readRoles) {
// now get all parents of the groups
for (int i = 0; i < directGroups.size(); i++) {
CmsGroup parent = getParent(dbc, directGroups.get(i).getName());
while ((parent != null) && (!allGroups.contains(parent))) {
if (parent.getOuFqn().startsWith(ouFqn)) {
allGroups.add(parent); // depends on control dependency: [if], data = [none]
}
// read next parent group
parent = getParent(dbc, parent.getName()); // depends on control dependency: [while], data = [none]
}
}
}
}
if (readRoles) {
// for each for role
for (int i = 0; i < directGroups.size(); i++) {
CmsGroup group = directGroups.get(i);
CmsRole role = CmsRole.valueOf(group);
if (!includeChildOus && role.getOuFqn().equals(ouFqn)) {
allGroups.add(group); // depends on control dependency: [if], data = [none]
}
if (includeChildOus && role.getOuFqn().startsWith(ouFqn)) {
allGroups.add(group); // depends on control dependency: [if], data = [none]
}
if (directGroupsOnly || (!includeChildOus && !role.getOuFqn().equals(ouFqn))) {
// if roles of child OUs are not requested and the role does not belong to the requested OU don't include the role children
continue;
}
CmsOrganizationalUnit currentOu = readOrganizationalUnit(dbc, group.getOuFqn());
boolean readChildRoleGroups = true;
if (currentOu.hasFlagWebuser() && role.forOrgUnit(null).equals(CmsRole.ACCOUNT_MANAGER)) {
readChildRoleGroups = false; // depends on control dependency: [if], data = [none]
}
if (readChildRoleGroups) {
// get the child roles
Iterator<CmsRole> itChildRoles = role.getChildren(true).iterator();
while (itChildRoles.hasNext()) {
CmsRole childRole = itChildRoles.next();
if (childRole.isSystemRole()) {
if (canReadRoleInOu(currentOu, childRole)) {
// include system roles only
try {
allGroups.add(readGroup(dbc, childRole.getGroupName())); // depends on control dependency: [try], data = [none]
} catch (CmsDataAccessException e) {
// should not happen, log error if it does
LOG.error(e.getLocalizedMessage(), e);
} // depends on control dependency: [catch], data = [none]
}
}
}
} else {
LOG.info("Skipping child role group check for web user OU " + currentOu.getName()); // depends on control dependency: [if], data = [none]
}
if (includeChildOus) {
// if needed include the roles of child ous
Iterator<CmsOrganizationalUnit> itSubOus = getOrganizationalUnits(
dbc,
readOrganizationalUnit(dbc, group.getOuFqn()),
true).iterator();
while (itSubOus.hasNext()) {
CmsOrganizationalUnit subOu = itSubOus.next();
// add role in child ou
try {
if (canReadRoleInOu(subOu, role)) {
allGroups.add(readGroup(dbc, role.forOrgUnit(subOu.getName()).getGroupName())); // depends on control dependency: [if], data = [none]
}
} catch (CmsDbEntryNotFoundException e) {
// ignore, this may happen while deleting an orgunit
if (LOG.isDebugEnabled()) {
LOG.debug(e.getLocalizedMessage(), e); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
// add child roles in child ous
Iterator<CmsRole> itChildRoles = role.getChildren(true).iterator();
while (itChildRoles.hasNext()) {
CmsRole childRole = itChildRoles.next();
try {
if (canReadRoleInOu(subOu, childRole)) {
allGroups.add(
readGroup(dbc, childRole.forOrgUnit(subOu.getName()).getGroupName())); // depends on control dependency: [if], data = [none]
}
} catch (CmsDbEntryNotFoundException e) {
// ignore, this may happen while deleting an orgunit
if (LOG.isDebugEnabled()) {
LOG.debug(e.getLocalizedMessage(), e); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
}
}
}
}
}
// make group list unmodifiable for caching
groups = Collections.unmodifiableList(new ArrayList<CmsGroup>(allGroups));
if (dbc.getProjectId().isNullUUID()) {
m_monitor.cacheUserGroups(cacheKey, groups);
}
}
return groups;
} } |
public class class_name {
    /**
     * Replaces the configured cache key parameters with a defensive copy of the
     * supplied collection; passing {@code null} clears the setting.
     *
     * @param cacheKeyParameters the new cache key parameters, or {@code null} to unset them
     */
    public void setCacheKeyParameters(java.util.Collection<String> cacheKeyParameters) {
        // Copy so later mutation of the caller's collection cannot affect this object.
        this.cacheKeyParameters = (cacheKeyParameters == null)
                ? null
                : new java.util.ArrayList<String>(cacheKeyParameters);
    } } | public class class_name {
    /**
     * Replaces the configured cache key parameters with a defensive copy of the
     * supplied collection; {@code null} clears the setting.
     *
     * @param cacheKeyParameters the new cache key parameters, or {@code null} to unset them
     */
    public void setCacheKeyParameters(java.util.Collection<String> cacheKeyParameters) {
        if (cacheKeyParameters == null) {
            this.cacheKeyParameters = null; // depends on control dependency: [if], data = [none]
            return; // depends on control dependency: [if], data = [none]
        }
        // Defensive copy of the caller's collection.
        this.cacheKeyParameters = new java.util.ArrayList<String>(cacheKeyParameters);
    } }
public class class_name {
    /**
     * Builds the member summary section and appends it to the given tree.
     * Inherited-member summaries are added only when requested, and the summary
     * header/tree is emitted only if at least one summary fragment was produced.
     *
     * @param writer               writer used to generate the summary markup
     * @param visibleMemberMap     the visible members to summarize
     * @param showInheritedSummary whether inherited member summaries are included
     * @param memberSummaryTree    tree node that receives the generated summary
     */
    private void addSummary(MemberSummaryWriter writer,
            VisibleMemberMap visibleMemberMap, boolean showInheritedSummary,
            Content memberSummaryTree) {
        LinkedList<Content> summaries = new LinkedList<Content>();
        buildSummary(writer, visibleMemberMap, summaries);
        if (showInheritedSummary) {
            buildInheritedSummary(writer, visibleMemberMap, summaries);
        }
        if (summaries.isEmpty()) {
            return; // nothing to render
        }
        Content memberTree = writer.getMemberSummaryHeader(classDoc, memberSummaryTree);
        for (Content summary : summaries) {
            memberTree.addContent(summary);
        }
        memberSummaryTree.addContent(writer.getMemberTree(memberTree));
    } } | public class class_name {
    /**
     * Builds the member summary section (optionally including inherited members)
     * and appends it to {@code memberSummaryTree} when non-empty.
     */
    private void addSummary(MemberSummaryWriter writer,
            VisibleMemberMap visibleMemberMap, boolean showInheritedSummary,
            Content memberSummaryTree) {
        LinkedList<Content> summaryTreeList = new LinkedList<Content>();
        buildSummary(writer, visibleMemberMap, summaryTreeList);
        if (showInheritedSummary)
            buildInheritedSummary(writer, visibleMemberMap, summaryTreeList);
        if (!summaryTreeList.isEmpty()) {
            Content memberTree = writer.getMemberSummaryHeader(
                    classDoc, memberSummaryTree);
            for (int i = 0; i < summaryTreeList.size(); i++) {
                memberTree.addContent(summaryTreeList.get(i)); // depends on control dependency: [for], data = [i]
            }
            memberSummaryTree.addContent(writer.getMemberTree(memberTree)); // depends on control dependency: [if], data = [none]
        }
    } }
public class class_name {
    /**
     * Phase-3 stage 5: runs the parameter/term/statement equivalencing passes on
     * the proto network. If no injection type is enabled the stage is skipped.
     *
     * @param pn the proto network to process
     * @return the same proto network instance, after processing
     */
    private ProtoNetwork stage5(ProtoNetwork pn) {
        beginStage(PHASE3_STAGE5_HDR, "5", NUM_PHASES);
        final StringBuilder msg = new StringBuilder();

        // Nothing to do when every injection type is disabled.
        if (!withGeneScaffoldingInjection()
                && !withNamedComplexInjection()
                && !withProteinFamilyInjection()) {
            msg.append(INJECTIONS_DISABLED);
            markEndStage(msg);
            stageOutput(msg.toString());
            return pn;
        }

        // Load namespace equivalences; fall back to an empty set on failure.
        Set<EquivalenceDataIndex> equivs;
        try {
            equivs = p2.stage2LoadNamespaceEquivalences();
        } catch (EquivalenceMapResolutionFailure e) {
            stageError(e.getUserFacingMessage());
            equivs = emptySet();
        }

        final long started = currentTimeMillis();
        // The shared builder is handed to stage5Parameter and reset before reuse below.
        final int pct = stage5Parameter(pn, equivs, msg);
        stage5Term(pn, pct);
        stage5Statement(pn, pct);
        final long finished = currentTimeMillis();

        final int paramct = pn.getParameterTable().getTableParameters().size();
        final int termct = pn.getTermTable().getTermValues().size();
        final int stmtct = pn.getStatementTable().getStatements().size();

        msg.setLength(0);
        msg.append(stmtct).append(" statements, ")
                .append(termct).append(" terms, ")
                .append(paramct).append(" parameters");
        stageOutput(msg.toString());

        msg.setLength(0);
        markTime(msg, started, finished);
        markEndStage(msg);
        stageOutput(msg.toString());
        return pn;
    } } | public class class_name {
    /**
     * Phase-3 stage 5: equivalencing of parameters, terms and statements.
     * Skips all work (and says so) when every injection type is disabled.
     */
    private ProtoNetwork stage5(ProtoNetwork pn) {
        beginStage(PHASE3_STAGE5_HDR, "5", NUM_PHASES);
        final StringBuilder bldr = new StringBuilder();
        if (!withGeneScaffoldingInjection() &&
                !withNamedComplexInjection() &&
                !withProteinFamilyInjection()) {
            bldr.append(INJECTIONS_DISABLED); // depends on control dependency: [if], data = [none]
            markEndStage(bldr); // depends on control dependency: [if], data = [none]
            stageOutput(bldr.toString()); // depends on control dependency: [if], data = [none]
            return pn; // depends on control dependency: [if], data = [none]
        }
        // load equivalences
        Set<EquivalenceDataIndex> equivs;
        try {
            equivs = p2.stage2LoadNamespaceEquivalences(); // depends on control dependency: [try], data = [none]
        } catch (EquivalenceMapResolutionFailure e) {
            stageError(e.getUserFacingMessage());
            equivs = emptySet();
        } // depends on control dependency: [catch], data = [none]
        long t1 = currentTimeMillis();
        int pct = stage5Parameter(pn, equivs, bldr);
        stage5Term(pn, pct);
        stage5Statement(pn, pct);
        long t2 = currentTimeMillis();
        final int paramct = pn.getParameterTable().getTableParameters().size();
        final int termct = pn.getTermTable().getTermValues().size();
        final int stmtct = pn.getStatementTable().getStatements().size();
        bldr.setLength(0);
        bldr.append(stmtct);
        bldr.append(" statements, ");
        bldr.append(termct);
        bldr.append(" terms, ");
        bldr.append(paramct);
        bldr.append(" parameters");
        stageOutput(bldr.toString());
        bldr.setLength(0);
        markTime(bldr, t1, t2);
        markEndStage(bldr);
        stageOutput(bldr.toString());
        return pn;
    } }
public class class_name {
    /**
     * Returns the isolation window as an m/z range.
     *
     * @return a {@code DoubleRange} built from the low/high bounds, or
     *         {@code null} if either bound is missing
     */
    public DoubleRange getMzRange() {
        // Both isolation window bounds must be present to form a range.
        if (isolationWindowMzLo == null || isolationWindowMzHi == null) {
            return null;
        }
        return new DoubleRange(isolationWindowMzLo, isolationWindowMzHi);
    } } | public class class_name {
    /** Returns the isolation window as a range, or {@code null} if either bound is missing. */
    public DoubleRange getMzRange() {
        if (isolationWindowMzLo != null && isolationWindowMzHi != null) {
            return new DoubleRange(isolationWindowMzLo, isolationWindowMzHi); // depends on control dependency: [if], data = [(isolationWindowMzLo]
        }
        return null;
    } }
public class class_name {
    /**
     * Returns the index of the {@code occurrence}-th appearance of {@code value}
     * in {@code charArray}, or -1 if the value does not occur that many times.
     *
     * @param charArray  the array to scan
     * @param value      the character to look for
     * @param occurrence which match to report (1-based)
     * @return index of the requested occurrence, or -1 if not found
     * @throws IllegalArgumentException if {@code occurrence} is not in
     *         {@code [1, charArray.length]}
     */
    public static int search(char[] charArray, char value, int occurrence) {
        if(occurrence <= 0 || occurrence > charArray.length) {
            // Message fixed: the guard above accepts occurrence == charArray.length,
            // so the bound is "less than or equal to" the array length.
            throw new IllegalArgumentException("Occurrence must be greater or equal to 1 and less than "
                    + "or equal to the array length: " + occurrence);
        }
        int valuesSeen = 0;
        for(int i = 0; i < charArray.length; i++) {
            if(charArray[i] == value) {
                valuesSeen++;
                if(valuesSeen == occurrence) {
                    return i;
                }
            }
        }
        return -1;
    } } | public class class_name {
    /**
     * Returns the index of the {@code occurrence}-th appearance of {@code value}
     * in {@code charArray}, or -1 if there are fewer occurrences than requested.
     *
     * @throws IllegalArgumentException if {@code occurrence} is outside [1, charArray.length]
     */
    public static int search(char[] charArray, char value, int occurrence) {
        if(occurrence <= 0 || occurrence > charArray.length) {
            throw new IllegalArgumentException("Occurrence must be greater or equal to 1 and less than "
                    + "the array length: " + occurrence);
        }
        int valuesSeen = 0;
        for(int i = 0; i < charArray.length; i++) {
            if(charArray[i] == value) {
                valuesSeen++; // depends on control dependency: [if], data = [none]
                if(valuesSeen == occurrence) {
                    return i; // depends on control dependency: [if], data = [none]
                }
            }
        }
        return -1;
    } }
public class class_name {
    /**
     * Stops the underlying lookup service (if it supports closing) and marks this
     * component as stopped; the stop is performed at most once.
     *
     * NOTE(review): this is the double-checked locking idiom on {@code isStarted};
     * it is only safe if that field is declared {@code volatile} (declaration not
     * visible here) -- confirm.
     */
    @Override
    public void stop(){
        if(isStarted){
            synchronized (this) {
                // Re-check under the lock so only one caller performs the shutdown.
                if (isStarted) {
                    // Only lookup services that support closing need an explicit stop.
                    if (getLookupService() instanceof Closable) {
                        ((Closable) getLookupService()).stop();
                    }
                    isStarted = false;
                }
            }
        }
    } } | public class class_name {
    /**
     * Stops the underlying lookup service at most once, using double-checked
     * locking on {@code isStarted}.
     */
    @Override
    public void stop(){
        if(isStarted){
            synchronized (this) { // depends on control dependency: [if], data = [none]
                if (isStarted) {
                    if (getLookupService() instanceof Closable) {
                        ((Closable) getLookupService()).stop(); // depends on control dependency: [if], data = [none]
                    }
                    isStarted = false; // depends on control dependency: [if], data = [none]
                }
            }
        }
    } }
public class class_name {
    /**
     * Sets the switch expression, maintaining EMF inverse references and firing
     * the appropriate change notification.
     *
     * @param newSwitch the new switch expression (may be {@code null})
     */
    public void setSwitch(XExpression newSwitch) {
        if (newSwitch == switch_) {
            // No structural change: still emit a "touch" notification when required.
            if (eNotificationRequired()) {
                eNotify(new ENotificationImpl(this, Notification.SET, XbasePackage.XSWITCH_EXPRESSION__SWITCH, newSwitch, newSwitch));
            }
            return;
        }
        NotificationChain msgs = null;
        // Detach the inverse reference of the old value, attach it on the new one.
        if (switch_ != null) {
            msgs = ((InternalEObject)switch_).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XSWITCH_EXPRESSION__SWITCH, null, msgs);
        }
        if (newSwitch != null) {
            msgs = ((InternalEObject)newSwitch).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XSWITCH_EXPRESSION__SWITCH, null, msgs);
        }
        msgs = basicSetSwitch(newSwitch, msgs);
        if (msgs != null) {
            msgs.dispatch();
        }
    } } | public class class_name {
    /**
     * Sets the switch expression, maintaining EMF inverse references and firing
     * the appropriate change notification.
     */
    public void setSwitch(XExpression newSwitch)
    {
        if (newSwitch != switch_)
        {
            NotificationChain msgs = null;
            if (switch_ != null)
                msgs = ((InternalEObject)switch_).eInverseRemove(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XSWITCH_EXPRESSION__SWITCH, null, msgs);
            if (newSwitch != null)
                msgs = ((InternalEObject)newSwitch).eInverseAdd(this, EOPPOSITE_FEATURE_BASE - XbasePackage.XSWITCH_EXPRESSION__SWITCH, null, msgs);
            msgs = basicSetSwitch(newSwitch, msgs); // depends on control dependency: [if], data = [(newSwitch]
            if (msgs != null) msgs.dispatch();
        }
        else if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, XbasePackage.XSWITCH_EXPRESSION__SWITCH, newSwitch, newSwitch));
    } }
public class class_name {
    /**
     * Returns a random point within {@code maxRadius} of {@code centerPos},
     * reachable from {@code startRef} across polygons accepted by {@code filter}.
     * Performs a Dijkstra-like flood fill from the start polygon, picks a ground
     * polygon weighted by area via reservoir sampling, then chooses a random
     * point inside that polygon.
     *
     * Fix: removed a dead {@code if (parentRef != 0)} block that fetched the
     * parent tile/poly into locals that were never read.
     *
     * @param startRef  reference of the polygon to start the search from
     * @param centerPos circle center position
     * @param maxRadius search radius; must be finite and non-negative
     * @param filter    polygon filter applied while expanding
     * @param frand     random number generator
     * @return a successful result holding the chosen polygon ref and point, or a
     *         failure/invalid-param status
     */
    public Result<FindRandomPointResult> findRandomPointAroundCircle(long startRef, float[] centerPos, float maxRadius,
            QueryFilter filter, FRand frand) {

        // Validate input
        if (!m_nav.isValidPolyRef(startRef) || Objects.isNull(centerPos) || !vIsFinite(centerPos) || maxRadius < 0
                || !Float.isFinite(maxRadius) || Objects.isNull(filter) || Objects.isNull(frand)) {
            return Result.invalidParam();
        }

        Tupple2<MeshTile, Poly> tileAndPoly = m_nav.getTileAndPolyByRefUnsafe(startRef);
        MeshTile startTile = tileAndPoly.first;
        Poly startPoly = tileAndPoly.second;
        if (!filter.passFilter(startRef, startTile, startPoly)) {
            return Result.invalidParam("Invalid start ref");
        }

        m_nodePool.clear();
        m_openList.clear();

        Node startNode = m_nodePool.getNode(startRef);
        vCopy(startNode.pos, centerPos);
        startNode.pidx = 0;
        startNode.cost = 0;
        startNode.total = 0;
        startNode.id = startRef;
        startNode.flags = DT_NODE_OPEN;
        m_openList.push(startNode);

        float radiusSqr = maxRadius * maxRadius;
        float areaSum = 0.0f;
        MeshTile randomTile = null;
        Poly randomPoly = null;
        long randomPolyRef = 0;

        while (!m_openList.isEmpty()) {
            Node bestNode = m_openList.pop();
            bestNode.flags &= ~DT_NODE_OPEN;
            bestNode.flags |= DT_NODE_CLOSED;
            // Get poly and tile.
            // The API input has been checked already, skip checking internal data.
            long bestRef = bestNode.id;
            Tupple2<MeshTile, Poly> bestTilePoly = m_nav.getTileAndPolyByRefUnsafe(bestRef);
            MeshTile bestTile = bestTilePoly.first;
            Poly bestPoly = bestTilePoly.second;

            // Place random locations on ground polygons only.
            if (bestPoly.getType() == Poly.DT_POLYTYPE_GROUND) {
                // Calc area of the polygon by fanning triangles from the first vertex.
                float polyArea = 0.0f;
                for (int j = 2; j < bestPoly.vertCount; ++j) {
                    int va = bestPoly.verts[0] * 3;
                    int vb = bestPoly.verts[j - 1] * 3;
                    int vc = bestPoly.verts[j] * 3;
                    polyArea += triArea2D(bestTile.data.verts, va, vb, vc);
                }
                // Choose random polygon weighted by area, using reservoir sampling.
                areaSum += polyArea;
                float u = frand.frand();
                if (u * areaSum <= polyArea) {
                    randomTile = bestTile;
                    randomPoly = bestPoly;
                    randomPolyRef = bestRef;
                }
            }

            // Get parent poly ref so the expansion does not immediately walk back.
            long parentRef = 0;
            if (bestNode.pidx != 0) {
                parentRef = m_nodePool.getNodeAtIdx(bestNode.pidx).id;
            }

            for (int i = bestPoly.firstLink; i != NavMesh.DT_NULL_LINK; i = bestTile.links.get(i).next) {
                Link link = bestTile.links.get(i);
                long neighbourRef = link.ref;
                // Skip invalid neighbours and do not follow back to parent.
                if (neighbourRef == 0 || neighbourRef == parentRef) {
                    continue;
                }
                // Expand to neighbour
                Tupple2<MeshTile, Poly> neighbourTilePoly = m_nav.getTileAndPolyByRefUnsafe(neighbourRef);
                MeshTile neighbourTile = neighbourTilePoly.first;
                Poly neighbourPoly = neighbourTilePoly.second;
                // Do not advance if the polygon is excluded by the filter.
                if (!filter.passFilter(neighbourRef, neighbourTile, neighbourPoly)) {
                    continue;
                }
                // Find edge and calc distance to the edge.
                Result<PortalResult> portalpoints = getPortalPoints(bestRef, bestPoly, bestTile, neighbourRef,
                        neighbourPoly, neighbourTile, 0, 0);
                if (portalpoints.failed()) {
                    continue;
                }
                float[] va = portalpoints.result.left;
                float[] vb = portalpoints.result.right;
                // If the circle is not touching the next polygon, skip it.
                Tupple2<Float, Float> distseg = distancePtSegSqr2D(centerPos, va, vb);
                float distSqr = distseg.first;
                if (distSqr > radiusSqr) {
                    continue;
                }
                Node neighbourNode = m_nodePool.getNode(neighbourRef);
                if ((neighbourNode.flags & Node.DT_NODE_CLOSED) != 0) {
                    continue;
                }
                // First visit: seed the node position at the portal midpoint.
                if (neighbourNode.flags == 0) {
                    neighbourNode.pos = vLerp(va, vb, 0.5f);
                }
                float total = bestNode.total + vDist(bestNode.pos, neighbourNode.pos);
                // The node is already in open list and the new result is worse, skip.
                if ((neighbourNode.flags & Node.DT_NODE_OPEN) != 0 && total >= neighbourNode.total) {
                    continue;
                }
                neighbourNode.id = neighbourRef;
                neighbourNode.flags = (neighbourNode.flags & ~Node.DT_NODE_CLOSED);
                neighbourNode.pidx = m_nodePool.getNodeIdx(bestNode);
                neighbourNode.total = total;
                if ((neighbourNode.flags & Node.DT_NODE_OPEN) != 0) {
                    m_openList.modify(neighbourNode);
                } else {
                    neighbourNode.flags = Node.DT_NODE_OPEN;
                    m_openList.push(neighbourNode);
                }
            }
        }

        if (randomPoly == null) {
            return Result.failure();
        }

        // Randomly pick point on polygon.
        float[] verts = new float[3 * m_nav.getMaxVertsPerPoly()];
        float[] areas = new float[m_nav.getMaxVertsPerPoly()];
        System.arraycopy(randomTile.data.verts, randomPoly.verts[0] * 3, verts, 0, 3);
        for (int j = 1; j < randomPoly.vertCount; ++j) {
            System.arraycopy(randomTile.data.verts, randomPoly.verts[j] * 3, verts, j * 3, 3);
        }
        float s = frand.frand();
        float t = frand.frand();
        float[] pt = randomPointInConvexPoly(verts, randomPoly.vertCount, areas, s, t);
        FindRandomPointResult result = new FindRandomPointResult(randomPolyRef, pt);
        // Snap the point's height to the polygon surface; propagate status on failure.
        Result<Float> pheight = getPolyHeight(randomPolyRef, pt);
        if (pheight.failed()) {
            return Result.of(pheight.status, result);
        }
        pt[1] = pheight.result;
        return Result.success(result);
    } } | public class class_name {
    /**
     * Returns a random point within {@code maxRadius} of {@code centerPos},
     * reachable from {@code startRef} across polygons accepted by {@code filter}.
     * Flood-fills from the start polygon, selecting a ground polygon by
     * area-weighted reservoir sampling, then picks a random point inside it.
     */
    public Result<FindRandomPointResult> findRandomPointAroundCircle(long startRef, float[] centerPos, float maxRadius,
            QueryFilter filter, FRand frand) {
        // Validate input
        if (!m_nav.isValidPolyRef(startRef) || Objects.isNull(centerPos) || !vIsFinite(centerPos) || maxRadius < 0
                || !Float.isFinite(maxRadius) || Objects.isNull(filter) || Objects.isNull(frand)) {
            return Result.invalidParam(); // depends on control dependency: [if], data = [none]
        }
        Tupple2<MeshTile, Poly> tileAndPoly = m_nav.getTileAndPolyByRefUnsafe(startRef);
        MeshTile startTile = tileAndPoly.first;
        Poly startPoly = tileAndPoly.second;
        if (!filter.passFilter(startRef, startTile, startPoly)) {
            return Result.invalidParam("Invalid start ref"); // depends on control dependency: [if], data = [none]
        }
        m_nodePool.clear();
        m_openList.clear();
        Node startNode = m_nodePool.getNode(startRef);
        vCopy(startNode.pos, centerPos);
        startNode.pidx = 0;
        startNode.cost = 0;
        startNode.total = 0;
        startNode.id = startRef;
        startNode.flags = DT_NODE_OPEN;
        m_openList.push(startNode);
        float radiusSqr = maxRadius * maxRadius;
        float areaSum = 0.0f;
        MeshTile randomTile = null;
        Poly randomPoly = null;
        long randomPolyRef = 0;
        while (!m_openList.isEmpty()) {
            Node bestNode = m_openList.pop();
            bestNode.flags &= ~DT_NODE_OPEN; // depends on control dependency: [while], data = [none]
            bestNode.flags |= DT_NODE_CLOSED; // depends on control dependency: [while], data = [none]
            // Get poly and tile.
            // The API input has been cheked already, skip checking internal data.
            long bestRef = bestNode.id;
            Tupple2<MeshTile, Poly> bestTilePoly = m_nav.getTileAndPolyByRefUnsafe(bestRef);
            MeshTile bestTile = bestTilePoly.first;
            Poly bestPoly = bestTilePoly.second;
            // Place random locations on on ground.
            if (bestPoly.getType() == Poly.DT_POLYTYPE_GROUND) {
                // Calc area of the polygon.
                float polyArea = 0.0f;
                for (int j = 2; j < bestPoly.vertCount; ++j) {
                    int va = bestPoly.verts[0] * 3;
                    int vb = bestPoly.verts[j - 1] * 3;
                    int vc = bestPoly.verts[j] * 3;
                    polyArea += triArea2D(bestTile.data.verts, va, vb, vc); // depends on control dependency: [for], data = [none]
                }
                // Choose random polygon weighted by area, using reservoi sampling.
                areaSum += polyArea; // depends on control dependency: [if], data = [none]
                float u = frand.frand();
                if (u * areaSum <= polyArea) {
                    randomTile = bestTile; // depends on control dependency: [if], data = [none]
                    randomPoly = bestPoly; // depends on control dependency: [if], data = [none]
                    randomPolyRef = bestRef; // depends on control dependency: [if], data = [none]
                }
            }
            // Get parent poly and tile.
            long parentRef = 0;
            if (bestNode.pidx != 0) {
                parentRef = m_nodePool.getNodeAtIdx(bestNode.pidx).id; // depends on control dependency: [if], data = [(bestNode.pidx]
            }
            if (parentRef != 0) {
                Tupple2<MeshTile, Poly> parentTilePoly = m_nav.getTileAndPolyByRefUnsafe(parentRef);
                MeshTile parentTile = parentTilePoly.first;
                Poly parentPoly = parentTilePoly.second;
            }
            for (int i = bestPoly.firstLink; i != NavMesh.DT_NULL_LINK; i = bestTile.links.get(i).next) {
                Link link = bestTile.links.get(i);
                long neighbourRef = link.ref;
                // Skip invalid neighbours and do not follow back to parent.
                if (neighbourRef == 0 || neighbourRef == parentRef) {
                    continue;
                }
                // Expand to neighbour
                Tupple2<MeshTile, Poly> neighbourTilePoly = m_nav.getTileAndPolyByRefUnsafe(neighbourRef);
                MeshTile neighbourTile = neighbourTilePoly.first;
                Poly neighbourPoly = neighbourTilePoly.second;
                // Do not advance if the polygon is excluded by the filter.
                if (!filter.passFilter(neighbourRef, neighbourTile, neighbourPoly)) {
                    continue;
                }
                // Find edge and calc distance to the edge.
                Result<PortalResult> portalpoints = getPortalPoints(bestRef, bestPoly, bestTile, neighbourRef,
                        neighbourPoly, neighbourTile, 0, 0);
                if (portalpoints.failed()) {
                    continue;
                }
                float[] va = portalpoints.result.left;
                float[] vb = portalpoints.result.right;
                // If the circle is not touching the next polygon, skip it.
                Tupple2<Float, Float> distseg = distancePtSegSqr2D(centerPos, va, vb);
                float distSqr = distseg.first;
                if (distSqr > radiusSqr) {
                    continue;
                }
                Node neighbourNode = m_nodePool.getNode(neighbourRef);
                if ((neighbourNode.flags & Node.DT_NODE_CLOSED) != 0) {
                    continue;
                }
                // Cost
                if (neighbourNode.flags == 0) {
                    neighbourNode.pos = vLerp(va, vb, 0.5f); // depends on control dependency: [if], data = [none]
                }
                float total = bestNode.total + vDist(bestNode.pos, neighbourNode.pos);
                // The node is already in open list and the new result is worse, skip.
                if ((neighbourNode.flags & Node.DT_NODE_OPEN) != 0 && total >= neighbourNode.total) {
                    continue;
                }
                neighbourNode.id = neighbourRef; // depends on control dependency: [for], data = [none]
                neighbourNode.flags = (neighbourNode.flags & ~Node.DT_NODE_CLOSED); // depends on control dependency: [for], data = [none]
                neighbourNode.pidx = m_nodePool.getNodeIdx(bestNode); // depends on control dependency: [for], data = [none]
                neighbourNode.total = total; // depends on control dependency: [for], data = [none]
                if ((neighbourNode.flags & Node.DT_NODE_OPEN) != 0) {
                    m_openList.modify(neighbourNode); // depends on control dependency: [if], data = [none]
                } else {
                    neighbourNode.flags = Node.DT_NODE_OPEN; // depends on control dependency: [if], data = [none]
                    m_openList.push(neighbourNode); // depends on control dependency: [if], data = [none]
                }
            }
        }
        if (randomPoly == null) {
            return Result.failure(); // depends on control dependency: [if], data = [none]
        }
        // Randomly pick point on polygon.
        float[] verts = new float[3 * m_nav.getMaxVertsPerPoly()];
        float[] areas = new float[m_nav.getMaxVertsPerPoly()];
        System.arraycopy(randomTile.data.verts, randomPoly.verts[0] * 3, verts, 0, 3);
        for (int j = 1; j < randomPoly.vertCount; ++j) {
            System.arraycopy(randomTile.data.verts, randomPoly.verts[j] * 3, verts, j * 3, 3); // depends on control dependency: [for], data = [j]
        }
        float s = frand.frand();
        float t = frand.frand();
        float[] pt = randomPointInConvexPoly(verts, randomPoly.vertCount, areas, s, t);
        FindRandomPointResult result = new FindRandomPointResult(randomPolyRef, pt);
        Result<Float> pheight = getPolyHeight(randomPolyRef, pt);
        if (pheight.failed()) {
            return Result.of(pheight.status, result); // depends on control dependency: [if], data = [none]
        }
        pt[1] = pheight.result;
        return Result.success(result);
    } }
public class class_name {
    /**
     * Returns the policy type descriptions, lazily creating the backing list on
     * first access so this getter never returns {@code null}.
     *
     * @return the (possibly empty) list of policy type descriptions
     */
    public java.util.List<PolicyTypeDescription> getPolicyTypeDescriptions() {
        if (policyTypeDescriptions != null) {
            return policyTypeDescriptions;
        }
        policyTypeDescriptions = new com.amazonaws.internal.SdkInternalList<PolicyTypeDescription>();
        return policyTypeDescriptions;
    } } | public class class_name {
    /** Returns the policy type descriptions, lazily creating the list so the result is never {@code null}. */
    public java.util.List<PolicyTypeDescription> getPolicyTypeDescriptions() {
        if (policyTypeDescriptions == null) {
            policyTypeDescriptions = new com.amazonaws.internal.SdkInternalList<PolicyTypeDescription>(); // depends on control dependency: [if], data = [none]
        }
        return policyTypeDescriptions;
    } }
public class class_name {
    /**
     * Closes this object and deletes any temporary files that were registered as
     * argument values.
     */
    public void close() {
        checkOpen();
        open = false;
        // Clean up temp files handed in as arguments; other argument types are ignored.
        for (Object arg : arguments.values()) {
            if (!(arg instanceof File)) {
                continue;
            }
            File file = (File) arg;
            if (JournalHelper.isTempFile(file) && file.exists()) {
                file.delete();
            }
        }
    } } | public class class_name {
    /** Closes this object and deletes any temp files registered as argument values. */
    public void close() {
        checkOpen();
        open = false;
        for (Object arg : arguments.values()) {
            if (arg instanceof File) {
                File file = (File) arg;
                if (JournalHelper.isTempFile(file)) {
                    if (file.exists()) {
                        file.delete(); // depends on control dependency: [if], data = [none]
                    }
                }
            }
        }
    } }
public class class_name {
    /**
     * Prints {@code message} followed by a space and reads one line from
     * standard input.
     *
     * @param message prompt to display
     * @return the line read (possibly {@code null} at end of stream), or the
     *         empty string if an I/O error occurs
     */
    protected String readLineWithMessage(String message)
    {
        System.out.print(message + " ");
        try
        {
            BufferedReader rin = new BufferedReader(new InputStreamReader(System.in));
            return rin.readLine();
        }
        catch (java.io.IOException e)
        {
            // An unreadable console is treated as "no input" (best effort, as before);
            // unchecked exceptions are no longer silently swallowed by a broad catch.
            return "";
        }
    } } | public class class_name {
    /**
     * Prints {@code message} plus a space and reads one line from standard input,
     * returning the empty string on any failure.
     */
    protected String readLineWithMessage(String message)
    {
        System.out.print(message + " ");
        try
        {
            BufferedReader rin = new BufferedReader(new InputStreamReader(System.in));
            return rin.readLine();
            // depends on control dependency: [try], data = [none]
        }
        catch (Exception e)
        {
            return "";
        }
        // depends on control dependency: [catch], data = [none]
    } }
public class class_name {
    /**
     * Reports whether the far end is trusted according to the ZID cache.
     *
     * @return {@code false} while a cache update is pending or the far-end ZID is
     *         unknown; otherwise the trust flag of the cache entry
     */
    public boolean isTrusted() {
        // Trust can only be read once the peer ZID is known and the cache is current.
        if (!delayedCacheUpdate && farEndZID != null) {
            cache.selectEntry(farEndZID);
            return cache.getTrust();
        }
        return false;
    } } | public class class_name {
    /**
     * Reports whether the far end is trusted per the ZID cache; always
     * {@code false} while a cache update is pending or the ZID is unknown.
     */
    public boolean isTrusted() {
        if (delayedCacheUpdate || farEndZID == null) {
            return false; // depends on control dependency: [if], data = [none]
        }
        cache.selectEntry(farEndZID);
        return cache.getTrust();
    } }
public class class_name {
    /**
     * Returns the last-indexed date for the given index type.
     *
     * @param type the index type; {@code null} delegates to the no-arg overload
     * @return the last indexed date, or {@code null} if never indexed
     * @throws GeoPackageException for unsupported index types
     */
    public Date getLastIndexed(FeatureIndexType type) {
        if (type == null) {
            return getLastIndexed();
        }
        switch (type) {
        case GEOPACKAGE:
            return featureTableIndex.getLastIndexed();
        case RTREE:
            // RTree indexes are maintained by triggers; if present, treat as current "now".
            return rTreeIndexTableDao.has() ? new Date() : null;
        default:
            throw new GeoPackageException("Unsupported FeatureIndexType: "
                    + type);
        }
    } } | public class class_name {
    /**
     * Returns the last-indexed date for the given index type; {@code null} type
     * delegates to the no-arg overload.
     */
    public Date getLastIndexed(FeatureIndexType type) {
        Date lastIndexed = null;
        if (type == null) {
            lastIndexed = getLastIndexed(); // depends on control dependency: [if], data = [none]
        } else {
            switch (type) {
            case GEOPACKAGE:
                lastIndexed = featureTableIndex.getLastIndexed();
                break;
            case RTREE:
                if (rTreeIndexTableDao.has()) {
                    // Updated by triggers, assume up to date
                    lastIndexed = new Date(); // depends on control dependency: [if], data = [none]
                }
                break;
            default:
                throw new GeoPackageException("Unsupported FeatureIndexType: "
                        + type);
            }
        }
        return lastIndexed;
    } }
public class class_name {
    /**
     * Adds the given image ids to this request (fluent setter), initializing the
     * backing list on first use.
     *
     * @param fpgaImageIds the FPGA image ids to append
     * @return this request, for call chaining
     */
    public DescribeFpgaImagesRequest withFpgaImageIds(String... fpgaImageIds) {
        if (this.fpgaImageIds == null) {
            setFpgaImageIds(new com.amazonaws.internal.SdkInternalList<String>(fpgaImageIds.length));
        }
        java.util.Collections.addAll(this.fpgaImageIds, fpgaImageIds);
        return this;
    } } | public class class_name {
    /** Appends the given image ids to this request (fluent setter), creating the list on first use. */
    public DescribeFpgaImagesRequest withFpgaImageIds(String... fpgaImageIds) {
        if (this.fpgaImageIds == null) {
            setFpgaImageIds(new com.amazonaws.internal.SdkInternalList<String>(fpgaImageIds.length)); // depends on control dependency: [if], data = [none]
        }
        for (String ele : fpgaImageIds) {
            this.fpgaImageIds.add(ele); // depends on control dependency: [for], data = [ele]
        }
        return this;
    } }
public class class_name {
    /**
     * Marshalls the fields of the given {@code ContainerDatasetAction}, in wire
     * order, through the supplied protocol marshaller.
     *
     * @param containerDatasetAction the value to marshall; must not be null
     * @param protocolMarshaller     the marshaller to write into
     * @throws SdkClientException if the action is null or marshalling fails
     */
    public void marshall(ContainerDatasetAction containerDatasetAction, ProtocolMarshaller protocolMarshaller) {
        if (containerDatasetAction == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(containerDatasetAction.getImage(), IMAGE_BINDING);
            protocolMarshaller.marshall(containerDatasetAction.getExecutionRoleArn(), EXECUTIONROLEARN_BINDING);
            protocolMarshaller.marshall(containerDatasetAction.getResourceConfiguration(), RESOURCECONFIGURATION_BINDING);
            protocolMarshaller.marshall(containerDatasetAction.getVariables(), VARIABLES_BINDING);
        } catch (Exception e) {
            // Any failure (including runtime errors from the marshaller) is surfaced
            // as the SDK's client exception, preserving the cause.
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    } } | public class class_name {
    /** Marshalls the action's fields in wire order; wraps failures in {@code SdkClientException}. */
    public void marshall(ContainerDatasetAction containerDatasetAction, ProtocolMarshaller protocolMarshaller) {
        if (containerDatasetAction == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }
        try {
            protocolMarshaller.marshall(containerDatasetAction.getImage(), IMAGE_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(containerDatasetAction.getExecutionRoleArn(), EXECUTIONROLEARN_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(containerDatasetAction.getResourceConfiguration(), RESOURCECONFIGURATION_BINDING); // depends on control dependency: [try], data = [none]
            protocolMarshaller.marshall(containerDatasetAction.getVariables(), VARIABLES_BINDING); // depends on control dependency: [try], data = [none]
        } catch (Exception e) {
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        } // depends on control dependency: [catch], data = [none]
    } }
public class class_name {
    /**
     * Splits a token logformat definition into an ordered list of tokens,
     * inserting fixed-string tokens for the literal text between matches.
     * Duplicate matches at the same position are resolved by length, then
     * priority; overlapping matches are dropped.
     *
     * @param tokenLogFormat the logformat specification to parse
     * @return all tokens (parsed and fixed-string) in order of appearance
     */
    @SuppressWarnings({ "PMD.AvoidInstantiatingObjectsInLoops",
            "PMD.LongVariable", "PMD.ExcessiveMethodLength",
            "PMD.DataflowAnomalyAnalysis", "PMD.NcssMethodCount",
            "PMD.NPathComplexity" })
    private List<Token> parseTokenLogFileDefinition(final String tokenLogFormat) {
        // Add all available parsers
        final List<TokenParser> tokenParsers = createAllTokenParsers();
        final List<Token> tokens = new ArrayList<>(50);
        // We first change all the references to headers to lowercase
        // because we must handle these as "case insensitive"
        String cleanedTokenLogFormat = cleanupLogFormat(tokenLogFormat);
        // Now we let all tokens figure out if they are present in here
        for (TokenParser tokenParser : tokenParsers) {
            List<Token> newTokens = tokenParser.getTokens(cleanedTokenLogFormat);
            if (newTokens != null) {
                tokens.addAll(newTokens);
            }
        }
        // We now have a full list of all matched tokens
        // ---------------------------------------
        // We sort them by position of the token in the format specifier
        tokens.sort(new TokenSorterByStartPos());
        // First we take out the duplicates with a lower prio (= relevance score).
        final List<Token> kickTokens = new ArrayList<>(50);
        Token prevToken = null;
        for (Token token : tokens) {
            if (prevToken==null){
                prevToken=token;
                continue;
            }
            if (prevToken.getStartPos() == token.getStartPos()) {
                // Same start position: keep the longer match; on equal length keep the higher prio.
                if (prevToken.getLength() == token.getLength()) {
                    if (prevToken.getPrio() < token.getPrio()) {
                        kickTokens.add(prevToken);
                    } else {
                        kickTokens.add(token);
                    }
                } else {
                    if (prevToken.getLength() < token.getLength()) {
                        kickTokens.add(prevToken);
                    } else {
                        kickTokens.add(token);
                    }
                }
            } else {
                // Sometimes a part of a token matches another token as well.
                // Example: %{%H}t Custom Timeformat (only the hour) also matches the protocol token.
                // So we kick them if they overlap.
                if (prevToken.getStartPos() + prevToken.getLength() > token.getStartPos()) {
                    kickTokens.add(token);
                    continue; // keep prevToken as the reference for the next overlap check
                }
            }
            // NOTE(review): prevToken advances even when `token` was just kicked, so a later
            // comparison may run against a kicked token -- confirm this is intended.
            prevToken=token;
        }
        tokens.removeAll(kickTokens);
        final List<Token> allTokens = new ArrayList<>(50);
        // We now look for the holes and add "FIXED STRING" tokens
        int tokenBegin;
        int tokenEnd = 0;
        for (Token token : tokens) {
            tokenBegin = token.getStartPos();
            // Space between the begin of the next token and the end of the previous token?
            if (tokenBegin - tokenEnd > 0) {
                String separator = cleanedTokenLogFormat.substring(tokenEnd, tokenBegin);
                Token fixedStringToken = new FixedStringToken(separator, tokenBegin, tokenBegin - tokenEnd, 0);
                allTokens.add(fixedStringToken);
            }
            allTokens.add(token);
            tokenEnd = tokenBegin + token.getLength();
        }
        int logFormatLength = cleanedTokenLogFormat.length();
        if (tokenEnd < logFormatLength) {
            // Trailing literal text after the last token.
            String separator = cleanedTokenLogFormat.substring(tokenEnd);
            Token fixedStringToken = new FixedStringToken(separator, tokenEnd, cleanedTokenLogFormat.length() - tokenEnd, 0);
            allTokens.add(fixedStringToken);
        }
        return allTokens;
    } } | public class class_name {
    /**
     * Splits a token logformat definition into an ordered list of tokens,
     * inserting fixed-string tokens for literal text between matches; duplicate
     * and overlapping matches are removed by length/priority.
     */
    @SuppressWarnings({ "PMD.AvoidInstantiatingObjectsInLoops",
            "PMD.LongVariable", "PMD.ExcessiveMethodLength",
            "PMD.DataflowAnomalyAnalysis", "PMD.NcssMethodCount",
            "PMD.NPathComplexity" })
    private List<Token> parseTokenLogFileDefinition(final String tokenLogFormat) {
        // Add all available parsers
        final List<TokenParser> tokenParsers = createAllTokenParsers();
        final List<Token> tokens = new ArrayList<>(50);
        // We first change all the references to headers to lowercase
        // because we must handle these as "case insensitive"
        String cleanedTokenLogFormat = cleanupLogFormat(tokenLogFormat);
        // Now we let all tokens figure out if they are present in here
        for (TokenParser tokenParser : tokenParsers) {
            List<Token> newTokens = tokenParser.getTokens(cleanedTokenLogFormat);
            if (newTokens != null) {
                tokens.addAll(newTokens); // depends on control dependency: [if], data = [(newTokens]
            }
        }
        // We now have a full list of all matched tokens
        // ---------------------------------------
        // We sort them by position of the token in the format specifier
        tokens.sort(new TokenSorterByStartPos());
        // First we take out the duplicates with a lower prio(=relevance score)
        final List<Token> kickTokens = new ArrayList<>(50);
        Token prevToken = null;
        for (Token token : tokens) {
            if (prevToken==null){
                prevToken=token; // depends on control dependency: [if], data = [none]
                continue;
            }
            if (prevToken.getStartPos() == token.getStartPos()) {
                if (prevToken.getLength() == token.getLength()) {
                    if (prevToken.getPrio() < token.getPrio()) {
                        kickTokens.add(prevToken); // depends on control dependency: [if], data = [none]
                    } else {
                        kickTokens.add(token); // depends on control dependency: [if], data = [none]
                    }
                } else {
                    if (prevToken.getLength() < token.getLength()) {
                        kickTokens.add(prevToken); // depends on control dependency: [if], data = [none]
                    } else {
                        kickTokens.add(token); // depends on control dependency: [if], data = [none]
                    }
                }
            } else {
                // Sometimes we find that a part of a token matches another token aswell.
                // Example: %{%H}t Custom Timeformat (only the hour) also matches the protocol token.
                // So we kick them of they overlap
                if (prevToken.getStartPos() + prevToken.getLength() > token.getStartPos()) {
                    kickTokens.add(token); // depends on control dependency: [if], data = [none]
                    continue;
                }
            }
            prevToken=token; // depends on control dependency: [for], data = [token]
        }
        tokens.removeAll(kickTokens);
        final List<Token> allTokens = new ArrayList<>(50);
        // We now look for the holes and add "FIXED STRING" tokens
        int tokenBegin;
        int tokenEnd = 0;
        for (Token token : tokens) {
            tokenBegin = token.getStartPos(); // depends on control dependency: [for], data = [token]
            // Space between the begin of the next token and the end of the previous token?
            if (tokenBegin - tokenEnd > 0) {
                String separator = cleanedTokenLogFormat.substring(tokenEnd, tokenBegin);
                Token fixedStringToken = new FixedStringToken(separator, tokenBegin, tokenBegin - tokenEnd, 0);
                allTokens.add(fixedStringToken); // depends on control dependency: [if], data = [none]
            }
            allTokens.add(token); // depends on control dependency: [for], data = [token]
            tokenEnd = tokenBegin + token.getLength(); // depends on control dependency: [for], data = [token]
        }
        int logFormatLength = cleanedTokenLogFormat.length();
        if (tokenEnd < logFormatLength) {
            String separator = cleanedTokenLogFormat.substring(tokenEnd);
            Token fixedStringToken = new FixedStringToken(separator, tokenEnd, cleanedTokenLogFormat.length() - tokenEnd, 0);
            allTokens.add(fixedStringToken); // depends on control dependency: [if], data = [none]
        }
        return allTokens;
    } }
public class class_name {
protected void ensureElementsCapacity() {
final int elStackSize = elName.length;
// assert (depth + 1) >= elName.length;
// we add at least one extra slot ...
final int newSize = (depth >= 7 ? 2 * depth : 8) + 2; // = lucky 7 + 1
// //25
if (TRACE_SIZING) {
System.err.println(getClass().getName() + " elStackSize " + elStackSize + " ==> " + newSize);
}
final boolean needsCopying = elStackSize > 0;
String[] arr = null;
// reuse arr local variable slot
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elName, 0, arr, 0, elStackSize);
elName = arr;
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elPrefix, 0, arr, 0, elStackSize);
elPrefix = arr;
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elNamespace, 0, arr, 0, elStackSize);
elNamespace = arr;
final int[] iarr = new int[newSize];
if (needsCopying) {
System.arraycopy(elNamespaceCount, 0, iarr, 0, elStackSize);
} else {
// special initialization
iarr[0] = 0;
}
elNamespaceCount = iarr;
} } | public class class_name {
protected void ensureElementsCapacity() {
final int elStackSize = elName.length;
// assert (depth + 1) >= elName.length;
// we add at least one extra slot ...
final int newSize = (depth >= 7 ? 2 * depth : 8) + 2; // = lucky 7 + 1
// //25
if (TRACE_SIZING) {
System.err.println(getClass().getName() + " elStackSize " + elStackSize + " ==> " + newSize); // depends on control dependency: [if], data = [none]
}
final boolean needsCopying = elStackSize > 0;
String[] arr = null;
// reuse arr local variable slot
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elName, 0, arr, 0, elStackSize);
elName = arr;
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elPrefix, 0, arr, 0, elStackSize);
elPrefix = arr;
arr = new String[newSize];
if (needsCopying)
System.arraycopy(elNamespace, 0, arr, 0, elStackSize);
elNamespace = arr;
final int[] iarr = new int[newSize];
if (needsCopying) {
System.arraycopy(elNamespaceCount, 0, iarr, 0, elStackSize); // depends on control dependency: [if], data = [none]
} else {
// special initialization
iarr[0] = 0; // depends on control dependency: [if], data = [none]
}
elNamespaceCount = iarr;
} } |
public class class_name {
public AsciiString concat(CharSequence string) {
int thisLen = length();
int thatLen = string.length();
if (thatLen == 0) {
return this;
}
if (string.getClass() == AsciiString.class) {
AsciiString that = (AsciiString) string;
if (isEmpty()) {
return that;
}
byte[] newValue = PlatformDependent.allocateUninitializedArray(thisLen + thatLen);
System.arraycopy(value, arrayOffset(), newValue, 0, thisLen);
System.arraycopy(that.value, that.arrayOffset(), newValue, thisLen, thatLen);
return new AsciiString(newValue, false);
}
if (isEmpty()) {
return new AsciiString(string);
}
byte[] newValue = PlatformDependent.allocateUninitializedArray(thisLen + thatLen);
System.arraycopy(value, arrayOffset(), newValue, 0, thisLen);
for (int i = thisLen, j = 0; i < newValue.length; i++, j++) {
newValue[i] = c2b(string.charAt(j));
}
return new AsciiString(newValue, false);
} } | public class class_name {
public AsciiString concat(CharSequence string) {
int thisLen = length();
int thatLen = string.length();
if (thatLen == 0) {
return this; // depends on control dependency: [if], data = [none]
}
if (string.getClass() == AsciiString.class) {
AsciiString that = (AsciiString) string;
if (isEmpty()) {
return that; // depends on control dependency: [if], data = [none]
}
byte[] newValue = PlatformDependent.allocateUninitializedArray(thisLen + thatLen);
System.arraycopy(value, arrayOffset(), newValue, 0, thisLen); // depends on control dependency: [if], data = [none]
System.arraycopy(that.value, that.arrayOffset(), newValue, thisLen, thatLen); // depends on control dependency: [if], data = [none]
return new AsciiString(newValue, false); // depends on control dependency: [if], data = [none]
}
if (isEmpty()) {
return new AsciiString(string); // depends on control dependency: [if], data = [none]
}
byte[] newValue = PlatformDependent.allocateUninitializedArray(thisLen + thatLen);
System.arraycopy(value, arrayOffset(), newValue, 0, thisLen);
for (int i = thisLen, j = 0; i < newValue.length; i++, j++) {
newValue[i] = c2b(string.charAt(j)); // depends on control dependency: [for], data = [i]
}
return new AsciiString(newValue, false);
} } |
public class class_name {
public static boolean start(final RootDoc root) {
// Perform some reflective investigation of the RootDoc
final boolean toReturn = Standard.start(root);
eventSequence.add("start (root): " + toReturn);
// We should emit the eventSequence here.
for(int i = 0; i < eventSequence.size(); i++) {
System.out.println(" event [" + i + " / " + eventSequence.size() + "]: " + eventSequence.get(i));
}
// All Done.
return toReturn;
} } | public class class_name {
public static boolean start(final RootDoc root) {
// Perform some reflective investigation of the RootDoc
final boolean toReturn = Standard.start(root);
eventSequence.add("start (root): " + toReturn);
// We should emit the eventSequence here.
for(int i = 0; i < eventSequence.size(); i++) {
System.out.println(" event [" + i + " / " + eventSequence.size() + "]: " + eventSequence.get(i)); // depends on control dependency: [for], data = [i]
}
// All Done.
return toReturn;
} } |
public class class_name {
public ClassLoader getClassLoader(MavenProject project, final ClassLoader parent, Log log) throws DependencyResolutionRequiredException {
@SuppressWarnings("unchecked")
List<String> classpathElements = project.getCompileClasspathElements();
final List<URL> classpathUrls = new ArrayList<>(classpathElements.size());
for (String classpathElement : classpathElements) {
try {
log.debug("Adding project artifact to classpath: " + classpathElement);
classpathUrls.add(new File(classpathElement).toURI().toURL());
} catch (MalformedURLException e) {
log.debug("Unable to use classpath entry as it could not be understood as a valid URL: " + classpathElement, e);
}
}
return AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
@Override
public ClassLoader run() {
return new URLClassLoader(classpathUrls.toArray(new URL[classpathUrls.size()]), parent);
}
});
} } | public class class_name {
public ClassLoader getClassLoader(MavenProject project, final ClassLoader parent, Log log) throws DependencyResolutionRequiredException {
@SuppressWarnings("unchecked")
List<String> classpathElements = project.getCompileClasspathElements();
final List<URL> classpathUrls = new ArrayList<>(classpathElements.size());
for (String classpathElement : classpathElements) {
try {
log.debug("Adding project artifact to classpath: " + classpathElement); // depends on control dependency: [try], data = [none]
classpathUrls.add(new File(classpathElement).toURI().toURL()); // depends on control dependency: [try], data = [none]
} catch (MalformedURLException e) {
log.debug("Unable to use classpath entry as it could not be understood as a valid URL: " + classpathElement, e);
} // depends on control dependency: [catch], data = [none]
}
return AccessController.doPrivileged(new PrivilegedAction<ClassLoader>() {
@Override
public ClassLoader run() {
return new URLClassLoader(classpathUrls.toArray(new URL[classpathUrls.size()]), parent);
}
});
} } |
public class class_name {
public void nodeFeedback(String nodeName, List<ResourceType> resourceTypes,
NodeUsageReport usageReport) {
List<FaultStatsForType> faultStats = nodeToFaultStats.get(nodeName);
if (faultStats == null) {
LOG.info("Received node feedback for deleted node " + nodeName);
return;
}
boolean statsModified = false;
synchronized (faultStats) {
if (tooManyFailedConnectionsInSession(usageReport)) {
for (FaultStatsForType stat : faultStats) {
if (resourceTypes.contains(stat.type)) {
stat.numSessionsWithFailedConnections++;
statsModified = true;
}
}
}
if (tooManyFailuresInSession(usageReport)) {
for (FaultStatsForType stat : faultStats) {
if (resourceTypes.contains(stat.type)) {
stat.numSessionsWithTooManyFailures++;
statsModified = true;
}
}
}
}
if (statsModified) {
blacklistIfNeeded(nodeName, faultStats);
}
} } | public class class_name {
public void nodeFeedback(String nodeName, List<ResourceType> resourceTypes,
NodeUsageReport usageReport) {
List<FaultStatsForType> faultStats = nodeToFaultStats.get(nodeName);
if (faultStats == null) {
LOG.info("Received node feedback for deleted node " + nodeName); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
boolean statsModified = false;
synchronized (faultStats) {
if (tooManyFailedConnectionsInSession(usageReport)) {
for (FaultStatsForType stat : faultStats) {
if (resourceTypes.contains(stat.type)) {
stat.numSessionsWithFailedConnections++; // depends on control dependency: [if], data = [none]
statsModified = true; // depends on control dependency: [if], data = [none]
}
}
}
if (tooManyFailuresInSession(usageReport)) {
for (FaultStatsForType stat : faultStats) {
if (resourceTypes.contains(stat.type)) {
stat.numSessionsWithTooManyFailures++; // depends on control dependency: [if], data = [none]
statsModified = true; // depends on control dependency: [if], data = [none]
}
}
}
}
if (statsModified) {
blacklistIfNeeded(nodeName, faultStats); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static boolean isSelfConfig(String configAbsoluteClassPath, String keyPrefix, IConfigKey key) {
OneProperties configs = otherConfigs.get(configAbsoluteClassPath);
if (configs == null) {
addSelfConfigs(configAbsoluteClassPath, null);
configs = otherConfigs.get(configAbsoluteClassPath);
if (configs == null) {
return VOID_CONFIGS.isConfigTrue(keyPrefix, key);
}
}
return configs.isConfigTrue(keyPrefix, key);
} } | public class class_name {
public static boolean isSelfConfig(String configAbsoluteClassPath, String keyPrefix, IConfigKey key) {
OneProperties configs = otherConfigs.get(configAbsoluteClassPath);
if (configs == null) {
addSelfConfigs(configAbsoluteClassPath, null);
// depends on control dependency: [if], data = [null)]
configs = otherConfigs.get(configAbsoluteClassPath);
// depends on control dependency: [if], data = [none]
if (configs == null) {
return VOID_CONFIGS.isConfigTrue(keyPrefix, key);
// depends on control dependency: [if], data = [none]
}
}
return configs.isConfigTrue(keyPrefix, key);
} } |
public class class_name {
@Override
public int compareTo(ComparableField rhs) {
if (rhs instanceof FieldDescriptor) {
return FieldOrMethodDescriptor.compareTo(this, (FieldDescriptor) rhs);
}
if (rhs instanceof XField) {
return XFactory.compare((XField) this, (XField) rhs);
}
throw new ClassCastException("Can't compare a " + this.getClass().getName() + " to a " + rhs.getClass().getName());
} } | public class class_name {
@Override
public int compareTo(ComparableField rhs) {
if (rhs instanceof FieldDescriptor) {
return FieldOrMethodDescriptor.compareTo(this, (FieldDescriptor) rhs); // depends on control dependency: [if], data = [none]
}
if (rhs instanceof XField) {
return XFactory.compare((XField) this, (XField) rhs); // depends on control dependency: [if], data = [none]
}
throw new ClassCastException("Can't compare a " + this.getClass().getName() + " to a " + rhs.getClass().getName());
} } |
public class class_name {
public void removePathPattern(String pathPattern) {
PathPattern p = new PathPattern(pathPattern);
T target = routes.remove(p);
if (target == null) {
return;
}
} } | public class class_name {
public void removePathPattern(String pathPattern) {
PathPattern p = new PathPattern(pathPattern);
T target = routes.remove(p);
if (target == null) {
return; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public Object doOutParameter(final String key, final Object val) {
if (getFilters().isEmpty()) {
return val;
}
Object obj = val;
for (final SqlFilter filter : getFilters()) {
obj = filter.doOutParameter(key, obj);
}
return obj;
} } | public class class_name {
@Override
public Object doOutParameter(final String key, final Object val) {
if (getFilters().isEmpty()) {
return val; // depends on control dependency: [if], data = [none]
}
Object obj = val;
for (final SqlFilter filter : getFilters()) {
obj = filter.doOutParameter(key, obj); // depends on control dependency: [for], data = [filter]
}
return obj;
} } |
public class class_name {
    /**
     * Writes the class-file "Module" attribute for the module-info class
     * {@code c} (whose owner is the module symbol): the module header
     * (name, flags, version) followed by the requires, exports, opens, uses
     * and provides tables, each prefixed with its entry count.
     *
     * @param c the module-info class symbol; its owner must be a ModuleSymbol
     * @return the number of attributes written (always 1)
     */
    int writeModuleAttribute(ClassSymbol c) {
        ModuleSymbol m = (ModuleSymbol) c.owner;
        int alenIdx = writeAttr(names.Module);
        // Module header: constant-pool index of the module, its flags, and
        // its version (0 when no version is recorded).
        databuf.appendChar(pool.put(m));
        databuf.appendChar(ModuleFlags.value(m.flags)); // module_flags
        databuf.appendChar(m.version != null ? pool.put(m.version) : 0);
        // requires table — synthetic EXTRA requires are not emitted.
        ListBuffer<RequiresDirective> requires = new ListBuffer<>();
        for (RequiresDirective r: m.requires) {
            if (!r.flags.contains(RequiresFlag.EXTRA))
                requires.add(r);
        }
        databuf.appendChar(requires.size());
        for (RequiresDirective r: requires) {
            databuf.appendChar(pool.put(r.module));
            databuf.appendChar(RequiresFlag.value(r.flags));
            databuf.appendChar(r.module.version != null ? pool.put(r.module.version) : 0);
        }
        // exports table — a null target-module list means an unqualified export.
        List<ExportsDirective> exports = m.exports;
        databuf.appendChar(exports.size());
        for (ExportsDirective e: exports) {
            databuf.appendChar(pool.put(e.packge));
            databuf.appendChar(ExportsFlag.value(e.flags));
            if (e.modules == null) {
                databuf.appendChar(0);
            } else {
                databuf.appendChar(e.modules.size());
                for (ModuleSymbol msym: e.modules) {
                    databuf.appendChar(pool.put(msym));
                }
            }
        }
        // opens table — same shape as the exports table.
        List<OpensDirective> opens = m.opens;
        databuf.appendChar(opens.size());
        for (OpensDirective o: opens) {
            databuf.appendChar(pool.put(o.packge));
            databuf.appendChar(OpensFlag.value(o.flags));
            if (o.modules == null) {
                databuf.appendChar(0);
            } else {
                databuf.appendChar(o.modules.size());
                for (ModuleSymbol msym: o.modules) {
                    databuf.appendChar(pool.put(msym));
                }
            }
        }
        // uses table — one constant-pool index per consumed service type.
        List<UsesDirective> uses = m.uses;
        databuf.appendChar(uses.size());
        for (UsesDirective s: uses) {
            databuf.appendChar(pool.put(s.service));
        }
        // temporary fix to merge repeated provides clause for same service;
        // eventually this should be disallowed when analyzing the module,
        // so that each service type only appears once.
        Map<ClassSymbol, Set<ClassSymbol>> mergedProvides = new LinkedHashMap<>();
        for (ProvidesDirective p : m.provides) {
            mergedProvides.computeIfAbsent(p.service, s -> new LinkedHashSet<>()).addAll(p.impls);
        }
        databuf.appendChar(mergedProvides.size());
        mergedProvides.forEach((srvc, impls) -> {
            databuf.appendChar(pool.put(srvc));
            databuf.appendChar(impls.size());
            impls.forEach(impl -> databuf.appendChar(pool.put(impl)));
        });
        endAttr(alenIdx);
        return 1;
    } }
int writeModuleAttribute(ClassSymbol c) {
ModuleSymbol m = (ModuleSymbol) c.owner;
int alenIdx = writeAttr(names.Module);
databuf.appendChar(pool.put(m));
databuf.appendChar(ModuleFlags.value(m.flags)); // module_flags
databuf.appendChar(m.version != null ? pool.put(m.version) : 0);
ListBuffer<RequiresDirective> requires = new ListBuffer<>();
for (RequiresDirective r: m.requires) {
if (!r.flags.contains(RequiresFlag.EXTRA))
requires.add(r);
}
databuf.appendChar(requires.size());
for (RequiresDirective r: requires) {
databuf.appendChar(pool.put(r.module)); // depends on control dependency: [for], data = [r]
databuf.appendChar(RequiresFlag.value(r.flags)); // depends on control dependency: [for], data = [r]
databuf.appendChar(r.module.version != null ? pool.put(r.module.version) : 0); // depends on control dependency: [for], data = [r]
}
List<ExportsDirective> exports = m.exports;
databuf.appendChar(exports.size());
for (ExportsDirective e: exports) {
databuf.appendChar(pool.put(e.packge)); // depends on control dependency: [for], data = [e]
databuf.appendChar(ExportsFlag.value(e.flags)); // depends on control dependency: [for], data = [e]
if (e.modules == null) {
databuf.appendChar(0); // depends on control dependency: [if], data = [none]
} else {
databuf.appendChar(e.modules.size()); // depends on control dependency: [if], data = [(e.modules]
for (ModuleSymbol msym: e.modules) {
databuf.appendChar(pool.put(msym)); // depends on control dependency: [for], data = [msym]
}
}
}
List<OpensDirective> opens = m.opens;
databuf.appendChar(opens.size());
for (OpensDirective o: opens) {
databuf.appendChar(pool.put(o.packge)); // depends on control dependency: [for], data = [o]
databuf.appendChar(OpensFlag.value(o.flags)); // depends on control dependency: [for], data = [o]
if (o.modules == null) {
databuf.appendChar(0); // depends on control dependency: [if], data = [none]
} else {
databuf.appendChar(o.modules.size()); // depends on control dependency: [if], data = [(o.modules]
for (ModuleSymbol msym: o.modules) {
databuf.appendChar(pool.put(msym)); // depends on control dependency: [for], data = [msym]
}
}
}
List<UsesDirective> uses = m.uses;
databuf.appendChar(uses.size());
for (UsesDirective s: uses) {
databuf.appendChar(pool.put(s.service)); // depends on control dependency: [for], data = [s]
}
// temporary fix to merge repeated provides clause for same service;
// eventually this should be disallowed when analyzing the module,
// so that each service type only appears once.
Map<ClassSymbol, Set<ClassSymbol>> mergedProvides = new LinkedHashMap<>();
for (ProvidesDirective p : m.provides) {
mergedProvides.computeIfAbsent(p.service, s -> new LinkedHashSet<>()).addAll(p.impls); // depends on control dependency: [for], data = [p]
}
databuf.appendChar(mergedProvides.size());
mergedProvides.forEach((srvc, impls) -> {
databuf.appendChar(pool.put(srvc));
databuf.appendChar(impls.size());
impls.forEach(impl -> databuf.appendChar(pool.put(impl)));
});
endAttr(alenIdx);
return 1;
} } |
public class class_name {
public CellStyle getOrCreateRowStyle(int y) {
final Row row = getOrCreateRow(y);
CellStyle rowStyle = row.getRowStyle();
if (null == rowStyle) {
rowStyle = this.workbook.createCellStyle();
row.setRowStyle(rowStyle);
}
return rowStyle;
} } | public class class_name {
public CellStyle getOrCreateRowStyle(int y) {
final Row row = getOrCreateRow(y);
CellStyle rowStyle = row.getRowStyle();
if (null == rowStyle) {
rowStyle = this.workbook.createCellStyle();
// depends on control dependency: [if], data = [none]
row.setRowStyle(rowStyle);
// depends on control dependency: [if], data = [rowStyle)]
}
return rowStyle;
} } |
public class class_name {
@Override
public void mouseReleased(MouseEvent e) {
if (e.getComponent() == this
&& (e.getModifiers() & MouseEvent.BUTTON1_MASK) != 0) {
int y = e.getY();
Font font = fileWindow.textArea.getFont();
FontMetrics metrics = getFontMetrics(font);
int h = metrics.getHeight();
int line = y/h;
if (line == pressLine) {
fileWindow.toggleBreakPoint(line + 1);
} else {
pressLine = -1;
}
}
} } | public class class_name {
@Override
public void mouseReleased(MouseEvent e) {
if (e.getComponent() == this
&& (e.getModifiers() & MouseEvent.BUTTON1_MASK) != 0) {
int y = e.getY();
Font font = fileWindow.textArea.getFont();
FontMetrics metrics = getFontMetrics(font);
int h = metrics.getHeight();
int line = y/h;
if (line == pressLine) {
fileWindow.toggleBreakPoint(line + 1); // depends on control dependency: [if], data = [(line]
} else {
pressLine = -1; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public float getCapacity(String queue) {
//Check done in order to return default capacity which can be negative
//In case of both capacity and default capacity not configured.
//Last check is if the configuration is specified and is marked as
//negative we throw exception
String raw = rmConf.getRaw(toFullPropertyName(queue, CAPACITY_PROPERTY));
if(raw == null) {
return -1;
}
float result = rmConf.getFloat(
toFullPropertyName(queue, CAPACITY_PROPERTY), -1);
if (result < 0.0 || result > 100.0) {
throw new IllegalArgumentException(
"Illegal capacity for queue " + queue +
" of " + result);
}
return result;
} } | public class class_name {
public float getCapacity(String queue) {
//Check done in order to return default capacity which can be negative
//In case of both capacity and default capacity not configured.
//Last check is if the configuration is specified and is marked as
//negative we throw exception
String raw = rmConf.getRaw(toFullPropertyName(queue, CAPACITY_PROPERTY));
if(raw == null) {
return -1; // depends on control dependency: [if], data = [none]
}
float result = rmConf.getFloat(
toFullPropertyName(queue, CAPACITY_PROPERTY), -1);
if (result < 0.0 || result > 100.0) {
throw new IllegalArgumentException(
"Illegal capacity for queue " + queue +
" of " + result);
}
return result;
} } |
public class class_name {
    /**
     * Handles an incoming gossip response packet.
     *
     * <p>The "online" block carries, per node, an optional info tree and/or a
     * CPU sequence+usage pair; info updates are applied via
     * {@code updateNodeInfo} and CPU values via the node descriptor. The
     * "offline" block carries, per node, a sequence number: for the local node
     * the own sequence is advanced past it, for known remote nodes the node is
     * marked offline (removing its actions/listeners and closing its channel)
     * and a disconnect event is broadcast.</p>
     *
     * @param data the parsed gossip response tree
     * @throws Exception if applying a node-info update fails
     */
    protected void processGossipResponse(Tree data) throws Exception {
        // Debug
        if (debugHeartbeats) {
            String sender = data.get("sender", (String) null);
            logger.info("Gossip response received from \"" + sender + "\" node:\r\n" + data);
        }
        // Online / offline nodes in responnse
        Tree online = data.get("online");
        Tree offline = data.get("offline");
        // Process "online" block
        if (online != null) {
            for (Tree row : online) {
                // Get nodeID
                String nodeID = row.getName();
                // The local node's own entry is ignored.
                if (this.nodeID.equals(nodeID)) {
                    continue;
                }
                int size = row.size();
                if (!row.isEnumeration() || size < 1 || size > 3) {
                    logger.warn("Invalid \"offline\" block: " + row);
                    continue;
                }
                // Get parameters from input
                // Row shapes: [info] | [cpuSeq, cpu] | [info, cpuSeq, cpu]
                Tree info = null;
                long cpuSeq = 0;
                int cpu = 0;
                if (row.size() == 1) {
                    info = row.get(0);
                } else if (row.size() == 2) {
                    cpuSeq = row.get(0).asLong();
                    cpu = row.get(1).asInteger();
                } else if (row.size() == 3) {
                    info = row.get(0);
                    cpuSeq = row.get(1).asLong();
                    cpu = row.get(2).asInteger();
                } else {
                    logger.warn("Invalid \"online\" block: " + row.toString(false));
                    continue;
                }
                if (info != null) {
                    // Update "info" block,
                    // send updated, connected or reconnected event
                    updateNodeInfo(nodeID, info);
                }
                if (cpuSeq > 0) {
                    // We update our CPU info
                    NodeDescriptor node = nodes.get(nodeID);
                    if (node != null) {
                        // CPU updates are applied under the descriptor's write lock.
                        node.writeLock.lock();
                        try {
                            node.updateCpu(cpuSeq, cpu);
                        } finally {
                            node.writeLock.unlock();
                        }
                    }
                }
            }
        }
        // Process "offline" block
        if (offline != null) {
            for (Tree row : offline) {
                String nodeID = row.getName();
                NodeDescriptor node;
                if (this.nodeID.equals(nodeID)) {
                    // A peer believes we are offline: bump our own sequence
                    // number past the reported one so we gossip ourselves back online.
                    long seq = row.asLong();
                    node = getDescriptor();
                    node.writeLock.lock();
                    try {
                        long newSeq = Math.max(node.seq, seq + 1);
                        if (node.seq < newSeq) {
                            node.seq = newSeq;
                            node.info.put("seq", newSeq);
                        }
                    } finally {
                        node.writeLock.unlock();
                    }
                    continue;
                }
                node = nodes.get(nodeID);
                if (node == null) {
                    // NOTE(review): this aborts processing of ALL remaining
                    // offline rows when one node is unknown; a 'continue' to
                    // skip only this entry may have been intended — confirm.
                    return;
                }
                if (!row.isPrimitive()) {
                    logger.warn("Invalid \"offline\" block: " + row);
                    continue;
                }
                // Get parameters from input
                boolean disconnected = false;
                node.writeLock.lock();
                try {
                    long seq = row.asLong();
                    if (node.seq < seq && node.markAsOffline(seq)) {
                        // We know it is online, so we change it to offline
                        // Remove remote actions and listeners
                        registry.removeActions(node.nodeID);
                        eventbus.removeListeners(node.nodeID);
                        writer.close(node.nodeID);
                        disconnected = true;
                    }
                } finally {
                    node.writeLock.unlock();
                }
                // (node is non-null here; the null check is redundant but harmless)
                if (node != null && disconnected) {
                    // Notify listeners (not unexpected disconnection)
                    logger.info("Node \"" + node.nodeID + "\" disconnected.");
                    broadcastNodeDisconnected(node.info, false);
                }
            }
        }
    } }
protected void processGossipResponse(Tree data) throws Exception {
// Debug
if (debugHeartbeats) {
String sender = data.get("sender", (String) null);
logger.info("Gossip response received from \"" + sender + "\" node:\r\n" + data);
}
// Online / offline nodes in responnse
Tree online = data.get("online");
Tree offline = data.get("offline");
// Process "online" block
if (online != null) {
for (Tree row : online) {
// Get nodeID
String nodeID = row.getName();
if (this.nodeID.equals(nodeID)) {
continue;
}
int size = row.size();
if (!row.isEnumeration() || size < 1 || size > 3) {
logger.warn("Invalid \"offline\" block: " + row); // depends on control dependency: [if], data = [none]
continue;
}
// Get parameters from input
Tree info = null;
long cpuSeq = 0;
int cpu = 0;
if (row.size() == 1) {
info = row.get(0); // depends on control dependency: [if], data = [none]
} else if (row.size() == 2) {
cpuSeq = row.get(0).asLong(); // depends on control dependency: [if], data = [none]
cpu = row.get(1).asInteger(); // depends on control dependency: [if], data = [none]
} else if (row.size() == 3) {
info = row.get(0); // depends on control dependency: [if], data = [none]
cpuSeq = row.get(1).asLong(); // depends on control dependency: [if], data = [none]
cpu = row.get(2).asInteger(); // depends on control dependency: [if], data = [none]
} else {
logger.warn("Invalid \"online\" block: " + row.toString(false)); // depends on control dependency: [if], data = [none]
continue;
}
if (info != null) {
// Update "info" block,
// send updated, connected or reconnected event
updateNodeInfo(nodeID, info); // depends on control dependency: [if], data = [none]
}
if (cpuSeq > 0) {
// We update our CPU info
NodeDescriptor node = nodes.get(nodeID);
if (node != null) {
node.writeLock.lock(); // depends on control dependency: [if], data = [none]
try {
node.updateCpu(cpuSeq, cpu); // depends on control dependency: [try], data = [none]
} finally {
node.writeLock.unlock();
}
}
}
}
}
// Process "offline" block
if (offline != null) {
for (Tree row : offline) {
String nodeID = row.getName();
NodeDescriptor node;
if (this.nodeID.equals(nodeID)) {
long seq = row.asLong();
node = getDescriptor();
node.writeLock.lock();
try {
long newSeq = Math.max(node.seq, seq + 1);
if (node.seq < newSeq) {
node.seq = newSeq;
node.info.put("seq", newSeq);
}
} finally {
node.writeLock.unlock();
}
continue;
}
node = nodes.get(nodeID);
if (node == null) {
return;
}
if (!row.isPrimitive()) {
logger.warn("Invalid \"offline\" block: " + row);
continue;
}
// Get parameters from input
boolean disconnected = false;
node.writeLock.lock();
try {
long seq = row.asLong();
if (node.seq < seq && node.markAsOffline(seq)) {
// We know it is online, so we change it to offline
// Remove remote actions and listeners
registry.removeActions(node.nodeID);
eventbus.removeListeners(node.nodeID);
writer.close(node.nodeID);
disconnected = true;
}
} finally {
node.writeLock.unlock();
}
if (node != null && disconnected) {
// Notify listeners (not unexpected disconnection)
logger.info("Node \"" + node.nodeID + "\" disconnected.");
broadcastNodeDisconnected(node.info, false);
}
}
}
} } |
public class class_name {
private static <T> void log(RedwoodChannels channels, String description, Iterable<T> iterable) {
Redwood.startTrack(description);
if (iterable == null) {
channels.log("(iterable is null)");
} else {
int index = 0;
for (T item : iterable) {
if (dispatchable(item)) {
log(channels, "Index " + index, item);
} else {
channels.logf("Index %d: %s", index, item);
}
index++;
}
if (index == 0) {
channels.log("(empty)");
}
}
Redwood.endTrack(description);
} } | public class class_name {
private static <T> void log(RedwoodChannels channels, String description, Iterable<T> iterable) {
Redwood.startTrack(description);
if (iterable == null) {
channels.log("(iterable is null)");
// depends on control dependency: [if], data = [(iterable]
} else {
int index = 0;
for (T item : iterable) {
if (dispatchable(item)) {
log(channels, "Index " + index, item);
// depends on control dependency: [if], data = [none]
} else {
channels.logf("Index %d: %s", index, item);
// depends on control dependency: [if], data = [none]
}
index++;
// depends on control dependency: [for], data = [none]
}
if (index == 0) {
channels.log("(empty)");
// depends on control dependency: [if], data = [none]
}
}
Redwood.endTrack(description);
} } |
public class class_name {
private List<CmsSelectWidgetOption> getProjectSelections() {
List<CmsSelectWidgetOption> result = new LinkedList<CmsSelectWidgetOption>();
List<CmsProject> projects = null;
try {
projects = OpenCms.getOrgUnitManager().getAllAccessibleProjects(getCms(), "", true);
} catch (CmsException e) {
return result;
}
boolean first = true;
for (CmsProject project : projects) {
if (!project.getName().equals(CmsProject.ONLINE_PROJECT_NAME)) {
first = false;
result.add(new CmsSelectWidgetOption(project.getName(), first, project.getName()));
}
}
return result;
} } | public class class_name {
private List<CmsSelectWidgetOption> getProjectSelections() {
List<CmsSelectWidgetOption> result = new LinkedList<CmsSelectWidgetOption>();
List<CmsProject> projects = null;
try {
projects = OpenCms.getOrgUnitManager().getAllAccessibleProjects(getCms(), "", true); // depends on control dependency: [try], data = [none]
} catch (CmsException e) {
return result;
} // depends on control dependency: [catch], data = [none]
boolean first = true;
for (CmsProject project : projects) {
if (!project.getName().equals(CmsProject.ONLINE_PROJECT_NAME)) {
first = false; // depends on control dependency: [if], data = [none]
result.add(new CmsSelectWidgetOption(project.getName(), first, project.getName())); // depends on control dependency: [if], data = [none]
}
}
return result;
} } |
public class class_name {
@Override
public int getIdAttrIndex()
{
// Let's figure out the index only when needed
int ix = mIdAttrIndex;
if (ix == -2) {
ix = -1;
if (mCurrElem != null) {
DTDAttribute idAttr = mCurrElem.getIdAttribute();
if (idAttr != null) {
DTDAttribute[] attrs = mAttrSpecs;
for (int i = 0, len = attrs.length; i < len; ++i) {
if (attrs[i] == idAttr) {
ix = i;
break;
}
}
}
}
mIdAttrIndex = ix;
}
return ix;
} } | public class class_name {
@Override
public int getIdAttrIndex()
{
// Let's figure out the index only when needed
int ix = mIdAttrIndex;
if (ix == -2) {
ix = -1; // depends on control dependency: [if], data = [none]
if (mCurrElem != null) {
DTDAttribute idAttr = mCurrElem.getIdAttribute();
if (idAttr != null) {
DTDAttribute[] attrs = mAttrSpecs;
for (int i = 0, len = attrs.length; i < len; ++i) {
if (attrs[i] == idAttr) {
ix = i; // depends on control dependency: [if], data = [none]
break;
}
}
}
}
mIdAttrIndex = ix; // depends on control dependency: [if], data = [none]
}
return ix;
} } |
public class class_name {
public void addIceServers(List<PeerConnection.IceServer> iceServers){
if(this.iceServers!=null) {
iceServers.addAll(this.iceServers);
}
this.iceServers = iceServers;
} } | public class class_name {
public void addIceServers(List<PeerConnection.IceServer> iceServers){
if(this.iceServers!=null) {
iceServers.addAll(this.iceServers); // depends on control dependency: [if], data = [(this.iceServers]
}
this.iceServers = iceServers;
} } |
public class class_name {
private static boolean isPropertyTree(Node expectedGetprop) {
if (!expectedGetprop.isGetProp()) {
return false;
}
Node leftChild = expectedGetprop.getFirstChild();
if (!leftChild.isThis() && !isPropertyTree(leftChild)) {
return false;
}
Node retVal = leftChild.getNext();
return NodeUtil.getStringValue(retVal) != null;
} } | public class class_name {
private static boolean isPropertyTree(Node expectedGetprop) {
if (!expectedGetprop.isGetProp()) {
return false; // depends on control dependency: [if], data = [none]
}
Node leftChild = expectedGetprop.getFirstChild();
if (!leftChild.isThis() && !isPropertyTree(leftChild)) {
return false; // depends on control dependency: [if], data = [none]
}
Node retVal = leftChild.getNext();
return NodeUtil.getStringValue(retVal) != null;
} } |
public class class_name {
public final List<Change> diff(final HourRanges toOther) {
ensureSingleDayOnly("from", this);
ensureSingleDayOnly("to", toOther);
final BitSet thisMinutes = this.toMinutes();
final BitSet otherMinutes = toOther.toMinutes();
final BitSet addedMinutes = new BitSet(1440);
final BitSet removedMinutes = new BitSet(1440);
for (int i = 0; i < 1440; i++) {
if (thisMinutes.get(i) && !otherMinutes.get(i)) {
removedMinutes.set(i);
}
if (!thisMinutes.get(i) && otherMinutes.get(i)) {
addedMinutes.set(i);
}
}
final List<Change> changes = new ArrayList<>();
if (!removedMinutes.isEmpty()) {
final HourRanges removed = HourRanges.valueOf(removedMinutes);
for (final HourRange hr : removed) {
changes.add(new Change(ChangeType.REMOVED, hr));
}
}
if (!addedMinutes.isEmpty()) {
final HourRanges added = HourRanges.valueOf(addedMinutes);
for (final HourRange hr : added) {
changes.add(new Change(ChangeType.ADDED, hr));
}
}
return changes;
} } | public class class_name {
public final List<Change> diff(final HourRanges toOther) {
ensureSingleDayOnly("from", this);
ensureSingleDayOnly("to", toOther);
final BitSet thisMinutes = this.toMinutes();
final BitSet otherMinutes = toOther.toMinutes();
final BitSet addedMinutes = new BitSet(1440);
final BitSet removedMinutes = new BitSet(1440);
for (int i = 0; i < 1440; i++) {
if (thisMinutes.get(i) && !otherMinutes.get(i)) {
removedMinutes.set(i); // depends on control dependency: [if], data = [none]
}
if (!thisMinutes.get(i) && otherMinutes.get(i)) {
addedMinutes.set(i); // depends on control dependency: [if], data = [none]
}
}
final List<Change> changes = new ArrayList<>();
if (!removedMinutes.isEmpty()) {
final HourRanges removed = HourRanges.valueOf(removedMinutes);
for (final HourRange hr : removed) {
changes.add(new Change(ChangeType.REMOVED, hr)); // depends on control dependency: [for], data = [hr]
}
}
if (!addedMinutes.isEmpty()) {
final HourRanges added = HourRanges.valueOf(addedMinutes);
for (final HourRange hr : added) {
changes.add(new Change(ChangeType.ADDED, hr)); // depends on control dependency: [for], data = [hr]
}
}
return changes;
} } |
public class class_name {
private static ImmutableMap<String, Annotation> buildAnnotations(Iterable<String> whitelist) {
ImmutableMap.Builder<String, Annotation> annotationsBuilder = ImmutableMap.builder();
annotationsBuilder.putAll(Annotation.recognizedAnnotations);
for (String unrecognizedAnnotation : whitelist) {
if (!unrecognizedAnnotation.isEmpty()
&& !Annotation.recognizedAnnotations.containsKey(unrecognizedAnnotation)) {
annotationsBuilder.put(unrecognizedAnnotation, Annotation.NOT_IMPLEMENTED);
}
}
return annotationsBuilder.build();
} } | public class class_name {
private static ImmutableMap<String, Annotation> buildAnnotations(Iterable<String> whitelist) {
ImmutableMap.Builder<String, Annotation> annotationsBuilder = ImmutableMap.builder();
annotationsBuilder.putAll(Annotation.recognizedAnnotations);
for (String unrecognizedAnnotation : whitelist) {
if (!unrecognizedAnnotation.isEmpty()
&& !Annotation.recognizedAnnotations.containsKey(unrecognizedAnnotation)) {
annotationsBuilder.put(unrecognizedAnnotation, Annotation.NOT_IMPLEMENTED); // depends on control dependency: [if], data = [none]
}
}
return annotationsBuilder.build();
} } |
public class class_name {
    /**
     * Applies the given 3D transform to every (x, y, z) coordinate triple of
     * this element, in place, and invalidates the cached bounds.
     *
     * @param transform transform to apply; the call is a no-op when {@code null}
     */
    @Override
    public void transform(Transform3D transform) {
        if (transform!=null) {
            // NOTE(review): declared as Point3D but instantiated as Point3d —
            // confirm the two types are assignment-compatible in this project.
            Point3D p = new Point3d();
            // coordsProperty is a flat array of (x, y, z) triples; i advances
            // by 3 per iteration via the three post-increments below.
            for(int i=0; i<this.numCoordsProperty.get();) {
                p.set(this.coordsProperty[i].get(), this.coordsProperty[i+1].get(), this.coordsProperty[i+2].get());
                transform.transform(p);
                this.coordsProperty[i++].set(p.getX());
                this.coordsProperty[i++].set(p.getY());
                this.coordsProperty[i++].set(p.getZ());
            }
            // Cached bounds are stale once coordinates change.
            this.graphicalBounds = null;
            this.logicalBounds = null;
        }
    } }
@Override
public void transform(Transform3D transform) {
if (transform!=null) {
Point3D p = new Point3d();
for(int i=0; i<this.numCoordsProperty.get();) {
p.set(this.coordsProperty[i].get(), this.coordsProperty[i+1].get(), this.coordsProperty[i+2].get()); // depends on control dependency: [for], data = [i]
transform.transform(p); // depends on control dependency: [for], data = [none]
this.coordsProperty[i++].set(p.getX()); // depends on control dependency: [for], data = [i]
this.coordsProperty[i++].set(p.getY()); // depends on control dependency: [for], data = [i]
this.coordsProperty[i++].set(p.getZ()); // depends on control dependency: [for], data = [i]
}
this.graphicalBounds = null; // depends on control dependency: [if], data = [none]
this.logicalBounds = null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static AxisAligner getInstance(QuatSymmetryResults results) {
String symmetry = results.getSymmetry();
if (symmetry.equals("H")) {
return new HelixAxisAligner(results);
} else {
return new RotationAxisAligner(results);
}
} } | public class class_name {
public static AxisAligner getInstance(QuatSymmetryResults results) {
String symmetry = results.getSymmetry();
if (symmetry.equals("H")) {
return new HelixAxisAligner(results); // depends on control dependency: [if], data = [none]
} else {
return new RotationAxisAligner(results); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public Widget getWidget(int index) {
if (index == PAGE_LIMIT - 1) {
if (offPageContainer.isEmpty()) {
return offPageContainer.asWidget();
}
int deckIndex = offPageContainer.getSelectedDeck() == -1 ? 0 : offPageContainer.getSelectedDeck();
return offPageContainer.getDeck(deckIndex);
}
return super.getWidget(index);
} } | public class class_name {
@Override
public Widget getWidget(int index) {
if (index == PAGE_LIMIT - 1) {
if (offPageContainer.isEmpty()) {
return offPageContainer.asWidget(); // depends on control dependency: [if], data = [none]
}
int deckIndex = offPageContainer.getSelectedDeck() == -1 ? 0 : offPageContainer.getSelectedDeck();
return offPageContainer.getDeck(deckIndex); // depends on control dependency: [if], data = [none]
}
return super.getWidget(index);
} } |
public class class_name {
public TreeRule getRule(final int level, final int order) {
if (rules == null || rules.isEmpty()) {
return null;
}
TreeMap<Integer, TreeRule> rule_level = rules.get(level);
if (rule_level == null || rule_level.isEmpty()) {
return null;
}
return rule_level.get(order);
} } | public class class_name {
public TreeRule getRule(final int level, final int order) {
if (rules == null || rules.isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
}
TreeMap<Integer, TreeRule> rule_level = rules.get(level);
if (rule_level == null || rule_level.isEmpty()) {
return null; // depends on control dependency: [if], data = [none]
}
return rule_level.get(order);
} } |
public class class_name {
private void reportResults(boolean replace, I_CmsReport report, int nrOfFiles) {
// report entries
if (replace) {
// finish searching and replacing
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_END_SEARCHING_REPLACING_0),
I_CmsReport.FORMAT_HEADLINE);
} else {
// finish searching
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_END_SEARCHING_0),
I_CmsReport.FORMAT_HEADLINE);
}
// the results are written in the report
report.println(Messages.get().container(Messages.RPT_SOURCESEARCH_RESULT_0), I_CmsReport.FORMAT_HEADLINE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_NR_OF_FILES_TO_SEARCH_IN_1,
new Integer(nrOfFiles).toString()),
I_CmsReport.FORMAT_NOTE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_NR_OF_FILES_MATCHED_1,
new Integer(m_matchedResources.size()).toString()),
I_CmsReport.FORMAT_NOTE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_SEARCH_ERROR_COUNT_1,
new Integer(m_errorSearch).toString()),
I_CmsReport.FORMAT_NOTE);
if (replace) {
// replace report entries
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_REPLACE_ERROR_COUNT_1,
new Integer(m_errorUpdate).toString()),
I_CmsReport.FORMAT_NOTE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_LOCKED_FILES_1,
new Integer(m_lockedFiles).toString()),
I_CmsReport.FORMAT_NOTE);
if (m_matchedResources.size() == 0) {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_NO_FILES_FOUND_0),
I_CmsReport.FORMAT_OK);
} else {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_CLICK_OK_TO_GET_LIST_0),
I_CmsReport.FORMAT_OK);
}
if (m_lockedFiles > 0) {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_REPLACE_FAILED_0),
I_CmsReport.FORMAT_ERROR);
} else {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_REPLACE_SUCCESS_0),
I_CmsReport.FORMAT_OK);
}
} else {
// search report entries
if (m_matchedResources.size() == 0) {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_NO_FILES_FOUND_0),
I_CmsReport.FORMAT_OK);
} else {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_CLICK_OK_TO_GET_LIST_0),
I_CmsReport.FORMAT_OK);
}
if (m_errorSearch > 0) {
// only searching failed
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_SEARCH_FAILED_0),
I_CmsReport.FORMAT_ERROR);
} else {
// only searching was successful
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_SEARCH_SUCCESS_0),
I_CmsReport.FORMAT_OK);
}
}
} } | public class class_name {
private void reportResults(boolean replace, I_CmsReport report, int nrOfFiles) {
// report entries
if (replace) {
// finish searching and replacing
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_END_SEARCHING_REPLACING_0),
I_CmsReport.FORMAT_HEADLINE); // depends on control dependency: [if], data = [none]
} else {
// finish searching
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_END_SEARCHING_0),
I_CmsReport.FORMAT_HEADLINE); // depends on control dependency: [if], data = [none]
}
// the results are written in the report
report.println(Messages.get().container(Messages.RPT_SOURCESEARCH_RESULT_0), I_CmsReport.FORMAT_HEADLINE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_NR_OF_FILES_TO_SEARCH_IN_1,
new Integer(nrOfFiles).toString()),
I_CmsReport.FORMAT_NOTE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_NR_OF_FILES_MATCHED_1,
new Integer(m_matchedResources.size()).toString()),
I_CmsReport.FORMAT_NOTE);
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_SEARCH_ERROR_COUNT_1,
new Integer(m_errorSearch).toString()),
I_CmsReport.FORMAT_NOTE);
if (replace) {
// replace report entries
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_REPLACE_ERROR_COUNT_1,
new Integer(m_errorUpdate).toString()),
I_CmsReport.FORMAT_NOTE); // depends on control dependency: [if], data = [none]
report.println(
Messages.get().container(
Messages.RPT_SOURCESEARCH_LOCKED_FILES_1,
new Integer(m_lockedFiles).toString()),
I_CmsReport.FORMAT_NOTE); // depends on control dependency: [if], data = [none]
if (m_matchedResources.size() == 0) {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_NO_FILES_FOUND_0),
I_CmsReport.FORMAT_OK); // depends on control dependency: [if], data = [none]
} else {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_CLICK_OK_TO_GET_LIST_0),
I_CmsReport.FORMAT_OK); // depends on control dependency: [if], data = [none]
}
if (m_lockedFiles > 0) {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_REPLACE_FAILED_0),
I_CmsReport.FORMAT_ERROR); // depends on control dependency: [if], data = [none]
} else {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_REPLACE_SUCCESS_0),
I_CmsReport.FORMAT_OK); // depends on control dependency: [if], data = [none]
}
} else {
// search report entries
if (m_matchedResources.size() == 0) {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_NO_FILES_FOUND_0),
I_CmsReport.FORMAT_OK); // depends on control dependency: [if], data = [none]
} else {
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_CLICK_OK_TO_GET_LIST_0),
I_CmsReport.FORMAT_OK); // depends on control dependency: [if], data = [none]
}
if (m_errorSearch > 0) {
// only searching failed
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_SEARCH_FAILED_0),
I_CmsReport.FORMAT_ERROR); // depends on control dependency: [if], data = [none]
} else {
// only searching was successful
report.println(
Messages.get().container(Messages.RPT_SOURCESEARCH_SEARCH_SUCCESS_0),
I_CmsReport.FORMAT_OK); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private void loadPlugins() {
final List<T> finalPluginsList = new ArrayList<T>();
pluginsList = new ArrayList<T>();
pluginsMap = new HashMap<String, T>();
String className = null;
try {
final Class<T>[] classes = getClasses();
for (final Class<T> clazz : classes) {
className = clazz.getName();
final T plugin = clazz.newInstance();
if (plugin instanceof DelegatingModuleParser) {
((DelegatingModuleParser) plugin).setFeedParser(parentParser);
}
if (plugin instanceof DelegatingModuleGenerator) {
((DelegatingModuleGenerator) plugin).setFeedGenerator(parentGenerator);
}
pluginsMap.put(getKey(plugin), plugin);
// to preserve the order of definition in the rome.properties files
pluginsList.add(plugin);
}
final Collection<T> plugins = pluginsMap.values();
for (final T plugin : plugins) {
// to remove overridden plugin impls
finalPluginsList.add(plugin);
}
final Iterator<T> iterator = pluginsList.iterator();
while (iterator.hasNext()) {
final T plugin = iterator.next();
if (!finalPluginsList.contains(plugin)) {
iterator.remove();
}
}
} catch (final Exception ex) {
throw new RuntimeException("could not instantiate plugin " + className, ex);
} catch (final ExceptionInInitializerError er) {
throw new RuntimeException("could not instantiate plugin " + className, er);
}
} } | public class class_name {
private void loadPlugins() {
final List<T> finalPluginsList = new ArrayList<T>();
pluginsList = new ArrayList<T>();
pluginsMap = new HashMap<String, T>();
String className = null;
try {
final Class<T>[] classes = getClasses();
for (final Class<T> clazz : classes) {
className = clazz.getName(); // depends on control dependency: [for], data = [clazz]
final T plugin = clazz.newInstance();
if (plugin instanceof DelegatingModuleParser) {
((DelegatingModuleParser) plugin).setFeedParser(parentParser); // depends on control dependency: [if], data = [none]
}
if (plugin instanceof DelegatingModuleGenerator) {
((DelegatingModuleGenerator) plugin).setFeedGenerator(parentGenerator); // depends on control dependency: [if], data = [none]
}
pluginsMap.put(getKey(plugin), plugin); // depends on control dependency: [for], data = [none]
// to preserve the order of definition in the rome.properties files
pluginsList.add(plugin); // depends on control dependency: [for], data = [none]
}
final Collection<T> plugins = pluginsMap.values();
for (final T plugin : plugins) {
// to remove overridden plugin impls
finalPluginsList.add(plugin); // depends on control dependency: [for], data = [plugin]
}
final Iterator<T> iterator = pluginsList.iterator();
while (iterator.hasNext()) {
final T plugin = iterator.next();
if (!finalPluginsList.contains(plugin)) {
iterator.remove(); // depends on control dependency: [if], data = [none]
}
}
} catch (final Exception ex) {
throw new RuntimeException("could not instantiate plugin " + className, ex);
} catch (final ExceptionInInitializerError er) { // depends on control dependency: [catch], data = [none]
throw new RuntimeException("could not instantiate plugin " + className, er);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private int getPixelSize() {
if (getSize() == Size.NORMAL) {
return getResources().getDimensionPixelSize(R.dimen.floating_action_button_size_normal);
} else if (getSize() == Size.SMALL) {
return getResources().getDimensionPixelSize(R.dimen.floating_action_button_size_small);
} else {
return getResources().getDimensionPixelSize(R.dimen.floating_action_button_size_large);
}
} } | public class class_name {
private int getPixelSize() {
if (getSize() == Size.NORMAL) {
return getResources().getDimensionPixelSize(R.dimen.floating_action_button_size_normal); // depends on control dependency: [if], data = [none]
} else if (getSize() == Size.SMALL) {
return getResources().getDimensionPixelSize(R.dimen.floating_action_button_size_small); // depends on control dependency: [if], data = [none]
} else {
return getResources().getDimensionPixelSize(R.dimen.floating_action_button_size_large); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private Integer mapTaskID(Integer id)
{
Integer mappedID = m_clashMap.get(id);
if (mappedID == null)
{
mappedID = id;
}
return (mappedID);
} } | public class class_name {
private Integer mapTaskID(Integer id)
{
Integer mappedID = m_clashMap.get(id);
if (mappedID == null)
{
mappedID = id; // depends on control dependency: [if], data = [none]
}
return (mappedID);
} } |
public class class_name {
protected void startHtml(PrintWriter out) {
logger.entering(out);
try {
Template t = ve.getTemplate("/templates/header.part.html");
VelocityContext context = new VelocityContext();
StringBuilder output = new StringBuilder();
for (Entry<String, String> temp : ConfigSummaryData.getConfigSummary().entrySet()) {
Entry<String, String> formattedTemp = ReporterDateFormatter.formatReportDataForBrowsableReports(temp);
output.append(formattedTemp.getKey()).append(" : <b>").append(formattedTemp.getValue()).append("</b><br>");
}
context.put("configSummary", output.toString());
StringWriter writer = new StringWriter();
t.merge(context, writer);
out.write(writer.toString());
} catch (Exception e) {
logger.log(Level.SEVERE, e.getMessage(), e);
}
logger.exiting();
} } | public class class_name {
protected void startHtml(PrintWriter out) {
logger.entering(out);
try {
Template t = ve.getTemplate("/templates/header.part.html");
VelocityContext context = new VelocityContext();
StringBuilder output = new StringBuilder();
for (Entry<String, String> temp : ConfigSummaryData.getConfigSummary().entrySet()) {
Entry<String, String> formattedTemp = ReporterDateFormatter.formatReportDataForBrowsableReports(temp);
output.append(formattedTemp.getKey()).append(" : <b>").append(formattedTemp.getValue()).append("</b><br>"); // depends on control dependency: [for], data = [none]
}
context.put("configSummary", output.toString()); // depends on control dependency: [try], data = [none]
StringWriter writer = new StringWriter();
t.merge(context, writer); // depends on control dependency: [try], data = [none]
out.write(writer.toString()); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
logger.log(Level.SEVERE, e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
logger.exiting();
} } |
public class class_name {
@Override
public Set<SeparatorSet> getComputableOutboundMessages(Map<SeparatorSet, Factor> inboundMessages) {
Preconditions.checkNotNull(inboundMessages);
Set<SeparatorSet> possibleOutbound = Sets.newHashSet();
for (Map.Entry<SeparatorSet, Factor> inboundMessage : inboundMessages.entrySet()) {
if (inboundMessage.getValue() == null) {
possibleOutbound.add(inboundMessage.getKey());
}
}
if (possibleOutbound.size() == 1) {
return possibleOutbound;
} else if (possibleOutbound.size() == 0) {
return inboundMessages.keySet();
} else {
return Collections.emptySet();
}
} } | public class class_name {
@Override
public Set<SeparatorSet> getComputableOutboundMessages(Map<SeparatorSet, Factor> inboundMessages) {
Preconditions.checkNotNull(inboundMessages);
Set<SeparatorSet> possibleOutbound = Sets.newHashSet();
for (Map.Entry<SeparatorSet, Factor> inboundMessage : inboundMessages.entrySet()) {
if (inboundMessage.getValue() == null) {
possibleOutbound.add(inboundMessage.getKey()); // depends on control dependency: [if], data = [none]
}
}
if (possibleOutbound.size() == 1) {
return possibleOutbound; // depends on control dependency: [if], data = [none]
} else if (possibleOutbound.size() == 0) {
return inboundMessages.keySet(); // depends on control dependency: [if], data = [none]
} else {
return Collections.emptySet(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
  /**
   * Removes --ijs inputs that conflict with a --js or --weakdep input of the
   * same module-root-relative name, collecting an error for each conflict
   * and for each --ijs file whose name does not end in ".i.js".
   *
   * @param files flag entries, mutated in place (conflicting --ijs entries are removed)
   * @param moduleRoots roots used to relativize file names
   * @param hasModuleSpecs whether --chunk/--module was given (incompatible with --ijs)
   * @return the errors produced while deduplicating
   */
  @GwtIncompatible("Unnecessary")
  private ImmutableList<JSError> deduplicateIjsFiles(
      List<FlagEntry<JsSourceType>> files, List<String> moduleRoots, boolean hasModuleSpecs) {
    ImmutableList.Builder<JSError> errors = ImmutableList.builder();
    // First pass: collect the (module root relative) names of --js and --weakdep files.
    Map<String, String> relativeToAbsoluteName = new HashMap<>();
    for (FlagEntry<JsSourceType> file : files) {
      // TODO(tjgq): Handle zip files.
      if (file.flag == JsSourceType.JS || file.flag == JsSourceType.WEAKDEP) {
        String absoluteName = file.value;
        String relativeName = getModuleRootRelativeName(absoluteName, moduleRoots);
        relativeToAbsoluteName.put(relativeName, absoluteName);
      }
    }
    // Second pass: drop --ijs files whose (module root relative) name matches a --js or --weakdep
    // file.
    // (Removal happens through the iterator, so `files` shrinks safely while iterating.)
    Iterator<FlagEntry<JsSourceType>> iterator = files.iterator();
    while (iterator.hasNext()) {
      FlagEntry<JsSourceType> file = iterator.next();
      if (file.flag == JsSourceType.IJS) {
        if (hasModuleSpecs) {
          throw new FlagUsageException("--ijs is incompatible with --chunk or --module.");
        }
        String absoluteName = file.value;
        if (!absoluteName.endsWith(".i.js")) {
          // Badly named --ijs file: report it but keep it in the list.
          errors.add(JSError.make(IjsErrors.BAD_IJS_FILE_NAME, absoluteName));
          continue;
        }
        String relativeName = getModuleRootRelativeName(absoluteName, moduleRoots);
        String relativeNonIjsName =
            relativeName.substring(0, relativeName.length() - ".i.js".length());
        if (relativeToAbsoluteName.containsKey(relativeNonIjsName)) {
          errors.add(
              JSError.make(
                  IjsErrors.CONFLICTING_IJS_FILE,
                  relativeToAbsoluteName.get(relativeNonIjsName),
                  absoluteName));
          iterator.remove();
        }
      }
    }
    return errors.build();
  } }
@GwtIncompatible("Unnecessary")
private ImmutableList<JSError> deduplicateIjsFiles(
List<FlagEntry<JsSourceType>> files, List<String> moduleRoots, boolean hasModuleSpecs) {
ImmutableList.Builder<JSError> errors = ImmutableList.builder();
// First pass: collect the (module root relative) names of --js and --weakdep files.
Map<String, String> relativeToAbsoluteName = new HashMap<>();
for (FlagEntry<JsSourceType> file : files) {
// TODO(tjgq): Handle zip files.
if (file.flag == JsSourceType.JS || file.flag == JsSourceType.WEAKDEP) {
String absoluteName = file.value;
String relativeName = getModuleRootRelativeName(absoluteName, moduleRoots);
relativeToAbsoluteName.put(relativeName, absoluteName); // depends on control dependency: [if], data = [none]
}
}
// Second pass: drop --ijs files whose (module root relative) name matches a --js or --weakdep
// file.
Iterator<FlagEntry<JsSourceType>> iterator = files.iterator();
while (iterator.hasNext()) {
FlagEntry<JsSourceType> file = iterator.next();
if (file.flag == JsSourceType.IJS) {
if (hasModuleSpecs) {
throw new FlagUsageException("--ijs is incompatible with --chunk or --module.");
}
String absoluteName = file.value;
if (!absoluteName.endsWith(".i.js")) {
errors.add(JSError.make(IjsErrors.BAD_IJS_FILE_NAME, absoluteName)); // depends on control dependency: [if], data = [none]
continue;
}
String relativeName = getModuleRootRelativeName(absoluteName, moduleRoots);
String relativeNonIjsName =
relativeName.substring(0, relativeName.length() - ".i.js".length());
if (relativeToAbsoluteName.containsKey(relativeNonIjsName)) {
errors.add(
JSError.make(
IjsErrors.CONFLICTING_IJS_FILE,
relativeToAbsoluteName.get(relativeNonIjsName),
absoluteName));
iterator.remove(); // depends on control dependency: [if], data = [none]
}
}
}
return errors.build();
} } |
public class class_name {
public void execute() {
Iterator it = memberList.iterator();
while (it.hasNext()) {
GroupMember member = (GroupMember) it.next();
member.getCommand().execute();
}
} } | public class class_name {
public void execute() {
Iterator it = memberList.iterator();
while (it.hasNext()) {
GroupMember member = (GroupMember) it.next();
member.getCommand().execute(); // depends on control dependency: [while], data = [none]
}
} } |
public class class_name {
public EEnum getFontResolutionMetTech() {
if (fontResolutionMetTechEEnum == null) {
fontResolutionMetTechEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(85);
}
return fontResolutionMetTechEEnum;
} } | public class class_name {
public EEnum getFontResolutionMetTech() {
if (fontResolutionMetTechEEnum == null) {
fontResolutionMetTechEEnum = (EEnum)EPackage.Registry.INSTANCE.getEPackage(AfplibPackage.eNS_URI).getEClassifiers().get(85); // depends on control dependency: [if], data = [none]
}
return fontResolutionMetTechEEnum;
} } |
public class class_name {
public static List<String> listByPrefix(String directory, String prefix, String suffix) {
List<String> ret = new ArrayList<>();
File d = new File(directory);
if( !d.isDirectory() ) {
try {
URL url = new URL(directory);
if( url.getProtocol().equals("file")) {
d = new File(url.getFile());
} else if( url.getProtocol().equals("jar")){
return listJarPrefix(url,prefix,suffix);
}
} catch( MalformedURLException ignore){}
}
if( !d.isDirectory() )
throw new IllegalArgumentException("Must specify an directory. "+directory);
File files[] = d.listFiles();
for( File f : files ) {
if( f.isDirectory() || f.isHidden() )
continue;
if( prefix == null || f.getName().startsWith(prefix )) {
if( suffix ==null || f.getName().endsWith(suffix)) {
ret.add(f.getAbsolutePath());
}
}
}
return ret;
} } | public class class_name {
public static List<String> listByPrefix(String directory, String prefix, String suffix) {
List<String> ret = new ArrayList<>();
File d = new File(directory);
if( !d.isDirectory() ) {
try {
URL url = new URL(directory);
if( url.getProtocol().equals("file")) {
d = new File(url.getFile()); // depends on control dependency: [if], data = [none]
} else if( url.getProtocol().equals("jar")){
return listJarPrefix(url,prefix,suffix); // depends on control dependency: [if], data = [none]
}
} catch( MalformedURLException ignore){} // depends on control dependency: [catch], data = [none]
}
if( !d.isDirectory() )
throw new IllegalArgumentException("Must specify an directory. "+directory);
File files[] = d.listFiles();
for( File f : files ) {
if( f.isDirectory() || f.isHidden() )
continue;
if( prefix == null || f.getName().startsWith(prefix )) {
if( suffix ==null || f.getName().endsWith(suffix)) {
ret.add(f.getAbsolutePath()); // depends on control dependency: [if], data = [none]
}
}
}
return ret;
} } |
public class class_name {
    /**
     * Returns the text after the last '.' in the file's name
     * (e.g. "a.txt" -> "txt", "a.tar.gz" -> "gz"). When the name contains
     * no dot, the whole file name is returned unchanged.
     *
     * @param file file whose name is inspected; only the name, not the path, is used
     * @return the suffix, or the complete file name if no '.' is present
     */
    public static String getSuffix(File file) {
        final String name = file.getName();
        final int dot = name.lastIndexOf('.');
        return (dot == -1) ? name : name.substring(dot + 1);
    } }
public static String getSuffix(File file) {
String filename = file.getName();
int index = filename.lastIndexOf(".");
if (index != -1) {
return filename.substring(index + 1); // depends on control dependency: [if], data = [(index]
}
return filename;
} } |
public class class_name {
private TableCellRenderer getCellRenderer(Class<?> type) {
// try to create one from the factory
TableCellRenderer renderer = getRendererFactory().createTableCellRenderer(type);
// if that fails, recursively try again with the superclass
if (renderer == null && type != null) {
renderer = getCellRenderer(type.getSuperclass());
}
// if that fails, just use the default Object renderer
if (renderer == null) {
renderer = super.getDefaultRenderer(Object.class);
}
return renderer;
} } | public class class_name {
private TableCellRenderer getCellRenderer(Class<?> type) {
// try to create one from the factory
TableCellRenderer renderer = getRendererFactory().createTableCellRenderer(type);
// if that fails, recursively try again with the superclass
if (renderer == null && type != null) {
renderer = getCellRenderer(type.getSuperclass());
// depends on control dependency: [if], data = [none]
}
// if that fails, just use the default Object renderer
if (renderer == null) {
renderer = super.getDefaultRenderer(Object.class);
// depends on control dependency: [if], data = [none]
}
return renderer;
} } |
public class class_name {
    /**
     * Evicts cached block-location entries whose expiration time has passed,
     * and advances the next-expiration timestamp to the earliest remaining
     * entry. A negative timeout disables expiration entirely. Takes the
     * write lock while mutating the cached state.
     */
    public void blockLocationInfoExpiresIfNeeded() {
        // expiration disabled
        if (blkLocInfoExpireTimeout < 0) {
            return;
        }
        long timeNow = System.currentTimeMillis();
        // cheap unlocked check; only lock when something may have expired
        if (timeBlkLocInfoExpire < timeNow) {
            this.writeLock();
            try {
                // tracks the earliest expiration among the surviving entries
                long newTimeBlockExpire = Long.MAX_VALUE;
                List<LocatedBlock> listToRemove = new ArrayList<LocatedBlock>();
                for (LocatedBlock lb : blkLocInfoExpireMap.keySet()) {
                    long expireTime = blkLocInfoExpireMap.get(lb);
                    if (expireTime < timeNow) {
                        if (DFSClient.LOG.isDebugEnabled()) {
                            DFSClient.LOG.debug("Expire cached block location for " + lb);
                        }
                        listToRemove.add(lb);
                    } else if (expireTime < newTimeBlockExpire) {
                        newTimeBlockExpire = expireTime;
                    } else {
                    }
                }
                // remove expired entries from both the located-block list and the map
                // (collected first to avoid mutating the map while iterating its key set)
                super.getLocatedBlocks().removeAll(listToRemove);
                for (LocatedBlock lb : listToRemove) {
                    blkLocInfoExpireMap.remove(lb);
                }
                this.timeBlkLocInfoExpire = newTimeBlockExpire;
            } finally {
                this.writeUnlock();
            }
        }
    } }
public void blockLocationInfoExpiresIfNeeded() {
if (blkLocInfoExpireTimeout < 0) {
return; // depends on control dependency: [if], data = [none]
}
long timeNow = System.currentTimeMillis();
if (timeBlkLocInfoExpire < timeNow) {
this.writeLock(); // depends on control dependency: [if], data = [none]
try {
long newTimeBlockExpire = Long.MAX_VALUE;
List<LocatedBlock> listToRemove = new ArrayList<LocatedBlock>();
for (LocatedBlock lb : blkLocInfoExpireMap.keySet()) {
long expireTime = blkLocInfoExpireMap.get(lb);
if (expireTime < timeNow) {
if (DFSClient.LOG.isDebugEnabled()) {
DFSClient.LOG.debug("Expire cached block location for " + lb); // depends on control dependency: [if], data = [none]
}
listToRemove.add(lb); // depends on control dependency: [if], data = [none]
} else if (expireTime < newTimeBlockExpire) {
newTimeBlockExpire = expireTime; // depends on control dependency: [if], data = [none]
} else {
}
}
super.getLocatedBlocks().removeAll(listToRemove); // depends on control dependency: [try], data = [none]
for (LocatedBlock lb : listToRemove) {
blkLocInfoExpireMap.remove(lb); // depends on control dependency: [for], data = [lb]
}
this.timeBlkLocInfoExpire = newTimeBlockExpire; // depends on control dependency: [try], data = [none]
} finally {
this.writeUnlock();
}
}
} } |
public class class_name {
    /**
     * Handles a publish request for this session: drops requests for other
     * sessions or already-processed event indexes, asks the cluster to resend
     * when an event gap is detected, and otherwise sequences the events and
     * dispatches each one to its registered listeners.
     *
     * @param request the publish request carrying session events
     */
    @SuppressWarnings("unchecked")
    private void handlePublish(PublishRequest request) {
      state.getLogger().trace("{} - Received {}", state.getSessionId(), request);
      // If the request is for another session ID, this may be a session that was previously opened
      // for this client.
      if (request.session() != state.getSessionId()) {
        state.getLogger().trace("{} - Inconsistent session ID: {}", state.getSessionId(), request.session());
        return;
      }
      // If the request event index has already been processed, return.
      if (request.eventIndex() <= state.getEventIndex()) {
        return;
      }
      // If the request's previous event index doesn't equal the previous received event index,
      // respond with an undefined error and the last index received. This will cause the cluster
      // to resend events starting at eventIndex + 1.
      if (request.previousIndex() != state.getEventIndex()) {
        state.getLogger().trace("{} - Inconsistent event index: {}", state.getSessionId(), request.previousIndex());
        connection.send(ResetRequest.builder()
          .withSession(state.getSessionId())
          .withIndex(state.getEventIndex())
          .build());
        return;
      }
      // Store the event index. This will be used to verify that events are received in sequential order.
      state.setEventIndex(request.eventIndex());
      // Sequence the batch, then fan each event out to the listeners registered under its name.
      sequencer.sequenceEvent(request, () -> {
        for (Event<?> event : request.events()) {
          Set<Consumer> listeners = eventListeners.get(event.name());
          if (listeners != null) {
            for (Consumer listener : listeners) {
              listener.accept(event.message());
            }
          }
        }
      });
    } }
@SuppressWarnings("unchecked")
private void handlePublish(PublishRequest request) {
state.getLogger().trace("{} - Received {}", state.getSessionId(), request);
// If the request is for another session ID, this may be a session that was previously opened
// for this client.
if (request.session() != state.getSessionId()) {
state.getLogger().trace("{} - Inconsistent session ID: {}", state.getSessionId(), request.session()); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// If the request event index has already been processed, return.
if (request.eventIndex() <= state.getEventIndex()) {
return; // depends on control dependency: [if], data = [none]
}
// If the request's previous event index doesn't equal the previous received event index,
// respond with an undefined error and the last index received. This will cause the cluster
// to resend events starting at eventIndex + 1.
if (request.previousIndex() != state.getEventIndex()) {
state.getLogger().trace("{} - Inconsistent event index: {}", state.getSessionId(), request.previousIndex()); // depends on control dependency: [if], data = [none]
connection.send(ResetRequest.builder()
.withSession(state.getSessionId())
.withIndex(state.getEventIndex())
.build()); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// Store the event index. This will be used to verify that events are received in sequential order.
state.setEventIndex(request.eventIndex());
sequencer.sequenceEvent(request, () -> {
for (Event<?> event : request.events()) {
Set<Consumer> listeners = eventListeners.get(event.name());
if (listeners != null) {
for (Consumer listener : listeners) {
listener.accept(event.message());
}
}
}
});
} } |
public class class_name {
public static IntStreamEx iterate(int seed, IntPredicate predicate, IntUnaryOperator f) {
Objects.requireNonNull(f);
Objects.requireNonNull(predicate);
Spliterator.OfInt spliterator = new Spliterators.AbstractIntSpliterator(Long.MAX_VALUE, Spliterator.ORDERED
| Spliterator.IMMUTABLE | Spliterator.NONNULL) {
int prev;
boolean started, finished;
@Override
public boolean tryAdvance(IntConsumer action) {
Objects.requireNonNull(action);
if (finished)
return false;
int t;
if (started)
t = f.applyAsInt(prev);
else {
t = seed;
started = true;
}
if (!predicate.test(t)) {
finished = true;
return false;
}
action.accept(prev = t);
return true;
}
@Override
public void forEachRemaining(IntConsumer action) {
Objects.requireNonNull(action);
if (finished)
return;
finished = true;
int t = started ? f.applyAsInt(prev) : seed;
while (predicate.test(t)) {
action.accept(t);
t = f.applyAsInt(t);
}
}
};
return of(spliterator);
} } | public class class_name {
public static IntStreamEx iterate(int seed, IntPredicate predicate, IntUnaryOperator f) {
Objects.requireNonNull(f);
Objects.requireNonNull(predicate);
Spliterator.OfInt spliterator = new Spliterators.AbstractIntSpliterator(Long.MAX_VALUE, Spliterator.ORDERED
| Spliterator.IMMUTABLE | Spliterator.NONNULL) {
int prev;
boolean started, finished;
@Override
public boolean tryAdvance(IntConsumer action) {
Objects.requireNonNull(action);
if (finished)
return false;
int t;
if (started)
t = f.applyAsInt(prev);
else {
t = seed; // depends on control dependency: [if], data = [none]
started = true; // depends on control dependency: [if], data = [none]
}
if (!predicate.test(t)) {
finished = true; // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
action.accept(prev = t);
return true;
}
@Override
public void forEachRemaining(IntConsumer action) {
Objects.requireNonNull(action);
if (finished)
return;
finished = true;
int t = started ? f.applyAsInt(prev) : seed;
while (predicate.test(t)) {
action.accept(t); // depends on control dependency: [while], data = [none]
t = f.applyAsInt(t); // depends on control dependency: [while], data = [none]
}
}
};
return of(spliterator);
} } |
public class class_name {
public static String encode(byte[] source) {
byte[] encoded = encodeBytesToBytes( source, source.length);
try {
return new String(encoded, "US-ASCII");
}
catch (java.io.UnsupportedEncodingException uue) {
return new String( encoded );
}
} } | public class class_name {
public static String encode(byte[] source) {
byte[] encoded = encodeBytesToBytes( source, source.length);
try {
return new String(encoded, "US-ASCII"); // depends on control dependency: [try], data = [none]
}
catch (java.io.UnsupportedEncodingException uue) {
return new String( encoded );
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public com.google.protobuf.ByteString
getInstanceClassBytes() {
java.lang.Object ref = instanceClass_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
instanceClass_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
} } | public class class_name {
public com.google.protobuf.ByteString
getInstanceClassBytes() {
java.lang.Object ref = instanceClass_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
instanceClass_ = b; // depends on control dependency: [if], data = [none]
return b; // depends on control dependency: [if], data = [none]
} else {
return (com.google.protobuf.ByteString) ref; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected void scrubExportFolders(I_CmsReport report) {
if (report != null) {
report.println(
Messages.get().container(Messages.RPT_DELETING_EXPORT_FOLDERS_BEGIN_0),
I_CmsReport.FORMAT_HEADLINE);
}
synchronized (m_lockScrubExportFolders) {
int count = 0;
Integer size = new Integer(m_rfsRules.size() + 1);
// default case
String exportFolderName = CmsFileUtil.normalizePath(m_staticExportPath + '/');
try {
File exportFolder = new File(exportFolderName);
// check if export file exists, if so delete it
if (exportFolder.exists() && exportFolder.canWrite()) {
CmsFileUtil.purgeDirectory(exportFolder);
}
count++;
if (report != null) {
report.println(
Messages.get().container(
Messages.RPT_DELETE_EXPORT_FOLDER_3,
new Integer(count),
size,
exportFolderName),
I_CmsReport.FORMAT_NOTE);
} else {
// write log message
if (LOG.isInfoEnabled()) {
LOG.info(Messages.get().getBundle().key(Messages.LOG_DEL_MAIN_SE_FOLDER_1, exportFolderName));
}
}
} catch (Throwable t) {
// ignore, nothing to do about the
if (LOG.isWarnEnabled()) {
LOG.warn(
Messages.get().getBundle().key(Messages.LOG_FOLDER_DELETION_FAILED_1, exportFolderName),
t);
}
}
// iterate over the rules
Iterator<CmsStaticExportRfsRule> it = m_rfsRules.iterator();
while (it.hasNext()) {
CmsStaticExportRfsRule rule = it.next();
exportFolderName = CmsFileUtil.normalizePath(rule.getExportPath() + '/');
try {
File exportFolder = new File(exportFolderName);
// check if export file exists, if so delete it
if (exportFolder.exists() && exportFolder.canWrite()) {
CmsFileUtil.purgeDirectory(exportFolder);
}
count++;
if (report != null) {
report.println(
Messages.get().container(
Messages.RPT_DELETE_EXPORT_FOLDER_3,
new Integer(count),
size,
exportFolderName),
I_CmsReport.FORMAT_NOTE);
} else {
// write log message
if (LOG.isInfoEnabled()) {
LOG.info(
Messages.get().getBundle().key(Messages.LOG_DEL_MAIN_SE_FOLDER_1, exportFolderName));
}
}
} catch (Throwable t) {
// ignore, nothing to do about the
if (LOG.isWarnEnabled()) {
LOG.warn(
Messages.get().getBundle().key(Messages.LOG_FOLDER_DELETION_FAILED_1, exportFolderName),
t);
}
}
}
}
if (report != null) {
report.println(
Messages.get().container(Messages.RPT_DELETING_EXPORT_FOLDERS_END_0),
I_CmsReport.FORMAT_HEADLINE);
}
} } | public class class_name {
protected void scrubExportFolders(I_CmsReport report) {
if (report != null) {
report.println(
Messages.get().container(Messages.RPT_DELETING_EXPORT_FOLDERS_BEGIN_0),
I_CmsReport.FORMAT_HEADLINE); // depends on control dependency: [if], data = [none]
}
synchronized (m_lockScrubExportFolders) {
int count = 0;
Integer size = new Integer(m_rfsRules.size() + 1);
// default case
String exportFolderName = CmsFileUtil.normalizePath(m_staticExportPath + '/');
try {
File exportFolder = new File(exportFolderName);
// check if export file exists, if so delete it
if (exportFolder.exists() && exportFolder.canWrite()) {
CmsFileUtil.purgeDirectory(exportFolder); // depends on control dependency: [if], data = [none]
}
count++; // depends on control dependency: [try], data = [none]
if (report != null) {
report.println(
Messages.get().container(
Messages.RPT_DELETE_EXPORT_FOLDER_3,
new Integer(count),
size,
exportFolderName),
I_CmsReport.FORMAT_NOTE); // depends on control dependency: [if], data = [none]
} else {
// write log message
if (LOG.isInfoEnabled()) {
LOG.info(Messages.get().getBundle().key(Messages.LOG_DEL_MAIN_SE_FOLDER_1, exportFolderName)); // depends on control dependency: [if], data = [none]
}
}
} catch (Throwable t) {
// ignore, nothing to do about the
if (LOG.isWarnEnabled()) {
LOG.warn(
Messages.get().getBundle().key(Messages.LOG_FOLDER_DELETION_FAILED_1, exportFolderName),
t); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
// iterate over the rules
Iterator<CmsStaticExportRfsRule> it = m_rfsRules.iterator();
while (it.hasNext()) {
CmsStaticExportRfsRule rule = it.next();
exportFolderName = CmsFileUtil.normalizePath(rule.getExportPath() + '/'); // depends on control dependency: [while], data = [none]
try {
File exportFolder = new File(exportFolderName);
// check if export file exists, if so delete it
if (exportFolder.exists() && exportFolder.canWrite()) {
CmsFileUtil.purgeDirectory(exportFolder); // depends on control dependency: [if], data = [none]
}
count++; // depends on control dependency: [try], data = [none]
if (report != null) {
report.println(
Messages.get().container(
Messages.RPT_DELETE_EXPORT_FOLDER_3,
new Integer(count),
size,
exportFolderName),
I_CmsReport.FORMAT_NOTE); // depends on control dependency: [if], data = [none]
} else {
// write log message
if (LOG.isInfoEnabled()) {
LOG.info(
Messages.get().getBundle().key(Messages.LOG_DEL_MAIN_SE_FOLDER_1, exportFolderName)); // depends on control dependency: [if], data = [none]
}
}
} catch (Throwable t) {
// ignore, nothing to do about the
if (LOG.isWarnEnabled()) {
LOG.warn(
Messages.get().getBundle().key(Messages.LOG_FOLDER_DELETION_FAILED_1, exportFolderName),
t); // depends on control dependency: [if], data = [none]
}
} // depends on control dependency: [catch], data = [none]
}
}
if (report != null) {
report.println(
Messages.get().container(Messages.RPT_DELETING_EXPORT_FOLDERS_END_0),
I_CmsReport.FORMAT_HEADLINE); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
void init() throws V4L4JException{
try {
super.init();
setJPGQuality(quality);
} catch (ImageFormatException ife){
if(format == -1){
String msg ="v4l4j was unable to find image format supported by"
+ "the \nvideo device and that can be encoded in JPEG.\n"
+ "Please let the author know about this, so that support\n"
+ "for this video device can be improved. See \nREADME file"
+ " on how to submit v4l4j reports.";
System.out.println(msg);
ife = new ImageFormatException(msg);
}
throw ife;
}
} } | public class class_name {
void init() throws V4L4JException{
try {
super.init();
setJPGQuality(quality);
} catch (ImageFormatException ife){
if(format == -1){
String msg ="v4l4j was unable to find image format supported by"
+ "the \nvideo device and that can be encoded in JPEG.\n"
+ "Please let the author know about this, so that support\n"
+ "for this video device can be improved. See \nREADME file"
+ " on how to submit v4l4j reports.";
System.out.println(msg); // depends on control dependency: [if], data = [none]
ife = new ImageFormatException(msg); // depends on control dependency: [if], data = [none]
}
throw ife;
}
} } |
public class class_name {
public String printRelativeOrDateTime(
UnixTime moment,
Timezone tz,
TimeUnit precision,
long maxdelta,
TemporalFormatter<Moment> formatter
) {
UnixTime ref = this.getReferenceClock().currentTime();
Moment t1 = Moment.from(ref);
Moment t2 = Moment.from(moment);
long delta = t1.until(t2, TimeUnit.SECONDS);
if (Math.abs(delta) > maxdelta) {
return formatter.format(t2);
} else if (
(precision.compareTo(TimeUnit.SECONDS) <= 0)
&& (Math.abs(delta) < 60L)
) {
return this.printRelativeSeconds(t1, t2, delta);
}
return this.printRelativeTime(t1, t2, tz, precision, null, null);
} } | public class class_name {
public String printRelativeOrDateTime(
UnixTime moment,
Timezone tz,
TimeUnit precision,
long maxdelta,
TemporalFormatter<Moment> formatter
) {
UnixTime ref = this.getReferenceClock().currentTime();
Moment t1 = Moment.from(ref);
Moment t2 = Moment.from(moment);
long delta = t1.until(t2, TimeUnit.SECONDS);
if (Math.abs(delta) > maxdelta) {
return formatter.format(t2); // depends on control dependency: [if], data = [none]
} else if (
(precision.compareTo(TimeUnit.SECONDS) <= 0)
&& (Math.abs(delta) < 60L)
) {
return this.printRelativeSeconds(t1, t2, delta); // depends on control dependency: [if], data = []
}
return this.printRelativeTime(t1, t2, tz, precision, null, null);
} } |
public class class_name {
public String getTitle(int type)
{
if (allow(type&ID3V1))
{
return id3v1.getTitle();
}
if (allow(type&ID3V2))
{
return id3v2.getFrameDataString(ID3v2Frames.TITLE);
}
return null;
} } | public class class_name {
public String getTitle(int type)
{
if (allow(type&ID3V1))
{
return id3v1.getTitle(); // depends on control dependency: [if], data = [none]
}
if (allow(type&ID3V2))
{
return id3v2.getFrameDataString(ID3v2Frames.TITLE); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
private int combine_blocks(long addr)
throws IOException
{
long cum_length;
int length;
long next_addr = addr;
length = readInt();
if (length > 0) {
while ((length > 0) && (next_addr + length < tail_ptr)) {
next_addr += length;
seek(next_addr);
length = readInt();
}
if (length < 0) {
cum_length = next_addr - addr;
seek(addr);
writeInt((int) cum_length);
} else {
cum_length = tail_ptr - addr;
}
return(int) cum_length;
} else {
return length;
}
} } | public class class_name {
private int combine_blocks(long addr)
throws IOException
{
long cum_length;
int length;
long next_addr = addr;
length = readInt();
if (length > 0) {
while ((length > 0) && (next_addr + length < tail_ptr)) {
next_addr += length; // depends on control dependency: [while], data = [none]
seek(next_addr); // depends on control dependency: [while], data = [none]
length = readInt(); // depends on control dependency: [while], data = [none]
}
if (length < 0) {
cum_length = next_addr - addr; // depends on control dependency: [if], data = [none]
seek(addr); // depends on control dependency: [if], data = [none]
writeInt((int) cum_length); // depends on control dependency: [if], data = [none]
} else {
cum_length = tail_ptr - addr; // depends on control dependency: [if], data = [none]
}
return(int) cum_length;
} else {
return length;
}
} } |
public class class_name {
private void process(String tebase) {
deleteConfigFile(tebase);
docMain = builder.newDocument();
config = docMain.createElement("config");
docMain.appendChild(config);
scripts = docMain.createElement("scripts");
config.appendChild(scripts);
String scriptsDir = tebase + "scripts";
LOGR.info("Scripts directory found at " + scriptsDir);
File[] testScriptsDir = new File(scriptsDir).listFiles();
if (testScriptsDir != null) {
// if no tests are found under scripts, listFiles will return null,
// if not iterated over the scripts directories
for (File dir : testScriptsDir) {
processDir(dir);
}
}
String mainconfig = tebase + "config.xml";
saveConfigFile(docMain, mainconfig);
} } | public class class_name {
private void process(String tebase) {
deleteConfigFile(tebase);
docMain = builder.newDocument();
config = docMain.createElement("config");
docMain.appendChild(config);
scripts = docMain.createElement("scripts");
config.appendChild(scripts);
String scriptsDir = tebase + "scripts";
LOGR.info("Scripts directory found at " + scriptsDir);
File[] testScriptsDir = new File(scriptsDir).listFiles();
if (testScriptsDir != null) {
// if no tests are found under scripts, listFiles will return null,
// if not iterated over the scripts directories
for (File dir : testScriptsDir) {
processDir(dir); // depends on control dependency: [for], data = [dir]
}
}
String mainconfig = tebase + "config.xml";
saveConfigFile(docMain, mainconfig);
} } |
public class class_name {
protected Diff processRevision(final Revision revision)
throws UnsupportedEncodingException
{
// ----------------------------------------------------//
// ** HERE IS THE POINT TO INCLUDE ADDITIONAL FILTERS //
// TO REMOVE FAULTY REVISIONS FROM FURTHER PROCESSING //
// ----------------------------------------------------//
try{
if(revision.getRevisionText()==null){
return null;
}
}catch(NullPointerException e){
return null;
}
revTemp = revision.getRevisionText().toCharArray();
if (MODE_SURROGATES == SurrogateModes.DISCARD_REVISION) {
// Ignore Revision with surrogate characters
if (Surrogates.scan(revTemp)) {
return null;
}
}
Diff diff;
// Full revision
if (revisionCounter % COUNTER_FULL_REVISION == 0) {
diff = generateFullRevision(revision);
// Diffed revision
}
else {
diff = generateDiff(revPrevious, revTemp);
// if the current revision is identical to the last valid revision
if (diff.size() == 0) {
return null;
}
}
return diff;
} } | public class class_name {
protected Diff processRevision(final Revision revision)
throws UnsupportedEncodingException
{
// ----------------------------------------------------//
// ** HERE IS THE POINT TO INCLUDE ADDITIONAL FILTERS //
// TO REMOVE FAULTY REVISIONS FROM FURTHER PROCESSING //
// ----------------------------------------------------//
try{
if(revision.getRevisionText()==null){
return null;
}
}catch(NullPointerException e){
return null;
}
revTemp = revision.getRevisionText().toCharArray();
if (MODE_SURROGATES == SurrogateModes.DISCARD_REVISION) {
// Ignore Revision with surrogate characters
if (Surrogates.scan(revTemp)) {
return null; // depends on control dependency: [if], data = [none]
}
}
Diff diff;
// Full revision
if (revisionCounter % COUNTER_FULL_REVISION == 0) {
diff = generateFullRevision(revision);
// Diffed revision
}
else {
diff = generateDiff(revPrevious, revTemp);
// if the current revision is identical to the last valid revision
if (diff.size() == 0) {
return null; // depends on control dependency: [if], data = [none]
}
}
return diff;
} } |
public class class_name {
protected static void showBoot() {
logger.info("Lasta Di boot successfully.");
logger.info(" SmartDeploy Mode: {}", SmartDeployUtil.getDeployMode(container));
if (getContainer().hasComponentDef(NamingConvention.class)) { // just in case
final NamingConvention convention = getContainer().getComponent(NamingConvention.class);
final StringBuilder sb = new StringBuilder();
for (String rootPkg : convention.getRootPackageNames()) {
if (sb.length() > 0) {
sb.append(", ");
}
sb.append(rootPkg);
}
logger.info(" Smart Package: {}", sb.toString());
}
} } | public class class_name {
protected static void showBoot() {
logger.info("Lasta Di boot successfully.");
logger.info(" SmartDeploy Mode: {}", SmartDeployUtil.getDeployMode(container));
if (getContainer().hasComponentDef(NamingConvention.class)) { // just in case
final NamingConvention convention = getContainer().getComponent(NamingConvention.class);
final StringBuilder sb = new StringBuilder();
for (String rootPkg : convention.getRootPackageNames()) {
if (sb.length() > 0) {
sb.append(", "); // depends on control dependency: [if], data = [none]
}
sb.append(rootPkg); // depends on control dependency: [for], data = [rootPkg]
}
logger.info(" Smart Package: {}", sb.toString());
}
} } |
public class class_name {
public PageContextImpl initialize(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp, String errorPageURL, boolean needsSession, int bufferSize,
boolean autoFlush, boolean isChild, boolean ignoreScopes) {
parent = null;
requestId = counter++;
appListenerType = ApplicationListener.TYPE_NONE;
this.ignoreScopes = ignoreScopes;
ReqRspUtil.setContentType(rsp, "text/html; charset=" + config.getWebCharset().name());
this.isChild = isChild;
applicationContext = defaultApplicationContext;
startTime = System.currentTimeMillis();
thread = Thread.currentThread();
this.req = new HTTPServletRequestWrap(req);
this.rsp = rsp;
this.servlet = servlet;
// Writers
if (config.debugLogOutput()) {
CFMLWriter w = config.getCFMLWriter(this, req, rsp);
w.setAllowCompression(false);
DebugCFMLWriter dcw = new DebugCFMLWriter(w);
bodyContentStack.init(dcw);
debugger.setOutputLog(dcw);
}
else {
bodyContentStack.init(config.getCFMLWriter(this, req, rsp));
}
writer = bodyContentStack.getWriter();
forceWriter = writer;
// Scopes
server = ScopeContext.getServerScope(this, ignoreScopes);
if (hasFamily) {
variablesRoot = new VariablesImpl();
variables = variablesRoot;
request = new RequestImpl();
_url = new URLImpl();
_form = new FormImpl();
urlForm = new UrlFormImpl(_form, _url);
undefined = new UndefinedImpl(this, getScopeCascadingType());
hasFamily = false;
}
else if (variables == null) {
variablesRoot = new VariablesImpl();
variables = variablesRoot;
}
request.initialize(this);
if (config.mergeFormAndURL()) {
url = urlForm;
form = urlForm;
}
else {
url = _url;
form = _form;
}
// url.initialize(this);
// form.initialize(this);
// undefined.initialize(this);
psq = config.getPSQL();
fdEnabled = !config.allowRequestTimeout();
if (config.getExecutionLogEnabled()) this.execLog = config.getExecutionLogFactory().getInstance(this);
if (debugger != null) debugger.init(config);
undefined.initialize(this);
timeoutStacktrace = null;
return this;
} } | public class class_name {
public PageContextImpl initialize(HttpServlet servlet, HttpServletRequest req, HttpServletResponse rsp, String errorPageURL, boolean needsSession, int bufferSize,
boolean autoFlush, boolean isChild, boolean ignoreScopes) {
parent = null;
requestId = counter++;
appListenerType = ApplicationListener.TYPE_NONE;
this.ignoreScopes = ignoreScopes;
ReqRspUtil.setContentType(rsp, "text/html; charset=" + config.getWebCharset().name());
this.isChild = isChild;
applicationContext = defaultApplicationContext;
startTime = System.currentTimeMillis();
thread = Thread.currentThread();
this.req = new HTTPServletRequestWrap(req);
this.rsp = rsp;
this.servlet = servlet;
// Writers
if (config.debugLogOutput()) {
CFMLWriter w = config.getCFMLWriter(this, req, rsp);
w.setAllowCompression(false); // depends on control dependency: [if], data = [none]
DebugCFMLWriter dcw = new DebugCFMLWriter(w);
bodyContentStack.init(dcw); // depends on control dependency: [if], data = [none]
debugger.setOutputLog(dcw); // depends on control dependency: [if], data = [none]
}
else {
bodyContentStack.init(config.getCFMLWriter(this, req, rsp)); // depends on control dependency: [if], data = [none]
}
writer = bodyContentStack.getWriter();
forceWriter = writer;
// Scopes
server = ScopeContext.getServerScope(this, ignoreScopes);
if (hasFamily) {
variablesRoot = new VariablesImpl(); // depends on control dependency: [if], data = [none]
variables = variablesRoot; // depends on control dependency: [if], data = [none]
request = new RequestImpl(); // depends on control dependency: [if], data = [none]
_url = new URLImpl(); // depends on control dependency: [if], data = [none]
_form = new FormImpl(); // depends on control dependency: [if], data = [none]
urlForm = new UrlFormImpl(_form, _url); // depends on control dependency: [if], data = [none]
undefined = new UndefinedImpl(this, getScopeCascadingType()); // depends on control dependency: [if], data = [none]
hasFamily = false; // depends on control dependency: [if], data = [none]
}
else if (variables == null) {
variablesRoot = new VariablesImpl(); // depends on control dependency: [if], data = [none]
variables = variablesRoot; // depends on control dependency: [if], data = [none]
}
request.initialize(this);
if (config.mergeFormAndURL()) {
url = urlForm; // depends on control dependency: [if], data = [none]
form = urlForm; // depends on control dependency: [if], data = [none]
}
else {
url = _url; // depends on control dependency: [if], data = [none]
form = _form; // depends on control dependency: [if], data = [none]
}
// url.initialize(this);
// form.initialize(this);
// undefined.initialize(this);
psq = config.getPSQL();
fdEnabled = !config.allowRequestTimeout();
if (config.getExecutionLogEnabled()) this.execLog = config.getExecutionLogFactory().getInstance(this);
if (debugger != null) debugger.init(config);
undefined.initialize(this);
timeoutStacktrace = null;
return this;
} } |
public class class_name {
private void addAnnotation(PsiElementFactory elementFactory, JavaCodeStyleManager styleManager) {
boolean annotated = AnnotationUtil.isAnnotated(psiClass, ANNOTATION_PACKAGE+"."+ANNOTATION_NAME, false);
if (!annotated) {
styleManager.shortenClassReferences(psiClass.getModifierList().addAnnotation(
ANNOTATION_NAME));
}
} } | public class class_name {
private void addAnnotation(PsiElementFactory elementFactory, JavaCodeStyleManager styleManager) {
boolean annotated = AnnotationUtil.isAnnotated(psiClass, ANNOTATION_PACKAGE+"."+ANNOTATION_NAME, false);
if (!annotated) {
styleManager.shortenClassReferences(psiClass.getModifierList().addAnnotation(
ANNOTATION_NAME)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public EClass getModelMergerPluginConfiguration() {
if (modelMergerPluginConfigurationEClass == null) {
modelMergerPluginConfigurationEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(52);
}
return modelMergerPluginConfigurationEClass;
} } | public class class_name {
@Override
public EClass getModelMergerPluginConfiguration() {
if (modelMergerPluginConfigurationEClass == null) {
modelMergerPluginConfigurationEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(StorePackage.eNS_URI).getEClassifiers().get(52);
// depends on control dependency: [if], data = [none]
}
return modelMergerPluginConfigurationEClass;
} } |
public class class_name {
public void addAccessEntry(String key, String value) {
m_accessControl.put(key, value);
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_ADD_ACCESS_ENTRY_2, key, value));
}
} } | public class class_name {
public void addAccessEntry(String key, String value) {
m_accessControl.put(key, value);
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_ADD_ACCESS_ENTRY_2, key, value)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private synchronized static Context recoverAppContext()
{
try
{
final Class<?> activityThreadClass = Class.forName( "android.app.ActivityThread" );
final Method method = activityThreadClass.getMethod( "currentApplication" );
Application app = (Application) method.invoke( null, (Object[]) null );
return app.getApplicationContext();
}
catch ( NoSuchMethodException e )
{
return recoverAppContextOldAndroid();
}
catch ( Throwable e )
{
e.printStackTrace();
}
return null;
} } | public class class_name {
private synchronized static Context recoverAppContext()
{
try
{
final Class<?> activityThreadClass = Class.forName( "android.app.ActivityThread" );
final Method method = activityThreadClass.getMethod( "currentApplication" );
Application app = (Application) method.invoke( null, (Object[]) null );
return app.getApplicationContext(); // depends on control dependency: [try], data = [none]
}
catch ( NoSuchMethodException e )
{
return recoverAppContextOldAndroid();
} // depends on control dependency: [catch], data = [none]
catch ( Throwable e )
{
e.printStackTrace();
} // depends on control dependency: [catch], data = [none]
return null;
} } |
public class class_name {
@Override
public void changed(Database.ChangeEvent event) {
synchronized (changesLock) {
if (isTimeout)
return;
lastChangesTimestamp = System.currentTimeMillis();
// Stop timeout timer:
stopTimeout();
// In race condition, new doc or update doc is fired before starting to observe the
// DatabaseChangeEvent, it allows to skip few document changes with /_changes REST API.
// Make sure all document changes are tread by /_changes REST API.
if (!filled) {
filled = true;
RevisionList changes = db.changesSince(changesSince, changesOptions,
changesFilter, changesFilterParams);
if (changes.size() > 0) {
sendLongpollChanges(changes, changesSince);
return;
}
}
List<RevisionInternal> revs = new ArrayList<RevisionInternal>();
List<DocumentChange> changes = event.getChanges();
for (DocumentChange change : changes) {
RevisionInternal rev = change.getAddedRevision();
if (rev == null)
continue;
String winningRevID = change.getWinningRevisionID();
if (!this.changesIncludesConflicts) {
if (winningRevID == null)
continue; // // this change doesn't affect the winning rev ID, no need to send it
else if (!winningRevID.equals(rev.getRevID())) {
// This rev made a _different_ rev current, so substitute that one.
// We need to emit the current sequence # in the feed, so put it in the rev.
// This isn't correct internally (this is an old rev so it has an older sequence)
// but consumers of the _changes feed don't care about the internal state.
RevisionInternal mRev = db.getDocument(rev.getDocID(), winningRevID, changesIncludesDocs);
mRev.setSequence(rev.getSequence());
rev = mRev;
}
}
if (!event.getSource().runFilter(changesFilter, changesFilterParams, rev))
continue;
if (longpoll) {
revs.add(rev);
} else {
Log.d(TAG, "Router: Sending continuous change chunk");
sendContinuousChange(rev);
}
timeoutLastSeqence = rev.getSequence();
}
if (longpoll && revs.size() > 0)
sendLongpollChanges(revs, changesSince);
else
// Restart timeout timer for continuous feed request:
startTimeout();
}
} } | public class class_name {
@Override
public void changed(Database.ChangeEvent event) {
synchronized (changesLock) {
if (isTimeout)
return;
lastChangesTimestamp = System.currentTimeMillis();
// Stop timeout timer:
stopTimeout();
// In race condition, new doc or update doc is fired before starting to observe the
// DatabaseChangeEvent, it allows to skip few document changes with /_changes REST API.
// Make sure all document changes are tread by /_changes REST API.
if (!filled) {
filled = true; // depends on control dependency: [if], data = [none]
RevisionList changes = db.changesSince(changesSince, changesOptions,
changesFilter, changesFilterParams);
if (changes.size() > 0) {
sendLongpollChanges(changes, changesSince); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
List<RevisionInternal> revs = new ArrayList<RevisionInternal>();
List<DocumentChange> changes = event.getChanges();
for (DocumentChange change : changes) {
RevisionInternal rev = change.getAddedRevision();
if (rev == null)
continue;
String winningRevID = change.getWinningRevisionID();
if (!this.changesIncludesConflicts) {
if (winningRevID == null)
continue; // // this change doesn't affect the winning rev ID, no need to send it
else if (!winningRevID.equals(rev.getRevID())) {
// This rev made a _different_ rev current, so substitute that one.
// We need to emit the current sequence # in the feed, so put it in the rev.
// This isn't correct internally (this is an old rev so it has an older sequence)
// but consumers of the _changes feed don't care about the internal state.
RevisionInternal mRev = db.getDocument(rev.getDocID(), winningRevID, changesIncludesDocs);
mRev.setSequence(rev.getSequence()); // depends on control dependency: [if], data = [none]
rev = mRev; // depends on control dependency: [if], data = [none]
}
}
if (!event.getSource().runFilter(changesFilter, changesFilterParams, rev))
continue;
if (longpoll) {
revs.add(rev); // depends on control dependency: [if], data = [none]
} else {
Log.d(TAG, "Router: Sending continuous change chunk"); // depends on control dependency: [if], data = [none]
sendContinuousChange(rev); // depends on control dependency: [if], data = [none]
}
timeoutLastSeqence = rev.getSequence(); // depends on control dependency: [for], data = [none]
}
if (longpoll && revs.size() > 0)
sendLongpollChanges(revs, changesSince);
else
// Restart timeout timer for continuous feed request:
startTimeout();
}
} } |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.