code: string, lengths 130 to 281k
code_dependency: string, lengths 182 to 306k
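Each record pairs a plain Java snippet (code) with the same snippet in which statements nested inside a control structure carry a trailing comment naming that structure (code_dependency). The minimal pair below is only a sketch of the format: the class body (the ready flag, the count field, and the reset method) is invented for illustration and does not come from the dataset, and the data = [none] part simply mirrors the most common form seen in the rows that follow.
code: public class class_name { private boolean ready; private int count; void reset() { if (ready) { count = 0; } } }
code_dependency: public class class_name { private boolean ready; private int count; void reset() { if (ready) { count = 0; // depends on control dependency: [if], data = [none] } } }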
public class class_name { public static <S extends Model> Object getAndRemovePrimaryKey(S model) { String fieldName = getPKField(model.getClass()); Object value = invokeMethod(model, getGetterName(fieldName), EMPTY_ARG); if (null != value) { invokeMethod(model, getSetterName(fieldName), NULL_ARG); } return value; } }
public class class_name { public static <S extends Model> Object getAndRemovePrimaryKey(S model) { String fieldName = getPKField(model.getClass()); Object value = invokeMethod(model, getGetterName(fieldName), EMPTY_ARG); if (null != value) { invokeMethod(model, getSetterName(fieldName), NULL_ARG); // depends on control dependency: [if], data = [none] } return value; } }
public class class_name { private Expression parseTildeExpression() { consumeNextToken(TILDE); int major = intOf(consumeNextToken(NUMERIC).lexeme); if (!tokens.positiveLookahead(DOT)) { return new GreaterOrEqual(versionOf(major, 0, 0)); } consumeNextToken(DOT); int minor = intOf(consumeNextToken(NUMERIC).lexeme); if (!tokens.positiveLookahead(DOT)) { return new And(new GreaterOrEqual(versionOf(major, minor, 0)), new Less(versionOf(major + 1, 0, 0))); } consumeNextToken(DOT); int patch = intOf(consumeNextToken(NUMERIC).lexeme); return new And(new GreaterOrEqual(versionOf(major, minor, patch)), new Less(versionOf(major, minor + 1, 0))); } }
public class class_name { private Expression parseTildeExpression() { consumeNextToken(TILDE); int major = intOf(consumeNextToken(NUMERIC).lexeme); if (!tokens.positiveLookahead(DOT)) { return new GreaterOrEqual(versionOf(major, 0, 0)); // depends on control dependency: [if], data = [none] } consumeNextToken(DOT); int minor = intOf(consumeNextToken(NUMERIC).lexeme); if (!tokens.positiveLookahead(DOT)) { return new And(new GreaterOrEqual(versionOf(major, minor, 0)), new Less(versionOf(major + 1, 0, 0))); // depends on control dependency: [if], data = [none] } consumeNextToken(DOT); int patch = intOf(consumeNextToken(NUMERIC).lexeme); return new And(new GreaterOrEqual(versionOf(major, minor, patch)), new Less(versionOf(major, minor + 1, 0))); } }
public class class_name { protected void buildAdditionalPublicanCfg(final BuildData buildData) throws BuildProcessingException { final ContentSpec contentSpec = buildData.getContentSpec(); final Map<String, String> overrides = buildData.getBuildOptions().getOverrides(); final Map<String, String> additionalPublicanCfgs = contentSpec.getAllAdditionalPublicanCfgs(); for (final Map.Entry<String, String> entry : additionalPublicanCfgs.entrySet()) { final String brandOverride = overrides.containsKey(CSConstants.BRAND_OVERRIDE) ? overrides.get( CSConstants.BRAND_OVERRIDE) : (overrides.containsKey(CSConstants.BRAND_ALT_OVERRIDE) ? overrides.get( CSConstants.BRAND_ALT_OVERRIDE) : null); final String brand = brandOverride != null ? brandOverride : (contentSpec.getBrand() == null ? getDefaultBrand( buildData) : contentSpec.getBrand()); // Setup publican.cfg final StringBuilder publicanCfg = new StringBuilder("xml_lang: ").append(buildData.getOutputLocale()).append("\n"); publicanCfg.append("type: ").append(contentSpec.getBookType().toString().replaceAll("-Draft", "") + "\n"); publicanCfg.append("brand: ").append(brand).append("\n"); // Add the custom content publicanCfg.append(DocBookBuildUtilities.cleanUserPublicanCfg(entry.getValue())); // Add the dtdver property if (buildData.getDocBookVersion() == DocBookVersion.DOCBOOK_50) { publicanCfg.append("dtdver: \"5.0\"\n"); } // Add docname if it wasn't specified and the escaped title is valid Matcher m = null; if (BuilderConstants.VALID_PUBLICAN_DOCNAME_PATTERN.matcher(buildData.getEscapedBookTitle()).matches()) { m = DOCNAME_PATTERN.matcher(publicanCfg); if (!m.find()) { publicanCfg.append("docname: ").append(buildData.getEscapedBookTitle().replaceAll("_", " ")).append("\n"); } } // Add product if it wasn't specified m = PRODUCT_PATTERN.matcher(publicanCfg); if (!m.find()) { publicanCfg.append("product: ").append(escapeProduct(buildData.getOriginalBookProduct())).append("\n"); } // Add the mainfile attribute publicanCfg.append("mainfile: ").append(buildData.getRootBookFileName()).append("\n"); // Add version if it wasn't specified m = VERSION_PATTERN.matcher(publicanCfg); if (!m.find()) { String version = contentSpec.getBookVersion(); if (isNullOrEmpty(version)) { version = DocBookBuildUtilities.getKeyValueNodeText(buildData, contentSpec.getVersionNode()); } if (isNullOrEmpty(version)) { version = BuilderConstants.DEFAULT_VERSION; } publicanCfg.append("version: ").append(escapeVersion(version)).append("\n"); } String fixedPublicanCfg = publicanCfg.toString(); if (buildData.getBuildOptions().getPublicanShowRemarks()) { // Remove any current show_remarks definitions if (publicanCfg.indexOf("show_remarks") != -1) { fixedPublicanCfg = fixedPublicanCfg.replaceAll("show_remarks\\s*:\\s*\\d+\\s*(\\r)?(\\n)?", ""); } fixedPublicanCfg += "show_remarks: 1\n"; } addToZip(buildData.getRootBookFolder() + entry.getKey(), fixedPublicanCfg, buildData); } } }
public class class_name { protected void buildAdditionalPublicanCfg(final BuildData buildData) throws BuildProcessingException { final ContentSpec contentSpec = buildData.getContentSpec(); final Map<String, String> overrides = buildData.getBuildOptions().getOverrides(); final Map<String, String> additionalPublicanCfgs = contentSpec.getAllAdditionalPublicanCfgs(); for (final Map.Entry<String, String> entry : additionalPublicanCfgs.entrySet()) { final String brandOverride = overrides.containsKey(CSConstants.BRAND_OVERRIDE) ? overrides.get( CSConstants.BRAND_OVERRIDE) : (overrides.containsKey(CSConstants.BRAND_ALT_OVERRIDE) ? overrides.get( CSConstants.BRAND_ALT_OVERRIDE) : null); final String brand = brandOverride != null ? brandOverride : (contentSpec.getBrand() == null ? getDefaultBrand( buildData) : contentSpec.getBrand()); // Setup publican.cfg final StringBuilder publicanCfg = new StringBuilder("xml_lang: ").append(buildData.getOutputLocale()).append("\n"); publicanCfg.append("type: ").append(contentSpec.getBookType().toString().replaceAll("-Draft", "") + "\n"); publicanCfg.append("brand: ").append(brand).append("\n"); // Add the custom content publicanCfg.append(DocBookBuildUtilities.cleanUserPublicanCfg(entry.getValue())); // Add the dtdver property if (buildData.getDocBookVersion() == DocBookVersion.DOCBOOK_50) { publicanCfg.append("dtdver: \"5.0\"\n"); } // Add docname if it wasn't specified and the escaped title is valid Matcher m = null; if (BuilderConstants.VALID_PUBLICAN_DOCNAME_PATTERN.matcher(buildData.getEscapedBookTitle()).matches()) { m = DOCNAME_PATTERN.matcher(publicanCfg); if (!m.find()) { publicanCfg.append("docname: ").append(buildData.getEscapedBookTitle().replaceAll("_", " ")).append("\n"); // depends on control dependency: [if], data = [none] } } // Add product if it wasn't specified m = PRODUCT_PATTERN.matcher(publicanCfg); if (!m.find()) { publicanCfg.append("product: ").append(escapeProduct(buildData.getOriginalBookProduct())).append("\n"); } // Add the mainfile attribute publicanCfg.append("mainfile: ").append(buildData.getRootBookFileName()).append("\n"); // Add version if it wasn't specified m = VERSION_PATTERN.matcher(publicanCfg); if (!m.find()) { String version = contentSpec.getBookVersion(); if (isNullOrEmpty(version)) { version = DocBookBuildUtilities.getKeyValueNodeText(buildData, contentSpec.getVersionNode()); } if (isNullOrEmpty(version)) { version = BuilderConstants.DEFAULT_VERSION; } publicanCfg.append("version: ").append(escapeVersion(version)).append("\n"); } String fixedPublicanCfg = publicanCfg.toString(); if (buildData.getBuildOptions().getPublicanShowRemarks()) { // Remove any current show_remarks definitions if (publicanCfg.indexOf("show_remarks") != -1) { fixedPublicanCfg = fixedPublicanCfg.replaceAll("show_remarks\\s*:\\s*\\d+\\s*(\\r)?(\\n)?", ""); } fixedPublicanCfg += "show_remarks: 1\n"; } addToZip(buildData.getRootBookFolder() + entry.getKey(), fixedPublicanCfg, buildData); } } }
public class class_name { @Override final public void setSharingPolicy(int sharingPolicy) { ValidateUtility.sharingPolicy(sharingPolicy); this.sharingPolicy = sharingPolicy; if (tc.isDebugEnabled()) { Tr.debug(tc, "setSharingPolicy() set global sharingPolicy to " + this.sharingPolicy + " for cacheName=" + cache.getCacheName()); } } }
public class class_name { @Override final public void setSharingPolicy(int sharingPolicy) { ValidateUtility.sharingPolicy(sharingPolicy); this.sharingPolicy = sharingPolicy; if (tc.isDebugEnabled()) { Tr.debug(tc, "setSharingPolicy() set global sharingPolicy to " + this.sharingPolicy + " for cacheName=" + cache.getCacheName()); // depends on control dependency: [if], data = [none] } } }
public class class_name { void _emitJcc(INST_CODE code, Label label, final int hint) { if (hint == 0) { emitX86(code, label); } else { emitX86(code, label, Immediate.imm(hint)); } } }
public class class_name { void _emitJcc(INST_CODE code, Label label, final int hint) { if (hint == 0) { emitX86(code, label); // depends on control dependency: [if], data = [none] } else { emitX86(code, label, Immediate.imm(hint)); // depends on control dependency: [if], data = [(hint] } } }
public class class_name { public static ExpressionTree stripParentheses(ExpressionTree tree) { while (tree instanceof ParenthesizedTree) { tree = ((ParenthesizedTree) tree).getExpression(); } return tree; } }
public class class_name { public static ExpressionTree stripParentheses(ExpressionTree tree) { while (tree instanceof ParenthesizedTree) { tree = ((ParenthesizedTree) tree).getExpression(); // depends on control dependency: [while], data = [none] } return tree; } }
public class class_name { public void loadLibrary(Class<?> clazz, String name, LoadPolicy policy) { if (loaded.contains(name)) { return; } switch (policy) { case PREFER_SHIPPED: try { loadShippedLibrary(clazz, name); } catch (LoadLibraryException ex) { try { loadSystemLibrary(name); } catch (LoadLibraryException ex2) { throw ex; } } break; case PREFER_SYSTEM: try { loadSystemLibrary(name); } catch (LoadLibraryException ex) { try { loadShippedLibrary(clazz, name); } catch (LoadLibraryException ex2) { throw ex; } } break; case SHIPPED_ONLY: loadShippedLibrary(clazz, name); break; case SYSTEM_ONLY: loadSystemLibrary(name); break; default: throw new IllegalStateException("Unknown policy " + policy); } loaded.add(name); } }
public class class_name { public void loadLibrary(Class<?> clazz, String name, LoadPolicy policy) { if (loaded.contains(name)) { return; // depends on control dependency: [if], data = [none] } switch (policy) { case PREFER_SHIPPED: try { loadShippedLibrary(clazz, name); // depends on control dependency: [try], data = [none] } catch (LoadLibraryException ex) { try { loadSystemLibrary(name); // depends on control dependency: [try], data = [none] } catch (LoadLibraryException ex2) { throw ex; } // depends on control dependency: [catch], data = [none] } // depends on control dependency: [catch], data = [none] break; case PREFER_SYSTEM: try { loadSystemLibrary(name); // depends on control dependency: [try], data = [none] } catch (LoadLibraryException ex) { try { loadShippedLibrary(clazz, name); // depends on control dependency: [try], data = [none] } catch (LoadLibraryException ex2) { throw ex; } // depends on control dependency: [catch], data = [none] } // depends on control dependency: [catch], data = [none] break; case SHIPPED_ONLY: loadShippedLibrary(clazz, name); break; case SYSTEM_ONLY: loadSystemLibrary(name); break; default: throw new IllegalStateException("Unknown policy " + policy); } loaded.add(name); } }
public class class_name { public CalibratedCurves getCloneShifted(Map<String,Double> shifts) throws SolverException, CloneNotSupportedException { // Clone calibration specs, shifting the desired symbol List<CalibrationSpec> calibrationSpecsShifted = new ArrayList<>(); for(CalibrationSpec calibrationSpec : calibrationSpecs) { if(shifts.containsKey(calibrationSpec)) { calibrationSpecsShifted.add(calibrationSpec.getCloneShifted(shifts.get(calibrationSpec))); } else { calibrationSpecsShifted.add(calibrationSpec); } } return new CalibratedCurves(calibrationSpecsShifted, model, evaluationTime, calibrationAccuracy); } }
public class class_name { public CalibratedCurves getCloneShifted(Map<String,Double> shifts) throws SolverException, CloneNotSupportedException { // Clone calibration specs, shifting the desired symbol List<CalibrationSpec> calibrationSpecsShifted = new ArrayList<>(); for(CalibrationSpec calibrationSpec : calibrationSpecs) { if(shifts.containsKey(calibrationSpec)) { calibrationSpecsShifted.add(calibrationSpec.getCloneShifted(shifts.get(calibrationSpec))); // depends on control dependency: [if], data = [none] } else { calibrationSpecsShifted.add(calibrationSpec); // depends on control dependency: [if], data = [none] } } return new CalibratedCurves(calibrationSpecsShifted, model, evaluationTime, calibrationAccuracy); } }
public class class_name { private void addToArray(JsonNode j) { if (j instanceof ArrayNode) { for (Iterator<JsonNode> itr = ((ArrayNode) j).elements(); itr.hasNext();) { addToArray(itr.next()); } } else { ((ArrayNode) node).add(j); } } }
public class class_name { private void addToArray(JsonNode j) { if (j instanceof ArrayNode) { for (Iterator<JsonNode> itr = ((ArrayNode) j).elements(); itr.hasNext();) { addToArray(itr.next()); // depends on control dependency: [for], data = [itr] } } else { ((ArrayNode) node).add(j); // depends on control dependency: [if], data = [none] } } }
public class class_name { public void setItems(java.util.Collection<java.util.Map<String,AttributeValue>> items) { if (items == null) { this.items = null; return; } java.util.List<java.util.Map<String,AttributeValue>> itemsCopy = new java.util.ArrayList<java.util.Map<String,AttributeValue>>(items.size()); itemsCopy.addAll(items); this.items = itemsCopy; } }
public class class_name { public void setItems(java.util.Collection<java.util.Map<String,AttributeValue>> items) { if (items == null) { this.items = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } java.util.List<java.util.Map<String,AttributeValue>> itemsCopy = new java.util.ArrayList<java.util.Map<String,AttributeValue>>(items.size()); itemsCopy.addAll(items); this.items = itemsCopy; } }
public class class_name { public Collection<KeywordAnnotation> getKeywordAnnotationsByCountMetric( Integer minCount, Double minMetric) { final Integer count = minCount == null || minCount < 1 ? null : minCount; final Double metric = minMetric == null || minMetric <= 0 ? null : minMetric; if(count == null && metric == null){ return getKeywordAnnotations(); } else { return FluentIterable.from(getKeywordAnnotations()) .filter(new Predicate<KeywordAnnotation>() { @Override public boolean apply(KeywordAnnotation e) { boolean apply = true; if(metric != null && (e.getMetric() == null || metric > e.getMetric())){ apply = false; //filter because metric is too small } if(count != null && (count > e.getCount())){ apply = false; //filter because count is too low } return apply; } }).toList(); } } }
public class class_name { public Collection<KeywordAnnotation> getKeywordAnnotationsByCountMetric( Integer minCount, Double minMetric) { final Integer count = minCount == null || minCount < 1 ? null : minCount; final Double metric = minMetric == null || minMetric <= 0 ? null : minMetric; if(count == null && metric == null){ return getKeywordAnnotations(); // depends on control dependency: [if], data = [none] } else { return FluentIterable.from(getKeywordAnnotations()) .filter(new Predicate<KeywordAnnotation>() { @Override public boolean apply(KeywordAnnotation e) { boolean apply = true; if(metric != null && (e.getMetric() == null || metric > e.getMetric())){ apply = false; //filter because metric is too small // depends on control dependency: [if], data = [none] } if(count != null && (count > e.getCount())){ apply = false; //filter because count is too low // depends on control dependency: [if], data = [none] } return apply; } }).toList(); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void fireMusicSwapped(Music newMusic) { playing = false; for (int i=0;i<listeners.size();i++) { ((MusicListener) listeners.get(i)).musicSwapped(this, newMusic); } } }
public class class_name { private void fireMusicSwapped(Music newMusic) { playing = false; for (int i=0;i<listeners.size();i++) { ((MusicListener) listeners.get(i)).musicSwapped(this, newMusic); // depends on control dependency: [for], data = [i] } } }
public class class_name { @Override public void onClose(final AjaxRequestTarget _target) { final AbstractUIPageObject pageObject = (AbstractUIPageObject) this.pageReference.getPage() .getDefaultModelObject(); if (pageObject.isOpenedByPicker()) { final UIPicker picker = pageObject.getPicker(); pageObject.setPicker(null); if (picker.isExecuted()) { final Map<String, Object> map = picker.getReturnMap(); final boolean escape = escape(map); final StringBuilder js = new StringBuilder(); final String value = (String) map.get(EFapsKey.PICKER_VALUE.getKey()); if (value != null) { js.append("require(['dojo/dom'], function(dom){\n") .append("dom.byId('").append(this.targetMarkupId).append("').value ='") .append(escape ? StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(value)) : value).append("';").append("});"); } for (final String keyString : map.keySet()) { // if the map contains a key that is not defined in this // class it is assumed to be the name of a field if (!(EFapsKey.PICKER_JAVASCRIPT.getKey().equals(keyString) || EFapsKey.PICKER_DEACTIVATEESCAPE.getKey().equals(keyString) || EFapsKey.PICKER_VALUE.getKey().equals(keyString))) { final Object valueObj = map.get(keyString); final String strValue; final String strLabel; if (valueObj instanceof String[] && ((String[]) valueObj).length == 2) { strValue = escape && !((String[]) valueObj)[0].contains("Array(") ? StringEscapeUtils.escapeEcmaScript(((String[]) valueObj)[0]) : ((String[]) valueObj)[0]; strLabel = escape && !((String[]) valueObj)[0].contains("Array(") ? StringEscapeUtils.escapeEcmaScript(((String[]) valueObj)[1]) : ((String[]) valueObj)[1]; } else { strValue = escape && !String.valueOf(valueObj).contains("Array(") ? StringEscapeUtils.escapeEcmaScript(String.valueOf(valueObj)) : String.valueOf(valueObj); strLabel = null; } js.append("eFapsSetFieldValue(") .append(this.targetMarkupId == null ? 0 : "'" + this.targetMarkupId + "'").append(",'") .append(keyString).append("',") .append(strValue.contains("Array(") ? "" : "'") .append(strValue) .append(strValue.contains("Array(") ? "" : "'"); if (strLabel != null) { js.append(",'").append(strLabel).append("'"); } js.append(");"); } } if (map.containsKey(EFapsKey.PICKER_JAVASCRIPT.getKey())) { js.append(map.get(EFapsKey.PICKER_JAVASCRIPT.getKey())); } _target.prependJavaScript(js.toString()); picker.setExecuted(false); } } } }
public class class_name { @Override public void onClose(final AjaxRequestTarget _target) { final AbstractUIPageObject pageObject = (AbstractUIPageObject) this.pageReference.getPage() .getDefaultModelObject(); if (pageObject.isOpenedByPicker()) { final UIPicker picker = pageObject.getPicker(); pageObject.setPicker(null); if (picker.isExecuted()) { final Map<String, Object> map = picker.getReturnMap(); final boolean escape = escape(map); final StringBuilder js = new StringBuilder(); final String value = (String) map.get(EFapsKey.PICKER_VALUE.getKey()); if (value != null) { js.append("require(['dojo/dom'], function(dom){\n") .append("dom.byId('").append(this.targetMarkupId).append("').value ='") .append(escape ? StringEscapeUtils.escapeEcmaScript(StringEscapeUtils.escapeHtml4(value)) : value).append("';").append("});"); // depends on control dependency: [if], data = [none] } for (final String keyString : map.keySet()) { // if the map contains a key that is not defined in this // class it is assumed to be the name of a field if (!(EFapsKey.PICKER_JAVASCRIPT.getKey().equals(keyString) || EFapsKey.PICKER_DEACTIVATEESCAPE.getKey().equals(keyString) || EFapsKey.PICKER_VALUE.getKey().equals(keyString))) { final Object valueObj = map.get(keyString); final String strValue; final String strLabel; if (valueObj instanceof String[] && ((String[]) valueObj).length == 2) { strValue = escape && !((String[]) valueObj)[0].contains("Array(") ? StringEscapeUtils.escapeEcmaScript(((String[]) valueObj)[0]) : ((String[]) valueObj)[0]; strLabel = escape && !((String[]) valueObj)[0].contains("Array(") ? StringEscapeUtils.escapeEcmaScript(((String[]) valueObj)[1]) : ((String[]) valueObj)[1]; } else { strValue = escape && !String.valueOf(valueObj).contains("Array(") ? StringEscapeUtils.escapeEcmaScript(String.valueOf(valueObj)) : String.valueOf(valueObj); strLabel = null; } js.append("eFapsSetFieldValue(") .append(this.targetMarkupId == null ? 0 : "'" + this.targetMarkupId + "'").append(",'") .append(keyString).append("',") .append(strValue.contains("Array(") ? "" : "'") .append(strValue) .append(strValue.contains("Array(") ? "" : "'"); if (strLabel != null) { js.append(",'").append(strLabel).append("'"); } js.append(");"); } } if (map.containsKey(EFapsKey.PICKER_JAVASCRIPT.getKey())) { js.append(map.get(EFapsKey.PICKER_JAVASCRIPT.getKey())); } _target.prependJavaScript(js.toString()); picker.setExecuted(false); } } } }
public class class_name { public DecisionDefinitionEntity getPreviousDefinition() { DecisionDefinitionEntity previousDecisionDefinition = null; String previousDecisionDefinitionId = getPreviousDecisionDefinitionId(); if (previousDecisionDefinitionId != null) { previousDecisionDefinition = loadDecisionDefinition(previousDecisionDefinitionId); if (previousDecisionDefinition == null) { resetPreviousDecisionDefinitionId(); previousDecisionDefinitionId = getPreviousDecisionDefinitionId(); if (previousDecisionDefinitionId != null) { previousDecisionDefinition = loadDecisionDefinition(previousDecisionDefinitionId); } } } return previousDecisionDefinition; } }
public class class_name { public DecisionDefinitionEntity getPreviousDefinition() { DecisionDefinitionEntity previousDecisionDefinition = null; String previousDecisionDefinitionId = getPreviousDecisionDefinitionId(); if (previousDecisionDefinitionId != null) { previousDecisionDefinition = loadDecisionDefinition(previousDecisionDefinitionId); // depends on control dependency: [if], data = [(previousDecisionDefinitionId] if (previousDecisionDefinition == null) { resetPreviousDecisionDefinitionId(); // depends on control dependency: [if], data = [none] previousDecisionDefinitionId = getPreviousDecisionDefinitionId(); // depends on control dependency: [if], data = [none] if (previousDecisionDefinitionId != null) { previousDecisionDefinition = loadDecisionDefinition(previousDecisionDefinitionId); // depends on control dependency: [if], data = [(previousDecisionDefinitionId] } } } return previousDecisionDefinition; } }
public class class_name { static final byte[] fromHex(final String hex) throws ParseException { final int len = hex.length(); final byte[] out = new byte[len / 2]; for (int i = 0, j = 0; i < len; i++) { char c = hex.charAt(i); int v = 0; if ((c >= '0') && (c <= '9')) { v = (c - '0'); } else if ((c >= 'A') && (c <= 'F')) { v = (c - 'A') + 0xA; } else if ((c >= 'a') && (c <= 'f')) { v = (c - 'a') + 0xA; } else { throw new ParseException("Invalid char", j); } if ((i & 1) == 0) { out[j] |= (v << 4); } else { out[j++] |= v; } } return out; } }
public class class_name { static final byte[] fromHex(final String hex) throws ParseException { final int len = hex.length(); final byte[] out = new byte[len / 2]; for (int i = 0, j = 0; i < len; i++) { char c = hex.charAt(i); int v = 0; if ((c >= '0') && (c <= '9')) { v = (c - '0'); // depends on control dependency: [if], data = [none] } else if ((c >= 'A') && (c <= 'F')) { v = (c - 'A') + 0xA; // depends on control dependency: [if], data = [none] } else if ((c >= 'a') && (c <= 'f')) { v = (c - 'a') + 0xA; // depends on control dependency: [if], data = [none] } else { throw new ParseException("Invalid char", j); } if ((i & 1) == 0) { out[j] |= (v << 4); // depends on control dependency: [if], data = [none] } else { out[j++] |= v; // depends on control dependency: [if], data = [none] } } return out; } }
public class class_name { public int[] feature(double[] x) { if (x.length != attributes.length) { throw new IllegalArgumentException(String.format("Invalid feature vector size %d, expected %d", x.length, attributes.length)); } int[] features = new int[attributes.length]; for (int i = 0; i < features.length; i++) { int f = (int) x[i]; if (Math.floor(x[i]) != x[i] || f < 0 || f >= attributes[i].size()) { throw new IllegalArgumentException(String.format("Invalid value of attribute %s: %d", attributes[i].toString(), f)); } features[i] = f + base[i]; } return features; } }
public class class_name { public int[] feature(double[] x) { if (x.length != attributes.length) { throw new IllegalArgumentException(String.format("Invalid feature vector size %d, expected %d", x.length, attributes.length)); } int[] features = new int[attributes.length]; for (int i = 0; i < features.length; i++) { int f = (int) x[i]; if (Math.floor(x[i]) != x[i] || f < 0 || f >= attributes[i].size()) { throw new IllegalArgumentException(String.format("Invalid value of attribute %s: %d", attributes[i].toString(), f)); } features[i] = f + base[i]; // depends on control dependency: [for], data = [i] } return features; } }
public class class_name { private void cleanUpAction() { try { m_model.deleteDescriptorIfNecessary(); } catch (CmsException e) { LOG.error(m_messages.key(Messages.ERR_DELETING_DESCRIPTOR_0), e); } // unlock resource m_model.unlock(); } }
public class class_name { private void cleanUpAction() { try { m_model.deleteDescriptorIfNecessary(); // depends on control dependency: [try], data = [none] } catch (CmsException e) { LOG.error(m_messages.key(Messages.ERR_DELETING_DESCRIPTOR_0), e); } // depends on control dependency: [catch], data = [none] // unlock resource m_model.unlock(); } }
public class class_name { @Deprecated public void dump(java.io.PrintStream out) { if (out == null) { out = System.out; } this.fRData.dump(out); } }
public class class_name { @Deprecated public void dump(java.io.PrintStream out) { if (out == null) { out = System.out; // depends on control dependency: [if], data = [none] } this.fRData.dump(out); } }
public class class_name { public void addEmitter(ConfigurableEmitter emitter) { emitters.add(emitter); if (system == null) { waiting.add(emitter); } else { system.addEmitter(emitter); } } }
public class class_name { public void addEmitter(ConfigurableEmitter emitter) { emitters.add(emitter); if (system == null) { waiting.add(emitter); // depends on control dependency: [if], data = [none] } else { system.addEmitter(emitter); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public Double getProgress() { if ((shardLogQuery.getStartTimeUsec() == null) || (shardLogQuery.getEndTimeUsec() == null)) { return null; } else if (lastLog == null) { return 0.0; } else { long processedTimeUsec = shardLogQuery.getEndTimeUsec() - lastLog.getEndTimeUsec(); long totalTimeUsec = shardLogQuery.getEndTimeUsec() - shardLogQuery.getStartTimeUsec(); return ((double) processedTimeUsec / totalTimeUsec); } } }
public class class_name { @Override public Double getProgress() { if ((shardLogQuery.getStartTimeUsec() == null) || (shardLogQuery.getEndTimeUsec() == null)) { return null; // depends on control dependency: [if], data = [none] } else if (lastLog == null) { return 0.0; // depends on control dependency: [if], data = [none] } else { long processedTimeUsec = shardLogQuery.getEndTimeUsec() - lastLog.getEndTimeUsec(); long totalTimeUsec = shardLogQuery.getEndTimeUsec() - shardLogQuery.getStartTimeUsec(); return ((double) processedTimeUsec / totalTimeUsec); // depends on control dependency: [if], data = [none] } } }
public class class_name { private int reverseIndex(int k) { if (reverseIndexMap == null) { reverseIndexMap = new int[attributes.getLength()]; for (int i = 0, len = indexSet.size(); i < len; i++) reverseIndexMap[indexSet.get(i)] = i + 1; } return reverseIndexMap[k] - 1; } }
public class class_name { private int reverseIndex(int k) { if (reverseIndexMap == null) { reverseIndexMap = new int[attributes.getLength()]; // depends on control dependency: [if], data = [none] for (int i = 0, len = indexSet.size(); i < len; i++) reverseIndexMap[indexSet.get(i)] = i + 1; } return reverseIndexMap[k] - 1; } }
public class class_name { public void marshall(DescribeExclusionsRequest describeExclusionsRequest, ProtocolMarshaller protocolMarshaller) { if (describeExclusionsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeExclusionsRequest.getExclusionArns(), EXCLUSIONARNS_BINDING); protocolMarshaller.marshall(describeExclusionsRequest.getLocale(), LOCALE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DescribeExclusionsRequest describeExclusionsRequest, ProtocolMarshaller protocolMarshaller) { if (describeExclusionsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(describeExclusionsRequest.getExclusionArns(), EXCLUSIONARNS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(describeExclusionsRequest.getLocale(), LOCALE_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public String getDescription() { if (fullDescription == null) { final String temp; if (description == null || isExtensionListInDescription()) { final StringBuilder sb = new StringBuilder(); sb.append(description == null ? "(" : description + " ("); // build the description from the extension list boolean first = true; for (final String filterKey : filters.keySet()) { if (first) { first = false; } else { sb.append(", "); } sb.append("*."); sb.append(filterKey); } sb.append(')'); temp = sb.toString(); } else { temp = description; } // Command Query Separation with lazy initialization : set fullDescription only once fullDescription = temp; } return fullDescription; } }
public class class_name { @Override public String getDescription() { if (fullDescription == null) { final String temp; if (description == null || isExtensionListInDescription()) { final StringBuilder sb = new StringBuilder(); sb.append(description == null ? "(" : description + " ("); // depends on control dependency: [if], data = [(description] // build the description from the extension list boolean first = true; for (final String filterKey : filters.keySet()) { if (first) { first = false; // depends on control dependency: [if], data = [none] } else { sb.append(", "); // depends on control dependency: [if], data = [none] } sb.append("*."); // depends on control dependency: [for], data = [none] sb.append(filterKey); // depends on control dependency: [for], data = [filterKey] } sb.append(')'); // depends on control dependency: [if], data = [none] temp = sb.toString(); // depends on control dependency: [if], data = [none] } else { temp = description; // depends on control dependency: [if], data = [none] } // Command Query Separation with lazy initialization : set fullDescription only once fullDescription = temp; // depends on control dependency: [if], data = [none] } return fullDescription; } }
public class class_name { public void setUserProfiles(java.util.Collection<UserProfile> userProfiles) { if (userProfiles == null) { this.userProfiles = null; return; } this.userProfiles = new com.amazonaws.internal.SdkInternalList<UserProfile>(userProfiles); } }
public class class_name { public void setUserProfiles(java.util.Collection<UserProfile> userProfiles) { if (userProfiles == null) { this.userProfiles = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.userProfiles = new com.amazonaws.internal.SdkInternalList<UserProfile>(userProfiles); } }
public class class_name { public final void garbageCollect() { if (timeout != null && !data.isEmpty()) { long boundaryTime = System.currentTimeMillis() - (garbageTimeout); data.removeIf((MtasSolrStatus solrStatus) -> solrStatus.finished() || solrStatus.getStartTime() < boundaryTime); index.clear(); data.forEach((MtasSolrStatus solrStatus) -> index.put(solrStatus.key(), solrStatus)); } } }
public class class_name { public final void garbageCollect() { if (timeout != null && !data.isEmpty()) { long boundaryTime = System.currentTimeMillis() - (garbageTimeout); data.removeIf((MtasSolrStatus solrStatus) -> solrStatus.finished() || solrStatus.getStartTime() < boundaryTime); // depends on control dependency: [if], data = [none] index.clear(); // depends on control dependency: [if], data = [none] data.forEach((MtasSolrStatus solrStatus) -> index.put(solrStatus.key(), solrStatus)); // depends on control dependency: [if], data = [none] } } }
public class class_name { public final ProtoParser.ignore_block_return ignore_block() throws RecognitionException { ProtoParser.ignore_block_return retval = new ProtoParser.ignore_block_return(); retval.start = input.LT(1); Object root_0 = null; Token LEFTCURLY166=null; Token RIGHTCURLY168=null; ProtoParser.ignore_block_body_return ignore_block_body167 = null; Object LEFTCURLY166_tree=null; Object RIGHTCURLY168_tree=null; try { // com/dyuproject/protostuff/parser/ProtoParser.g:642:5: ( LEFTCURLY ( ignore_block_body )* RIGHTCURLY ) // com/dyuproject/protostuff/parser/ProtoParser.g:642:9: LEFTCURLY ( ignore_block_body )* RIGHTCURLY { root_0 = (Object)adaptor.nil(); LEFTCURLY166=(Token)match(input,LEFTCURLY,FOLLOW_LEFTCURLY_in_ignore_block2623); if (state.failed) return retval; if ( state.backtracking==0 ) { LEFTCURLY166_tree = (Object)adaptor.create(LEFTCURLY166); adaptor.addChild(root_0, LEFTCURLY166_tree); } // com/dyuproject/protostuff/parser/ProtoParser.g:642:19: ( ignore_block_body )* loop38: do { int alt38=2; switch ( input.LA(1) ) { case ASSIGN: case AT: case LEFTCURLY: case LEFTPAREN: case RIGHTPAREN: case LEFTSQUARE: case RIGHTSQUARE: case SEMICOLON: case COMMA: case PLUS: case MINUS: case TO: case TRUE: case FALSE: case PKG: case SYNTAX: case IMPORT: case OPTION: case MESSAGE: case SERVICE: case ENUM: case REQUIRED: case OPTIONAL: case REPEATED: case EXTENSIONS: case EXTEND: case GROUP: case RPC: case RETURNS: case INT32: case INT64: case UINT32: case UINT64: case SINT32: case SINT64: case FIXED32: case FIXED64: case SFIXED32: case SFIXED64: case FLOAT: case DOUBLE: case BOOL: case STRING: case BYTES: case DEFAULT: case MAX: case VOID: case ID: case FULL_ID: case NUMINT: case EXP: case NUMFLOAT: case NUMDOUBLE: case HEX_DIGIT: case HEX: case OCTAL: case COMMENT: case WS: case ESC_SEQ: case STRING_LITERAL: case UNICODE_ESC: case OCTAL_ESC: { alt38=1; } break; } switch (alt38) { case 1 : // com/dyuproject/protostuff/parser/ProtoParser.g:642:19: ignore_block_body { pushFollow(FOLLOW_ignore_block_body_in_ignore_block2625); ignore_block_body167=ignore_block_body(); state._fsp--; if (state.failed) return retval; if ( state.backtracking==0 ) adaptor.addChild(root_0, ignore_block_body167.getTree()); } break; default : break loop38; } } while (true); RIGHTCURLY168=(Token)match(input,RIGHTCURLY,FOLLOW_RIGHTCURLY_in_ignore_block2628); if (state.failed) return retval; if ( state.backtracking==0 ) { RIGHTCURLY168_tree = (Object)adaptor.create(RIGHTCURLY168); adaptor.addChild(root_0, RIGHTCURLY168_tree); } } retval.stop = input.LT(-1); if ( state.backtracking==0 ) { retval.tree = (Object)adaptor.rulePostProcessing(root_0); adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); } } catch (RecognitionException re) { reportError(re); recover(input,re); retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); } finally { } return retval; } }
public class class_name { public final ProtoParser.ignore_block_return ignore_block() throws RecognitionException { ProtoParser.ignore_block_return retval = new ProtoParser.ignore_block_return(); retval.start = input.LT(1); Object root_0 = null; Token LEFTCURLY166=null; Token RIGHTCURLY168=null; ProtoParser.ignore_block_body_return ignore_block_body167 = null; Object LEFTCURLY166_tree=null; Object RIGHTCURLY168_tree=null; try { // com/dyuproject/protostuff/parser/ProtoParser.g:642:5: ( LEFTCURLY ( ignore_block_body )* RIGHTCURLY ) // com/dyuproject/protostuff/parser/ProtoParser.g:642:9: LEFTCURLY ( ignore_block_body )* RIGHTCURLY { root_0 = (Object)adaptor.nil(); LEFTCURLY166=(Token)match(input,LEFTCURLY,FOLLOW_LEFTCURLY_in_ignore_block2623); if (state.failed) return retval; if ( state.backtracking==0 ) { LEFTCURLY166_tree = (Object)adaptor.create(LEFTCURLY166); // depends on control dependency: [if], data = [none] adaptor.addChild(root_0, LEFTCURLY166_tree); // depends on control dependency: [if], data = [none] } // com/dyuproject/protostuff/parser/ProtoParser.g:642:19: ( ignore_block_body )* loop38: do { int alt38=2; switch ( input.LA(1) ) { case ASSIGN: case AT: case LEFTCURLY: case LEFTPAREN: case RIGHTPAREN: case LEFTSQUARE: case RIGHTSQUARE: case SEMICOLON: case COMMA: case PLUS: case MINUS: case TO: case TRUE: case FALSE: case PKG: case SYNTAX: case IMPORT: case OPTION: case MESSAGE: case SERVICE: case ENUM: case REQUIRED: case OPTIONAL: case REPEATED: case EXTENSIONS: case EXTEND: case GROUP: case RPC: case RETURNS: case INT32: case INT64: case UINT32: case UINT64: case SINT32: case SINT64: case FIXED32: case FIXED64: case SFIXED32: case SFIXED64: case FLOAT: case DOUBLE: case BOOL: case STRING: case BYTES: case DEFAULT: case MAX: case VOID: case ID: case FULL_ID: case NUMINT: case EXP: case NUMFLOAT: case NUMDOUBLE: case HEX_DIGIT: case HEX: case OCTAL: case COMMENT: case WS: case ESC_SEQ: case STRING_LITERAL: case UNICODE_ESC: case OCTAL_ESC: { alt38=1; } break; } switch (alt38) { case 1 : // com/dyuproject/protostuff/parser/ProtoParser.g:642:19: ignore_block_body { pushFollow(FOLLOW_ignore_block_body_in_ignore_block2625); ignore_block_body167=ignore_block_body(); state._fsp--; if (state.failed) return retval; if ( state.backtracking==0 ) adaptor.addChild(root_0, ignore_block_body167.getTree()); } break; default : break loop38; } } while (true); RIGHTCURLY168=(Token)match(input,RIGHTCURLY,FOLLOW_RIGHTCURLY_in_ignore_block2628); if (state.failed) return retval; if ( state.backtracking==0 ) { RIGHTCURLY168_tree = (Object)adaptor.create(RIGHTCURLY168); adaptor.addChild(root_0, RIGHTCURLY168_tree); } } retval.stop = input.LT(-1); if ( state.backtracking==0 ) { retval.tree = (Object)adaptor.rulePostProcessing(root_0); adaptor.setTokenBoundaries(retval.tree, retval.start, retval.stop); } } catch (RecognitionException re) { reportError(re); recover(input,re); retval.tree = (Object)adaptor.errorNode(input, retval.start, input.LT(-1), re); } finally { } return retval; } }
public class class_name { public static ImageTransformProcess fromYaml(String yaml) { try { return JsonMappers.getMapperYaml().readValue(yaml, ImageTransformProcess.class); } catch (IOException e) { //TODO better exceptions throw new RuntimeException(e); } } }
public class class_name { public static ImageTransformProcess fromYaml(String yaml) { try { return JsonMappers.getMapperYaml().readValue(yaml, ImageTransformProcess.class); // depends on control dependency: [try], data = [none] } catch (IOException e) { //TODO better exceptions throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void recreateUISharedContexts(Session session) { uiContexts.clear(); for (Context context : session.getContexts()) { Context uiContext = context.duplicate(); uiContexts.put(context.getIndex(), uiContext); } } }
public class class_name { public void recreateUISharedContexts(Session session) { uiContexts.clear(); for (Context context : session.getContexts()) { Context uiContext = context.duplicate(); uiContexts.put(context.getIndex(), uiContext); // depends on control dependency: [for], data = [context] } } }
public class class_name { public boolean detectTierRichCss() { boolean result = false; //The following devices are explicitly ok. //Note: 'High' BlackBerry devices ONLY if (detectMobileQuick()) { //Exclude iPhone Tier and e-Ink Kindle devices. if (!detectTierIphone() && !detectKindle()) { //The following devices are explicitly ok. //Note: 'High' BlackBerry devices ONLY //Older Windows 'Mobile' isn't good enough for iPhone Tier. if (detectWebkit() || detectS60OssBrowser() || detectBlackBerryHigh() || detectWindowsMobile() || (userAgent.indexOf(engineTelecaQ) != -1)) { result = true; } // if detectWebkit() } //if !detectTierIphone() } //if detectMobileQuick() return result; } }
public class class_name { public boolean detectTierRichCss() { boolean result = false; //The following devices are explicitly ok. //Note: 'High' BlackBerry devices ONLY if (detectMobileQuick()) { //Exclude iPhone Tier and e-Ink Kindle devices. if (!detectTierIphone() && !detectKindle()) { //The following devices are explicitly ok. //Note: 'High' BlackBerry devices ONLY //Older Windows 'Mobile' isn't good enough for iPhone Tier. if (detectWebkit() || detectS60OssBrowser() || detectBlackBerryHigh() || detectWindowsMobile() || (userAgent.indexOf(engineTelecaQ) != -1)) { result = true; // depends on control dependency: [if], data = [none] } // if detectWebkit() } //if !detectTierIphone() } //if detectMobileQuick() return result; } }
public class class_name { public MediaType withParameters(Map<String, ? extends Iterable<String>> parameters) { final ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder(); for (Map.Entry<String, ? extends Iterable<String>> e : parameters.entrySet()) { final String k = e.getKey(); for (String v : e.getValue()) { builder.put(k, v); } } return create(type, subtype, builder.build()); } }
public class class_name { public MediaType withParameters(Map<String, ? extends Iterable<String>> parameters) { final ImmutableListMultimap.Builder<String, String> builder = ImmutableListMultimap.builder(); for (Map.Entry<String, ? extends Iterable<String>> e : parameters.entrySet()) { final String k = e.getKey(); for (String v : e.getValue()) { builder.put(k, v); // depends on control dependency: [for], data = [v] } } return create(type, subtype, builder.build()); } }
public class class_name { private static char firstNonWhitespace( StringBuilder builder ) { for( int i = 0; i < builder.length(); i++ ) { if( !Character.isWhitespace( builder.charAt( i ) ) ) { return builder.charAt( i ); } } return ' '; } }
public class class_name { private static char firstNonWhitespace( StringBuilder builder ) { for( int i = 0; i < builder.length(); i++ ) { if( !Character.isWhitespace( builder.charAt( i ) ) ) { return builder.charAt( i ); // depends on control dependency: [if], data = [none] } } return ' '; } }
public class class_name { public void decode(ByteBuf in, List<Object> out, ByteBufAllocator allocator) { while (in.isReadable()) { final int startIndex = in.readerIndex(); int lastReadIndex = in.forEachByte(LINE_END_FINDER); if (-1 == lastReadIndex) { // Buffer end without line termination if (null == incompleteBuffer) { incompleteBuffer = allocator.buffer(DEFAULT_INITIAL_CAPACITY, maxLineLength); } /*Add to the incomplete buffer*/ incompleteBuffer.ensureWritable(in.readableBytes()); incompleteBuffer.writeBytes(in); } else { ByteBuf lineBuf = in.readSlice(lastReadIndex - startIndex); String line; if (null != incompleteBuffer) { line = incompleteBuffer.toString(encoding) + lineBuf.toString(encoding); incompleteBuffer.release(); incompleteBuffer = null; } else { line = lineBuf.toString(encoding); } out.add(line); in.skipBytes(1); // Skip new line character. } } } }
public class class_name { public void decode(ByteBuf in, List<Object> out, ByteBufAllocator allocator) { while (in.isReadable()) { final int startIndex = in.readerIndex(); int lastReadIndex = in.forEachByte(LINE_END_FINDER); if (-1 == lastReadIndex) { // Buffer end without line termination if (null == incompleteBuffer) { incompleteBuffer = allocator.buffer(DEFAULT_INITIAL_CAPACITY, maxLineLength); // depends on control dependency: [if], data = [none] } /*Add to the incomplete buffer*/ incompleteBuffer.ensureWritable(in.readableBytes()); // depends on control dependency: [if], data = [none] incompleteBuffer.writeBytes(in); // depends on control dependency: [if], data = [none] } else { ByteBuf lineBuf = in.readSlice(lastReadIndex - startIndex); String line; if (null != incompleteBuffer) { line = incompleteBuffer.toString(encoding) + lineBuf.toString(encoding); // depends on control dependency: [if], data = [none] incompleteBuffer.release(); // depends on control dependency: [if], data = [none] incompleteBuffer = null; // depends on control dependency: [if], data = [none] } else { line = lineBuf.toString(encoding); // depends on control dependency: [if], data = [none] } out.add(line); // depends on control dependency: [if], data = [none] in.skipBytes(1); // Skip new line character. // depends on control dependency: [if], data = [none] } } } }
public class class_name { @Override public void doSerialize( JsonWriter writer, T[][] values, JsonSerializationContext ctx, JsonSerializerParameters params ) { if ( !ctx.isWriteEmptyJsonArrays() && values.length == 0 ) { writer.cancelName(); return; } writer.beginArray(); for ( T[] array : values ) { writer.beginArray(); for ( T value : array ) { serializer.serialize( writer, value, ctx, params ); } writer.endArray(); } writer.endArray(); } }
public class class_name { @Override public void doSerialize( JsonWriter writer, T[][] values, JsonSerializationContext ctx, JsonSerializerParameters params ) { if ( !ctx.isWriteEmptyJsonArrays() && values.length == 0 ) { writer.cancelName(); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } writer.beginArray(); for ( T[] array : values ) { writer.beginArray(); // depends on control dependency: [for], data = [none] for ( T value : array ) { serializer.serialize( writer, value, ctx, params ); // depends on control dependency: [for], data = [value] } writer.endArray(); // depends on control dependency: [for], data = [none] } writer.endArray(); } }
public class class_name { @Override @Deprecated public void dump(PrintStream out, int indent) { dumpFirstPart(out, indent); if (realViewGroup.getChildCount() > 0) { out.println(">"); for (int i = 0; i < realViewGroup.getChildCount(); i++) { View child = realViewGroup.getChildAt(i); ShadowView shadowChild = Shadow.extract(child); shadowChild.dump(out, indent + 2); } dumpIndent(out, indent); out.println("</" + realView.getClass().getSimpleName() + ">"); } else { out.println("/>"); } } }
public class class_name { @Override @Deprecated public void dump(PrintStream out, int indent) { dumpFirstPart(out, indent); if (realViewGroup.getChildCount() > 0) { out.println(">"); // depends on control dependency: [if], data = [none] for (int i = 0; i < realViewGroup.getChildCount(); i++) { View child = realViewGroup.getChildAt(i); ShadowView shadowChild = Shadow.extract(child); shadowChild.dump(out, indent + 2); // depends on control dependency: [for], data = [none] } dumpIndent(out, indent); // depends on control dependency: [if], data = [none] out.println("</" + realView.getClass().getSimpleName() + ">"); // depends on control dependency: [if], data = [none] } else { out.println("/>"); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void setJobId(String id) { if (id != null && id.startsWith("job_") && id.length() > 4) { this.jobNumber = id.substring(4); } } }
public class class_name { private void setJobId(String id) { if (id != null && id.startsWith("job_") && id.length() > 4) { this.jobNumber = id.substring(4); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public Object get(Object key) { if (INSTALL_KERNEL_INIT_CODE.equals(key)) { return initKernel(); } else if (ACTION_RESULT.equals(key)) { if (actionType.equals(ActionType.install)) { Boolean localESAInstall = (Boolean) data.get(INSTALL_LOCAL_ESA); if (localESAInstall == null || !localESAInstall) { return install(); } else { return localESAInstall(); } } else if (actionType.equals(ActionType.uninstall)) { return uninstall(); } else if (actionType.equals(ActionType.resolve)) { Boolean localESAInstall = (Boolean) data.get(INSTALL_LOCAL_ESA); if (localESAInstall == null || !localESAInstall) { return data.get(ACTION_RESULT); } else { return singleFileResolve(); } } } else if (PROGRESS_MONITOR_SIZE.equals(key)) { return getMonitorSize(); } return data.get(key); } }
public class class_name { @Override public Object get(Object key) { if (INSTALL_KERNEL_INIT_CODE.equals(key)) { return initKernel(); // depends on control dependency: [if], data = [none] } else if (ACTION_RESULT.equals(key)) { if (actionType.equals(ActionType.install)) { Boolean localESAInstall = (Boolean) data.get(INSTALL_LOCAL_ESA); if (localESAInstall == null || !localESAInstall) { return install(); // depends on control dependency: [if], data = [none] } else { return localESAInstall(); // depends on control dependency: [if], data = [none] } } else if (actionType.equals(ActionType.uninstall)) { return uninstall(); // depends on control dependency: [if], data = [none] } else if (actionType.equals(ActionType.resolve)) { Boolean localESAInstall = (Boolean) data.get(INSTALL_LOCAL_ESA); if (localESAInstall == null || !localESAInstall) { return data.get(ACTION_RESULT); // depends on control dependency: [if], data = [none] } else { return singleFileResolve(); // depends on control dependency: [if], data = [none] } } } else if (PROGRESS_MONITOR_SIZE.equals(key)) { return getMonitorSize(); // depends on control dependency: [if], data = [none] } return data.get(key); } }
public class class_name { private void shift() { if ((int)word ==0) {wordShift +=32; word = word >>>32; } if ((word & 0x0000FFFF) == 0) { wordShift +=16; word >>>=16; } if ((word & 0x000000FF) == 0) { wordShift +=8; word >>>=8; } indexArray = bitlist[(int)word & 0xff]; } }
public class class_name { private void shift() { if ((int)word ==0) {wordShift +=32; word = word >>>32; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] if ((word & 0x0000FFFF) == 0) { wordShift +=16; word >>>=16; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] if ((word & 0x000000FF) == 0) { wordShift +=8; word >>>=8; } // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] indexArray = bitlist[(int)word & 0xff]; } }
public class class_name { @Override public EClass getIfcTextureVertex() { if (ifcTextureVertexEClass == null) { ifcTextureVertexEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(720); } return ifcTextureVertexEClass; } }
public class class_name { @Override public EClass getIfcTextureVertex() { if (ifcTextureVertexEClass == null) { ifcTextureVertexEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(720); // depends on control dependency: [if], data = [none] } return ifcTextureVertexEClass; } }
public class class_name { public JsonToken nextToken() { int c = buffer.read(); while (c != -1 && Character.isWhitespace(c)) { c = buffer.read(); } if (c == -1) { return new JsonToken(JsonTokenType.END_OF_FILE, "<eof>"); } switch (c) { case '{': return new JsonToken(JsonTokenType.BEGIN_OBJECT, "{"); case '}': return new JsonToken(JsonTokenType.END_OBJECT, "}"); case '[': return new JsonToken(JsonTokenType.BEGIN_ARRAY, "["); case ']': return new JsonToken(JsonTokenType.END_ARRAY, "]"); case '(': return new JsonToken(JsonTokenType.LEFT_PAREN, "("); case ')': return new JsonToken(JsonTokenType.RIGHT_PAREN, ")"); case ':': return new JsonToken(JsonTokenType.COLON, ":"); case ',': return new JsonToken(JsonTokenType.COMMA, ","); case '\'': case '"': return scanString((char) c); case '/': return scanRegularExpression(); default: if (c == '-' || Character.isDigit(c)) { return scanNumber((char) c); } else if (c == '$' || c == '_' || Character.isLetter(c)) { return scanUnquotedString((char) c); } else { int position = buffer.getPosition(); buffer.unread(c); throw new JsonParseException("Invalid JSON input. Position: %d. Character: '%c'.", position, c); } } } }
public class class_name { public JsonToken nextToken() { int c = buffer.read(); while (c != -1 && Character.isWhitespace(c)) { c = buffer.read(); // depends on control dependency: [while], data = [none] } if (c == -1) { return new JsonToken(JsonTokenType.END_OF_FILE, "<eof>"); // depends on control dependency: [if], data = [none] } switch (c) { case '{': return new JsonToken(JsonTokenType.BEGIN_OBJECT, "{"); case '}': return new JsonToken(JsonTokenType.END_OBJECT, "}"); case '[': return new JsonToken(JsonTokenType.BEGIN_ARRAY, "["); case ']': return new JsonToken(JsonTokenType.END_ARRAY, "]"); case '(': return new JsonToken(JsonTokenType.LEFT_PAREN, "("); case ')': return new JsonToken(JsonTokenType.RIGHT_PAREN, ")"); case ':': return new JsonToken(JsonTokenType.COLON, ":"); case ',': return new JsonToken(JsonTokenType.COMMA, ","); case '\'': case '"': return scanString((char) c); case '/': return scanRegularExpression(); default: if (c == '-' || Character.isDigit(c)) { return scanNumber((char) c); // depends on control dependency: [if], data = [(c] } else if (c == '$' || c == '_' || Character.isLetter(c)) { return scanUnquotedString((char) c); // depends on control dependency: [if], data = [(c] } else { int position = buffer.getPosition(); buffer.unread(c); // depends on control dependency: [if], data = [(c] throw new JsonParseException("Invalid JSON input. Position: %d. Character: '%c'.", position, c); } } } }
public class class_name { private static final void setInit(final CClassLoader loader) { loader.booInit = true; for (final Iterator it = loader.getChildLoader(); it.hasNext();) { final Map.Entry entry = (Map.Entry) it.next(); CClassLoader.setInit((CClassLoader) entry.getValue()); } } }
public class class_name { private static final void setInit(final CClassLoader loader) { loader.booInit = true; for (final Iterator it = loader.getChildLoader(); it.hasNext();) { final Map.Entry entry = (Map.Entry) it.next(); CClassLoader.setInit((CClassLoader) entry.getValue()); // depends on control dependency: [for], data = [none] } } }
public class class_name { public boolean isFullRank() { // Find maximum: double t = 0.; for(int j = 0; j < n; j++) { double v = Rdiag[j]; if(v == 0) { return false; } v = Math.abs(v); t = v > t ? v : t; } t *= 1e-15; // Numerical precision threshold. for(int j = 1; j < n; j++) { if(Math.abs(Rdiag[j]) < t) { return false; } } return true; } }
public class class_name { public boolean isFullRank() { // Find maximum: double t = 0.; for(int j = 0; j < n; j++) { double v = Rdiag[j]; if(v == 0) { return false; // depends on control dependency: [if], data = [none] } v = Math.abs(v); // depends on control dependency: [for], data = [none] t = v > t ? v : t; // depends on control dependency: [for], data = [none] } t *= 1e-15; // Numerical precision threshold. for(int j = 1; j < n; j++) { if(Math.abs(Rdiag[j]) < t) { return false; // depends on control dependency: [if], data = [none] } } return true; } }
public class class_name { void handleAgentLogoffEvent(AgentLogoffEvent event) { AsteriskAgentImpl agent = getAgentByAgentId("Agent/" + event.getAgent()); if (agent == null) { logger.error("Ignored AgentLogoffEvent for unknown agent " + event.getAgent() + ". Agents: " + agents.values().toString()); return; } agent.updateState(AgentState.AGENT_LOGGEDOFF); } }
public class class_name { void handleAgentLogoffEvent(AgentLogoffEvent event) { AsteriskAgentImpl agent = getAgentByAgentId("Agent/" + event.getAgent()); if (agent == null) { logger.error("Ignored AgentLogoffEvent for unknown agent " + event.getAgent() + ". Agents: " + agents.values().toString()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } agent.updateState(AgentState.AGENT_LOGGEDOFF); } }
public class class_name { public static void accept(SARLQuickfixProvider provider, Issue issue, IssueResolutionAcceptor acceptor, String label, String[] operationUris) { if (operationUris.length > 0) { final MissedMethodAddModification modification = new MissedMethodAddModification(operationUris); provider.getInjector().injectMembers(modification); modification.setIssue(issue); modification.setTools(provider); acceptor.accept( issue, label, label, JavaPluginImages.IMG_CORRECTION_ADD, modification, IProposalRelevance.ADD_UNIMPLEMENTED_METHODS); } } }
public class class_name { public static void accept(SARLQuickfixProvider provider, Issue issue, IssueResolutionAcceptor acceptor, String label, String[] operationUris) { if (operationUris.length > 0) { final MissedMethodAddModification modification = new MissedMethodAddModification(operationUris); provider.getInjector().injectMembers(modification); // depends on control dependency: [if], data = [none] modification.setIssue(issue); // depends on control dependency: [if], data = [none] modification.setTools(provider); // depends on control dependency: [if], data = [none] acceptor.accept( issue, label, label, JavaPluginImages.IMG_CORRECTION_ADD, modification, IProposalRelevance.ADD_UNIMPLEMENTED_METHODS); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected static double computeH(final int i, double[] dist_i, double[] pij_i, double mbeta) { double sumP = 0.; // Skip point "i", break loop in two: for(int j = 0; j < i; j++) { sumP += (pij_i[j] = FastMath.exp(dist_i[j] * mbeta)); } for(int j = i + 1; j < dist_i.length; j++) { sumP += (pij_i[j] = FastMath.exp(dist_i[j] * mbeta)); } if(!(sumP > 0)) { // All pij are zero. Bad news. return Double.NEGATIVE_INFINITY; } final double s = 1. / sumP; // Scaling factor double sum = 0.; // While we could skip pi[i], it should be 0 anyway. for(int j = 0; j < dist_i.length; j++) { sum += dist_i[j] * (pij_i[j] *= s); } return FastMath.log(sumP) - mbeta * sum; } }
public class class_name { protected static double computeH(final int i, double[] dist_i, double[] pij_i, double mbeta) { double sumP = 0.; // Skip point "i", break loop in two: for(int j = 0; j < i; j++) { sumP += (pij_i[j] = FastMath.exp(dist_i[j] * mbeta)); // depends on control dependency: [for], data = [j] } for(int j = i + 1; j < dist_i.length; j++) { sumP += (pij_i[j] = FastMath.exp(dist_i[j] * mbeta)); // depends on control dependency: [for], data = [j] } if(!(sumP > 0)) { // All pij are zero. Bad news. return Double.NEGATIVE_INFINITY; // depends on control dependency: [if], data = [none] } final double s = 1. / sumP; // Scaling factor double sum = 0.; // While we could skip pi[i], it should be 0 anyway. for(int j = 0; j < dist_i.length; j++) { sum += dist_i[j] * (pij_i[j] *= s); // depends on control dependency: [for], data = [j] } return FastMath.log(sumP) - mbeta * sum; } }
public class class_name { public static int alignmentValue(String alignment) { if (alignment == null) return Element.ALIGN_UNDEFINED; if (ALIGN_CENTER.equalsIgnoreCase(alignment)) { return Element.ALIGN_CENTER; } if (ALIGN_LEFT.equalsIgnoreCase(alignment)) { return Element.ALIGN_LEFT; } if (ALIGN_RIGHT.equalsIgnoreCase(alignment)) { return Element.ALIGN_RIGHT; } if (ALIGN_JUSTIFIED.equalsIgnoreCase(alignment)) { return Element.ALIGN_JUSTIFIED; } if (ALIGN_JUSTIFIED_ALL.equalsIgnoreCase(alignment)) { return Element.ALIGN_JUSTIFIED_ALL; } if (ALIGN_TOP.equalsIgnoreCase(alignment)) { return Element.ALIGN_TOP; } if (ALIGN_MIDDLE.equalsIgnoreCase(alignment)) { return Element.ALIGN_MIDDLE; } if (ALIGN_BOTTOM.equalsIgnoreCase(alignment)) { return Element.ALIGN_BOTTOM; } if (ALIGN_BASELINE.equalsIgnoreCase(alignment)) { return Element.ALIGN_BASELINE; } return Element.ALIGN_UNDEFINED; } }
public class class_name { public static int alignmentValue(String alignment) { if (alignment == null) return Element.ALIGN_UNDEFINED; if (ALIGN_CENTER.equalsIgnoreCase(alignment)) { return Element.ALIGN_CENTER; // depends on control dependency: [if], data = [none] } if (ALIGN_LEFT.equalsIgnoreCase(alignment)) { return Element.ALIGN_LEFT; // depends on control dependency: [if], data = [none] } if (ALIGN_RIGHT.equalsIgnoreCase(alignment)) { return Element.ALIGN_RIGHT; // depends on control dependency: [if], data = [none] } if (ALIGN_JUSTIFIED.equalsIgnoreCase(alignment)) { return Element.ALIGN_JUSTIFIED; // depends on control dependency: [if], data = [none] } if (ALIGN_JUSTIFIED_ALL.equalsIgnoreCase(alignment)) { return Element.ALIGN_JUSTIFIED_ALL; // depends on control dependency: [if], data = [none] } if (ALIGN_TOP.equalsIgnoreCase(alignment)) { return Element.ALIGN_TOP; // depends on control dependency: [if], data = [none] } if (ALIGN_MIDDLE.equalsIgnoreCase(alignment)) { return Element.ALIGN_MIDDLE; // depends on control dependency: [if], data = [none] } if (ALIGN_BOTTOM.equalsIgnoreCase(alignment)) { return Element.ALIGN_BOTTOM; // depends on control dependency: [if], data = [none] } if (ALIGN_BASELINE.equalsIgnoreCase(alignment)) { return Element.ALIGN_BASELINE; // depends on control dependency: [if], data = [none] } return Element.ALIGN_UNDEFINED; } }
public class class_name { public TrailingAverage[] getPerfMetrics () { if (_metrics == null) { _metrics = new TrailingAverage[] { new TrailingAverage(150), new TrailingAverage(150), new TrailingAverage(150) }; } return _metrics; } }
public class class_name { public TrailingAverage[] getPerfMetrics () { if (_metrics == null) { _metrics = new TrailingAverage[] { new TrailingAverage(150), new TrailingAverage(150), new TrailingAverage(150) }; // depends on control dependency: [if], data = [none] } return _metrics; } }
public class class_name { void revoke(SchemaObject object, Right right, Grantee grantor, boolean grantOption) { final HsqlName name = object.getName(); Iterator it = directRightsMap.get(name); Right existing = null; while (it.hasNext()) { existing = (Right) it.next(); if (existing.grantor == grantor) { break; } } if (existing == null) { return; } if (existing.grantableRights != null) { existing.grantableRights.remove(object, right); } if (grantOption) { return; } if (right.isFull) { directRightsMap.remove(name, existing); grantor.grantedRightsMap.remove(name, existing); updateAllRights(); return; } existing.remove(object, right); if (existing.isEmpty()) { directRightsMap.remove(name, existing); grantor.grantedRightsMap.remove(object, existing); } updateAllRights(); return; } }
public class class_name { void revoke(SchemaObject object, Right right, Grantee grantor, boolean grantOption) { final HsqlName name = object.getName(); Iterator it = directRightsMap.get(name); Right existing = null; while (it.hasNext()) { existing = (Right) it.next(); // depends on control dependency: [while], data = [none] if (existing.grantor == grantor) { break; } } if (existing == null) { return; // depends on control dependency: [if], data = [none] } if (existing.grantableRights != null) { existing.grantableRights.remove(object, right); // depends on control dependency: [if], data = [none] } if (grantOption) { return; // depends on control dependency: [if], data = [none] } if (right.isFull) { directRightsMap.remove(name, existing); // depends on control dependency: [if], data = [none] grantor.grantedRightsMap.remove(name, existing); // depends on control dependency: [if], data = [none] updateAllRights(); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } existing.remove(object, right); if (existing.isEmpty()) { directRightsMap.remove(name, existing); // depends on control dependency: [if], data = [none] grantor.grantedRightsMap.remove(object, existing); // depends on control dependency: [if], data = [none] } updateAllRights(); return; } }
public class class_name { public static String encodeBase64(File targetFile) { BufferedImage image = null; try { image = ImageIO.read(targetFile); } catch (IOException e) { throw Lang.wrapThrow(e); } ByteArrayOutputStream baos = new ByteArrayOutputStream(); BufferedOutputStream bos = new BufferedOutputStream(baos); image.flush(); try { ImageIO.write(image, Files.getSuffixName(targetFile), bos); bos.flush(); bos.close(); } catch (IOException e) { throw Lang.wrapThrow(e); } byte[] bImage = baos.toByteArray(); return Base64.encodeToString(bImage, false); } }
public class class_name { public static String encodeBase64(File targetFile) { BufferedImage image = null; try { image = ImageIO.read(targetFile); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw Lang.wrapThrow(e); } // depends on control dependency: [catch], data = [none] ByteArrayOutputStream baos = new ByteArrayOutputStream(); BufferedOutputStream bos = new BufferedOutputStream(baos); image.flush(); try { ImageIO.write(image, Files.getSuffixName(targetFile), bos); // depends on control dependency: [try], data = [none] bos.flush(); // depends on control dependency: [try], data = [none] bos.close(); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw Lang.wrapThrow(e); } // depends on control dependency: [catch], data = [none] byte[] bImage = baos.toByteArray(); return Base64.encodeToString(bImage, false); } }
public class class_name { public void optimizePathVisibility(float[] next, float pathOptimizationRange, NavMeshQuery navquery, QueryFilter filter) { // Clamp the ray to max distance. float dist = vDist2D(m_pos, next); // If too close to the goal, do not try to optimize. if (dist < 0.01f) { return; } // Overshoot a little. This helps to optimize open fields in tiled // meshes. dist = Math.min(dist + 0.01f, pathOptimizationRange); // Adjust ray length. float[] delta = vSub(next, m_pos); float[] goal = vMad(m_pos, delta, pathOptimizationRange / dist); Result<RaycastHit> rc = navquery.raycast(m_path.get(0), m_pos, goal, filter, 0, 0); if (rc.succeeded()) { if (rc.result.path.size() > 1 && rc.result.t > 0.99f) { m_path = mergeCorridorStartShortcut(m_path, rc.result.path); } } } }
public class class_name { public void optimizePathVisibility(float[] next, float pathOptimizationRange, NavMeshQuery navquery, QueryFilter filter) { // Clamp the ray to max distance. float dist = vDist2D(m_pos, next); // If too close to the goal, do not try to optimize. if (dist < 0.01f) { return; // depends on control dependency: [if], data = [none] } // Overshoot a little. This helps to optimize open fields in tiled // meshes. dist = Math.min(dist + 0.01f, pathOptimizationRange); // Adjust ray length. float[] delta = vSub(next, m_pos); float[] goal = vMad(m_pos, delta, pathOptimizationRange / dist); Result<RaycastHit> rc = navquery.raycast(m_path.get(0), m_pos, goal, filter, 0, 0); if (rc.succeeded()) { if (rc.result.path.size() > 1 && rc.result.t > 0.99f) { m_path = mergeCorridorStartShortcut(m_path, rc.result.path); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public DescribeDirectConnectGatewayAssociationsResult withDirectConnectGatewayAssociations( DirectConnectGatewayAssociation... directConnectGatewayAssociations) { if (this.directConnectGatewayAssociations == null) { setDirectConnectGatewayAssociations(new com.amazonaws.internal.SdkInternalList<DirectConnectGatewayAssociation>( directConnectGatewayAssociations.length)); } for (DirectConnectGatewayAssociation ele : directConnectGatewayAssociations) { this.directConnectGatewayAssociations.add(ele); } return this; } }
public class class_name { public DescribeDirectConnectGatewayAssociationsResult withDirectConnectGatewayAssociations( DirectConnectGatewayAssociation... directConnectGatewayAssociations) { if (this.directConnectGatewayAssociations == null) { setDirectConnectGatewayAssociations(new com.amazonaws.internal.SdkInternalList<DirectConnectGatewayAssociation>( directConnectGatewayAssociations.length)); // depends on control dependency: [if], data = [none] } for (DirectConnectGatewayAssociation ele : directConnectGatewayAssociations) { this.directConnectGatewayAssociations.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { protected void doMove(HttpServletRequest req, HttpServletResponse resp) { // Get source path String src = getRelativePath(req); // Check if Webdav is read only if (m_readOnly) { resp.setStatus(CmsWebdavStatus.SC_FORBIDDEN); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_WEBDAV_READ_ONLY_0)); } return; } // Check if resource is locked if (isLocked(req)) { resp.setStatus(CmsWebdavStatus.SC_LOCKED); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_LOCKED_1, src)); } return; } // Parsing destination header String dest = parseDestinationHeader(req); if (dest == null) { resp.setStatus(CmsWebdavStatus.SC_BAD_REQUEST); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_PARSE_DEST_HEADER_0)); } return; } // source and destination are the same if (dest.equals(src)) { resp.setStatus(CmsWebdavStatus.SC_FORBIDDEN); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_SRC_DEST_EQUALS_0)); } return; } // Parsing overwrite header boolean overwrite = parseOverwriteHeader(req); // Check if source exists if (!m_session.exists(src)) { resp.setStatus(CmsWebdavStatus.SC_NOT_FOUND); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_NOT_FOUND_1, src)); } return; } // If the destination exists, then it's a conflict if ((m_session.exists(dest)) && (!overwrite)) { resp.setStatus(CmsWebdavStatus.SC_PRECONDITION_FAILED); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_DEST_PATH_EXISTS_1, dest)); } return; } if ((!m_session.exists(dest)) && (overwrite)) { resp.setStatus(CmsWebdavStatus.SC_CREATED); } // trigger move in session handler try { if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_MOVE_ITEM_2, src, dest)); } m_session.move(src, dest, overwrite); } catch (CmsVfsResourceNotFoundException rnfex) { resp.setStatus(CmsWebdavStatus.SC_NOT_FOUND); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_NOT_FOUND_1, src)); } return; } catch (CmsSecurityException sex) { resp.setStatus(CmsWebdavStatus.SC_FORBIDDEN); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_NO_PERMISSION_0)); } return; } catch (CmsVfsResourceAlreadyExistsException raeex) { resp.setStatus(CmsWebdavStatus.SC_PRECONDITION_FAILED); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_EXISTS_1, dest)); } return; } catch (CmsException ex) { resp.setStatus(CmsWebdavStatus.SC_INTERNAL_SERVER_ERROR); if (LOG.isErrorEnabled()) { LOG.error(Messages.get().getBundle().key(Messages.LOG_REPOSITORY_ERROR_2, "MOVE", src), ex); } return; } if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_MOVE_ITEM_SUCCESS_0)); } } }
public class class_name { protected void doMove(HttpServletRequest req, HttpServletResponse resp) { // Get source path String src = getRelativePath(req); // Check if Webdav is read only if (m_readOnly) { resp.setStatus(CmsWebdavStatus.SC_FORBIDDEN); // depends on control dependency: [if], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_WEBDAV_READ_ONLY_0)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } // Check if resource is locked if (isLocked(req)) { resp.setStatus(CmsWebdavStatus.SC_LOCKED); // depends on control dependency: [if], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_LOCKED_1, src)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } // Parsing destination header String dest = parseDestinationHeader(req); if (dest == null) { resp.setStatus(CmsWebdavStatus.SC_BAD_REQUEST); // depends on control dependency: [if], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_PARSE_DEST_HEADER_0)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } // source and destination are the same if (dest.equals(src)) { resp.setStatus(CmsWebdavStatus.SC_FORBIDDEN); // depends on control dependency: [if], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_SRC_DEST_EQUALS_0)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } // Parsing overwrite header boolean overwrite = parseOverwriteHeader(req); // Check if source exists if (!m_session.exists(src)) { resp.setStatus(CmsWebdavStatus.SC_NOT_FOUND); // depends on control dependency: [if], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_NOT_FOUND_1, src)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } // If the destination exists, then it's a conflict if ((m_session.exists(dest)) && (!overwrite)) { resp.setStatus(CmsWebdavStatus.SC_PRECONDITION_FAILED); // depends on control dependency: [if], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_DEST_PATH_EXISTS_1, dest)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } if ((!m_session.exists(dest)) && (overwrite)) { resp.setStatus(CmsWebdavStatus.SC_CREATED); // depends on control dependency: [if], data = [none] } // trigger move in session handler try { if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_MOVE_ITEM_2, src, dest)); // depends on control dependency: [if], data = [none] } m_session.move(src, dest, overwrite); // depends on control dependency: [try], data = [none] } catch (CmsVfsResourceNotFoundException rnfex) { resp.setStatus(CmsWebdavStatus.SC_NOT_FOUND); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_NOT_FOUND_1, src)); // depends on control dependency: [if], data = [none] } return; } catch (CmsSecurityException sex) { // depends on control dependency: [catch], data = [none] resp.setStatus(CmsWebdavStatus.SC_FORBIDDEN); if (LOG.isDebugEnabled()) { 
LOG.debug(Messages.get().getBundle().key(Messages.LOG_NO_PERMISSION_0)); // depends on control dependency: [if], data = [none] } return; } catch (CmsVfsResourceAlreadyExistsException raeex) { // depends on control dependency: [catch], data = [none] resp.setStatus(CmsWebdavStatus.SC_PRECONDITION_FAILED); if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_ITEM_EXISTS_1, dest)); // depends on control dependency: [if], data = [none] } return; } catch (CmsException ex) { // depends on control dependency: [catch], data = [none] resp.setStatus(CmsWebdavStatus.SC_INTERNAL_SERVER_ERROR); if (LOG.isErrorEnabled()) { LOG.error(Messages.get().getBundle().key(Messages.LOG_REPOSITORY_ERROR_2, "MOVE", src), ex); // depends on control dependency: [if], data = [none] } return; } // depends on control dependency: [catch], data = [none] if (LOG.isDebugEnabled()) { LOG.debug(Messages.get().getBundle().key(Messages.LOG_MOVE_ITEM_SUCCESS_0)); // depends on control dependency: [if], data = [none] } } }
public class class_name { static String valueAsString(Map<String, Object> map, Key key, String defaultValue) { Validate.notNullOrEmpty(key.key, "Key for plugin configuration must be set"); if (map.containsKey(key.key)) { return map.get(key.key).toString().length() == 0 ? defaultValue : map.get(key.key).toString(); } return defaultValue; } }
public class class_name { static String valueAsString(Map<String, Object> map, Key key, String defaultValue) { Validate.notNullOrEmpty(key.key, "Key for plugin configuration must be set"); if (map.containsKey(key.key)) { return map.get(key.key).toString().length() == 0 ? defaultValue : map.get(key.key).toString(); // depends on control dependency: [if], data = [none] } return defaultValue; } }
public class class_name { boolean isComment(String line, boolean trim) { final String trimmedLine = trim ? line.trim() : line; for (String comment: getDialect().getSqlLineOneLineComments()) { if (trimmedLine.startsWith(comment)) { return true; } } return false; } }
public class class_name { boolean isComment(String line, boolean trim) { final String trimmedLine = trim ? line.trim() : line; for (String comment: getDialect().getSqlLineOneLineComments()) { if (trimmedLine.startsWith(comment)) { return true; // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public static void encodeHTML4DHTMLAttrs(ResponseWriter rw, Map<String, Object> attrs, String[] alist) throws IOException { // Encode attributes (HTML 4 + DHTML) for (String a : alist) { if (attrs.get(a) != null) { String val = A.asString(attrs.get(a)); if (val != null && val.length() > 0) { rw.writeAttribute(a, val, a); } } } } }
public class class_name { public static void encodeHTML4DHTMLAttrs(ResponseWriter rw, Map<String, Object> attrs, String[] alist) throws IOException { // Encode attributes (HTML 4 + DHTML) for (String a : alist) { if (attrs.get(a) != null) { String val = A.asString(attrs.get(a)); if (val != null && val.length() > 0) { rw.writeAttribute(a, val, a); // depends on control dependency: [if], data = [none] } } } } }
public class class_name { public static HierarchicalConfiguration getHierarchicalConfiguration(final Configuration configuration) { if (configuration instanceof CompositeConfiguration) { final CompositeConfiguration compositeConfig = (CompositeConfiguration) configuration; for (int i = 0; i < compositeConfig.getNumberOfConfigurations(); i++) { if (compositeConfig.getConfiguration(i) instanceof HierarchicalConfiguration) { return (HierarchicalConfiguration) compositeConfig.getConfiguration(i); } } } // maybe I need to send a runtime exception ?? // throw new // ConfigurationRuntimeException("no hierarchical configuration was defined"); return null; } }
public class class_name { public static HierarchicalConfiguration getHierarchicalConfiguration(final Configuration configuration) { if (configuration instanceof CompositeConfiguration) { final CompositeConfiguration compositeConfig = (CompositeConfiguration) configuration; for (int i = 0; i < compositeConfig.getNumberOfConfigurations(); i++) { if (compositeConfig.getConfiguration(i) instanceof HierarchicalConfiguration) { return (HierarchicalConfiguration) compositeConfig.getConfiguration(i); // depends on control dependency: [if], data = [none] } } } // maybe I need to send a runtime exception ?? // throw new // ConfigurationRuntimeException("no hierarchical configuration was defined"); return null; } }
public class class_name { public ExecType execute(DatabaseChangeLog databaseChangeLog, ChangeExecListener listener, Database database) throws MigrationFailedException { Logger log = Scope.getCurrentScope().getLog(getClass()); if (validationFailed) { return ExecType.MARK_RAN; } long startTime = new Date().getTime(); ExecType execType = null; boolean skipChange = false; Executor executor = ExecutorService.getInstance().getExecutor(database); try { // set object quoting strategy database.setObjectQuotingStrategy(objectQuotingStrategy); if (database.supportsDDLInTransaction()) { database.setAutoCommit(!runInTransaction); } executor.comment("Changeset " + toString(false)); if (StringUtil.trimToNull(getComments()) != null) { String comments = getComments(); String[] lines = comments.split("\\n"); for (int i = 0; i < lines.length; i++) { if (i > 0) { lines[i] = database.getLineComment() + " " + lines[i]; } } executor.comment(StringUtil.join(Arrays.asList(lines), "\n")); } try { if (preconditions != null) { preconditions.check(database, databaseChangeLog, this, listener); } } catch (PreconditionFailedException e) { if (listener != null) { listener.preconditionFailed(e, preconditions.getOnFail()); } StringBuffer message = new StringBuffer(); message.append(StreamUtil.getLineSeparator()); for (FailedPrecondition invalid : e.getFailedPreconditions()) { message.append(" ").append(invalid.toString()); message.append(StreamUtil.getLineSeparator()); } if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.HALT)) { throw new MigrationFailedException(this, message.toString(), e); } else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.CONTINUE)) { skipChange = true; execType = ExecType.SKIPPED; Scope.getCurrentScope().getLog(getClass()).info(LogType.LOG, "Continuing past: " + toString() + " despite precondition failure due to onFail='CONTINUE': " + message); } else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.MARK_RAN)) { execType = ExecType.MARK_RAN; skipChange = true; log.info(LogType.LOG, "Marking ChangeSet: " + toString() + " ran despite precondition failure due to onFail='MARK_RAN': " + message); } else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.WARN)) { execType = null; //already warned } else { throw new UnexpectedLiquibaseException("Unexpected precondition onFail attribute: " + preconditions.getOnFail(), e); } } catch (PreconditionErrorException e) { if (listener != null) { listener.preconditionErrored(e, preconditions.getOnError()); } StringBuffer message = new StringBuffer(); message.append(StreamUtil.getLineSeparator()); for (ErrorPrecondition invalid : e.getErrorPreconditions()) { message.append(" ").append(invalid.toString()); message.append(StreamUtil.getLineSeparator()); } if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.HALT)) { throw new MigrationFailedException(this, message.toString(), e); } else if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.CONTINUE)) { skipChange = true; execType = ExecType.SKIPPED; } else if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.MARK_RAN)) { execType = ExecType.MARK_RAN; skipChange = true; log.info(LogType.LOG, "Marking ChangeSet: " + toString() + " ran despite precondition error: " + message); } else if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.WARN)) { execType = null; //already logged } else { throw new UnexpectedLiquibaseException("Unexpected precondition onError 
attribute: " + preconditions.getOnError(), e); } database.rollback(); } finally { database.rollback(); } if (!skipChange) { for (Change change : changes) { try { change.finishInitialization(); } catch (SetupException se) { throw new MigrationFailedException(this, se); } } log.fine(LogType.LOG, "Reading ChangeSet: " + toString()); for (Change change : getChanges()) { if ((!(change instanceof DbmsTargetedChange)) || DatabaseList.definitionMatches(((DbmsTargetedChange) change).getDbms(), database, true)) { if (listener != null) { listener.willRun(change, this, changeLog, database); } if (change.generateStatementsVolatile(database)) { executor.comment("WARNING The following SQL may change each run and therefore is possibly incorrect and/or invalid:"); } database.executeStatements(change, databaseChangeLog, sqlVisitors); log.info(LogType.LOG, change.getConfirmationMessage()); if (listener != null) { listener.ran(change, this, changeLog, database); } } else { log.fine(LogType.LOG, "Change " + change.getSerializedObjectName() + " not included for database " + database.getShortName()); } } if (runInTransaction) { database.commit(); } log.info(LogType.LOG, "ChangeSet " + toString(false) + " ran successfully in " + (new Date().getTime() - startTime + "ms")); if (execType == null) { execType = ExecType.EXECUTED; } } else { log.fine(LogType.LOG, "Skipping ChangeSet: " + toString()); } } catch (Exception e) { try { database.rollback(); } catch (Exception e1) { throw new MigrationFailedException(this, e); } if ((getFailOnError() != null) && !getFailOnError()) { log.info(LogType.LOG, "Change set " + toString(false) + " failed, but failOnError was false. Error: " + e.getMessage()); log.fine(LogType.LOG, "Failure Stacktrace", e); execType = ExecType.FAILED; } else { // just log the message, dont log the stacktrace by appending exception. Its logged anyway to stdout log.severe(LogType.LOG, "Change Set " + toString(false) + " failed. Error: " + e.getMessage()); if (e instanceof MigrationFailedException) { throw ((MigrationFailedException) e); } else { throw new MigrationFailedException(this, e); } } } finally { // restore auto-commit to false if this ChangeSet was not run in a transaction, // but only if the database supports DDL in transactions if (!runInTransaction && database.supportsDDLInTransaction()) { try { database.setAutoCommit(false); } catch (DatabaseException e) { throw new MigrationFailedException(this, "Could not resetInternalState autocommit", e); } } } return execType; } }
public class class_name { public ExecType execute(DatabaseChangeLog databaseChangeLog, ChangeExecListener listener, Database database) throws MigrationFailedException { Logger log = Scope.getCurrentScope().getLog(getClass()); if (validationFailed) { return ExecType.MARK_RAN; } long startTime = new Date().getTime(); ExecType execType = null; boolean skipChange = false; Executor executor = ExecutorService.getInstance().getExecutor(database); try { // set object quoting strategy database.setObjectQuotingStrategy(objectQuotingStrategy); if (database.supportsDDLInTransaction()) { database.setAutoCommit(!runInTransaction); // depends on control dependency: [if], data = [none] } executor.comment("Changeset " + toString(false)); if (StringUtil.trimToNull(getComments()) != null) { String comments = getComments(); String[] lines = comments.split("\\n"); for (int i = 0; i < lines.length; i++) { if (i > 0) { lines[i] = database.getLineComment() + " " + lines[i]; // depends on control dependency: [if], data = [none] } } executor.comment(StringUtil.join(Arrays.asList(lines), "\n")); // depends on control dependency: [if], data = [none] } try { if (preconditions != null) { preconditions.check(database, databaseChangeLog, this, listener); // depends on control dependency: [if], data = [none] } } catch (PreconditionFailedException e) { if (listener != null) { listener.preconditionFailed(e, preconditions.getOnFail()); // depends on control dependency: [if], data = [none] } StringBuffer message = new StringBuffer(); message.append(StreamUtil.getLineSeparator()); for (FailedPrecondition invalid : e.getFailedPreconditions()) { message.append(" ").append(invalid.toString()); // depends on control dependency: [for], data = [invalid] message.append(StreamUtil.getLineSeparator()); // depends on control dependency: [for], data = [none] } if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.HALT)) { throw new MigrationFailedException(this, message.toString(), e); } else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.CONTINUE)) { skipChange = true; // depends on control dependency: [if], data = [none] execType = ExecType.SKIPPED; // depends on control dependency: [if], data = [none] Scope.getCurrentScope().getLog(getClass()).info(LogType.LOG, "Continuing past: " + toString() + " despite precondition failure due to onFail='CONTINUE': " + message); // depends on control dependency: [if], data = [none] } else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.MARK_RAN)) { execType = ExecType.MARK_RAN; // depends on control dependency: [if], data = [none] skipChange = true; // depends on control dependency: [if], data = [none] log.info(LogType.LOG, "Marking ChangeSet: " + toString() + " ran despite precondition failure due to onFail='MARK_RAN': " + message); // depends on control dependency: [if], data = [none] } else if (preconditions.getOnFail().equals(PreconditionContainer.FailOption.WARN)) { execType = null; //already warned // depends on control dependency: [if], data = [none] } else { throw new UnexpectedLiquibaseException("Unexpected precondition onFail attribute: " + preconditions.getOnFail(), e); } } catch (PreconditionErrorException e) { // depends on control dependency: [catch], data = [none] if (listener != null) { listener.preconditionErrored(e, preconditions.getOnError()); // depends on control dependency: [if], data = [none] } StringBuffer message = new StringBuffer(); message.append(StreamUtil.getLineSeparator()); for (ErrorPrecondition invalid : 
e.getErrorPreconditions()) { message.append(" ").append(invalid.toString()); // depends on control dependency: [for], data = [invalid] message.append(StreamUtil.getLineSeparator()); // depends on control dependency: [for], data = [none] } if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.HALT)) { throw new MigrationFailedException(this, message.toString(), e); } else if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.CONTINUE)) { skipChange = true; // depends on control dependency: [if], data = [none] execType = ExecType.SKIPPED; // depends on control dependency: [if], data = [none] } else if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.MARK_RAN)) { execType = ExecType.MARK_RAN; // depends on control dependency: [if], data = [none] skipChange = true; // depends on control dependency: [if], data = [none] log.info(LogType.LOG, "Marking ChangeSet: " + toString() + " ran despite precondition error: " + message); // depends on control dependency: [if], data = [none] } else if (preconditions.getOnError().equals(PreconditionContainer.ErrorOption.WARN)) { execType = null; //already logged // depends on control dependency: [if], data = [none] } else { throw new UnexpectedLiquibaseException("Unexpected precondition onError attribute: " + preconditions.getOnError(), e); } database.rollback(); } finally { // depends on control dependency: [catch], data = [none] database.rollback(); } if (!skipChange) { for (Change change : changes) { try { change.finishInitialization(); // depends on control dependency: [try], data = [none] } catch (SetupException se) { throw new MigrationFailedException(this, se); } // depends on control dependency: [catch], data = [none] } log.fine(LogType.LOG, "Reading ChangeSet: " + toString()); // depends on control dependency: [if], data = [none] for (Change change : getChanges()) { if ((!(change instanceof DbmsTargetedChange)) || DatabaseList.definitionMatches(((DbmsTargetedChange) change).getDbms(), database, true)) { if (listener != null) { listener.willRun(change, this, changeLog, database); // depends on control dependency: [if], data = [none] } if (change.generateStatementsVolatile(database)) { executor.comment("WARNING The following SQL may change each run and therefore is possibly incorrect and/or invalid:"); // depends on control dependency: [if], data = [none] } database.executeStatements(change, databaseChangeLog, sqlVisitors); // depends on control dependency: [if], data = [none] log.info(LogType.LOG, change.getConfirmationMessage()); // depends on control dependency: [if], data = [none] if (listener != null) { listener.ran(change, this, changeLog, database); // depends on control dependency: [if], data = [none] } } else { log.fine(LogType.LOG, "Change " + change.getSerializedObjectName() + " not included for database " + database.getShortName()); // depends on control dependency: [if], data = [none] } } if (runInTransaction) { database.commit(); // depends on control dependency: [if], data = [none] } log.info(LogType.LOG, "ChangeSet " + toString(false) + " ran successfully in " + (new Date().getTime() - startTime + "ms")); // depends on control dependency: [if], data = [none] if (execType == null) { execType = ExecType.EXECUTED; // depends on control dependency: [if], data = [none] } } else { log.fine(LogType.LOG, "Skipping ChangeSet: " + toString()); // depends on control dependency: [if], data = [none] } } catch (Exception e) { try { database.rollback(); // depends on control dependency: [try], 
data = [none] } catch (Exception e1) { throw new MigrationFailedException(this, e); } // depends on control dependency: [catch], data = [none] if ((getFailOnError() != null) && !getFailOnError()) { log.info(LogType.LOG, "Change set " + toString(false) + " failed, but failOnError was false. Error: " + e.getMessage()); // depends on control dependency: [if], data = [none] log.fine(LogType.LOG, "Failure Stacktrace", e); // depends on control dependency: [if], data = [none] execType = ExecType.FAILED; // depends on control dependency: [if], data = [none] } else { // just log the message, dont log the stacktrace by appending exception. Its logged anyway to stdout log.severe(LogType.LOG, "Change Set " + toString(false) + " failed. Error: " + e.getMessage()); // depends on control dependency: [if], data = [none] if (e instanceof MigrationFailedException) { throw ((MigrationFailedException) e); } else { throw new MigrationFailedException(this, e); } } } finally { // restore auto-commit to false if this ChangeSet was not run in a transaction, // but only if the database supports DDL in transactions if (!runInTransaction && database.supportsDDLInTransaction()) { try { database.setAutoCommit(false); // depends on control dependency: [try], data = [none] } catch (DatabaseException e) { throw new MigrationFailedException(this, "Could not resetInternalState autocommit", e); } // depends on control dependency: [catch], data = [none] } } return execType; } }
public class class_name { @SuppressWarnings("unchecked") public Iterable<Obj> asObjSeq(@Nullable Arr arr) { if (arr != null) { return (Iterable<Obj>)(Iterable<?>)arr; } return Collections.emptyIterable(); } }
public class class_name { @SuppressWarnings("unchecked") public Iterable<Obj> asObjSeq(@Nullable Arr arr) { if (arr != null) { return (Iterable<Obj>)(Iterable<?>)arr; // depends on control dependency: [if], data = [none] } return Collections.emptyIterable(); } }
public class class_name { @Override public final BankStatementLine process(final Map<String, Object> pReqVars, final BankStatementLine pEntity, final IRequestData pRequestData) throws Exception { BankStatementLine bsl = this.prcEntityPbEditDelete .process(pReqVars, pEntity, pRequestData); if (bsl.getResultAction() != null) { throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN, "attempt_to_edit_completed_bank_statement_line"); } String amountStr; if (bsl.getItsAmount().compareTo(BigDecimal.ZERO) > 0) { amountStr = bsl.getItsAmount().toString(); } else if (bsl.getItsAmount().compareTo(BigDecimal.ZERO) < 0) { amountStr = bsl.getItsAmount().negate().toString(); } else { throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER, "amount_is_zero"); } long[] startEnd = evalDayStartEndFor(bsl.getItsDate()); String dWhere = "where HASMADEACCENTRIES=1 and REVERSEDID is null and ITSTOTAL=" + amountStr + " and ITSDATE >= " + startEnd[0] + " and ITSDATE <= " + startEnd[1]; Set<String> ndFlDoc = new HashSet<String>(); ndFlDoc.add("itsId"); ndFlDoc.add("idDatabaseBirth"); ndFlDoc.add("idBirth"); ndFlDoc.add("itsTotal"); ndFlDoc.add("itsDate"); ndFlDoc.add("description"); if (bsl.getItsAmount().compareTo(BigDecimal.ZERO) > 0) { //bank account debit pReqVars.put("PrepaymentFromneededFields", ndFlDoc); List<PrepaymentFrom> prepaymentsFrom = getSrvOrm() .retrieveListWithConditions(pReqVars, PrepaymentFrom.class, dWhere); pReqVars.remove("PrepaymentFromneededFields"); if (prepaymentsFrom.size() > 0) { pRequestData.setAttribute("prepayments", prepaymentsFrom); } pReqVars.put("PaymentFromneededFields", ndFlDoc); List<PaymentFrom> paymentsFrom = getSrvOrm() .retrieveListWithConditions(pReqVars, PaymentFrom.class, dWhere); pReqVars.remove("PaymentFromneededFields"); if (paymentsFrom.size() > 0) { pRequestData.setAttribute("payments", paymentsFrom); } String eWhereD = "where REVERSEDID is null and SOURCETYPE in (3,1010)" + " and SUBACCDEBITTYPE=2002 and SUBACCDEBITID=" + bsl.getItsOwner() .getBankAccount().getItsId() + " and DEBIT=" + amountStr + " and ITSDATE >= " + startEnd[0] + " and ITSDATE <= " + startEnd[1]; List<AccountingEntry> entriesFrom = getSrvOrm() .retrieveListWithConditions(pReqVars, AccountingEntry.class, eWhereD); if (entriesFrom.size() > 0) { pRequestData.setAttribute("accentries", entriesFrom); } } else { //bank account credit pReqVars.put("PrepaymentToneededFields", ndFlDoc); List<PrepaymentTo> prepaymentsTo = getSrvOrm() .retrieveListWithConditions(pReqVars, PrepaymentTo.class, dWhere); pReqVars.remove("PrepaymentToneededFields"); if (prepaymentsTo.size() > 0) { pRequestData.setAttribute("prepayments", prepaymentsTo); } pReqVars.put("PaymentToneededFields", ndFlDoc); List<PaymentTo> paymentsTo = getSrvOrm() .retrieveListWithConditions(pReqVars, PaymentTo.class, dWhere); pReqVars.remove("PaymentToneededFields"); if (paymentsTo.size() > 0) { pRequestData.setAttribute("payments", paymentsTo); } String eWhereC = "where REVERSEDID is null and SOURCETYPE in (3,1010)" + " and SUBACCCREDITTYPE=2002 and SUBACCCREDITID=" + bsl.getItsOwner() .getBankAccount().getItsId() + " and CREDIT=" + amountStr + " and ITSDATE >= " + startEnd[0] + " and ITSDATE <= " + startEnd[1]; List<AccountingEntry> entriesTo = getSrvOrm() .retrieveListWithConditions(pReqVars, AccountingEntry.class, eWhereC); if (entriesTo.size() > 0) { pRequestData.setAttribute("accentries", entriesTo); } } pRequestData.setAttribute("typeCodeSubaccMap", this.srvTypeCode.getTypeCodeMap()); return bsl; } }
public class class_name { @Override public final BankStatementLine process(final Map<String, Object> pReqVars, final BankStatementLine pEntity, final IRequestData pRequestData) throws Exception { BankStatementLine bsl = this.prcEntityPbEditDelete .process(pReqVars, pEntity, pRequestData); if (bsl.getResultAction() != null) { throw new ExceptionWithCode(ExceptionWithCode.FORBIDDEN, "attempt_to_edit_completed_bank_statement_line"); } String amountStr; if (bsl.getItsAmount().compareTo(BigDecimal.ZERO) > 0) { amountStr = bsl.getItsAmount().toString(); } else if (bsl.getItsAmount().compareTo(BigDecimal.ZERO) < 0) { amountStr = bsl.getItsAmount().negate().toString(); } else { throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER, "amount_is_zero"); } long[] startEnd = evalDayStartEndFor(bsl.getItsDate()); String dWhere = "where HASMADEACCENTRIES=1 and REVERSEDID is null and ITSTOTAL=" + amountStr + " and ITSDATE >= " + startEnd[0] + " and ITSDATE <= " + startEnd[1]; Set<String> ndFlDoc = new HashSet<String>(); ndFlDoc.add("itsId"); ndFlDoc.add("idDatabaseBirth"); ndFlDoc.add("idBirth"); ndFlDoc.add("itsTotal"); ndFlDoc.add("itsDate"); ndFlDoc.add("description"); if (bsl.getItsAmount().compareTo(BigDecimal.ZERO) > 0) { //bank account debit pReqVars.put("PrepaymentFromneededFields", ndFlDoc); List<PrepaymentFrom> prepaymentsFrom = getSrvOrm() .retrieveListWithConditions(pReqVars, PrepaymentFrom.class, dWhere); pReqVars.remove("PrepaymentFromneededFields"); if (prepaymentsFrom.size() > 0) { pRequestData.setAttribute("prepayments", prepaymentsFrom); // depends on control dependency: [if], data = [none] } pReqVars.put("PaymentFromneededFields", ndFlDoc); List<PaymentFrom> paymentsFrom = getSrvOrm() .retrieveListWithConditions(pReqVars, PaymentFrom.class, dWhere); pReqVars.remove("PaymentFromneededFields"); if (paymentsFrom.size() > 0) { pRequestData.setAttribute("payments", paymentsFrom); // depends on control dependency: [if], data = [none] } String eWhereD = "where REVERSEDID is null and SOURCETYPE in (3,1010)" + " and SUBACCDEBITTYPE=2002 and SUBACCDEBITID=" + bsl.getItsOwner() .getBankAccount().getItsId() + " and DEBIT=" + amountStr + " and ITSDATE >= " + startEnd[0] + " and ITSDATE <= " + startEnd[1]; List<AccountingEntry> entriesFrom = getSrvOrm() .retrieveListWithConditions(pReqVars, AccountingEntry.class, eWhereD); if (entriesFrom.size() > 0) { pRequestData.setAttribute("accentries", entriesFrom); // depends on control dependency: [if], data = [none] } } else { //bank account credit pReqVars.put("PrepaymentToneededFields", ndFlDoc); List<PrepaymentTo> prepaymentsTo = getSrvOrm() .retrieveListWithConditions(pReqVars, PrepaymentTo.class, dWhere); pReqVars.remove("PrepaymentToneededFields"); if (prepaymentsTo.size() > 0) { pRequestData.setAttribute("prepayments", prepaymentsTo); // depends on control dependency: [if], data = [none] } pReqVars.put("PaymentToneededFields", ndFlDoc); List<PaymentTo> paymentsTo = getSrvOrm() .retrieveListWithConditions(pReqVars, PaymentTo.class, dWhere); pReqVars.remove("PaymentToneededFields"); if (paymentsTo.size() > 0) { pRequestData.setAttribute("payments", paymentsTo); // depends on control dependency: [if], data = [none] } String eWhereC = "where REVERSEDID is null and SOURCETYPE in (3,1010)" + " and SUBACCCREDITTYPE=2002 and SUBACCCREDITID=" + bsl.getItsOwner() .getBankAccount().getItsId() + " and CREDIT=" + amountStr + " and ITSDATE >= " + startEnd[0] + " and ITSDATE <= " + startEnd[1]; List<AccountingEntry> entriesTo = getSrvOrm() 
.retrieveListWithConditions(pReqVars, AccountingEntry.class, eWhereC); if (entriesTo.size() > 0) { pRequestData.setAttribute("accentries", entriesTo); // depends on control dependency: [if], data = [none] } } pRequestData.setAttribute("typeCodeSubaccMap", this.srvTypeCode.getTypeCodeMap()); return bsl; } }
public class class_name { public EClass getIfcProjectionElement() { if (ifcProjectionElementEClass == null) { ifcProjectionElementEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(396); } return ifcProjectionElementEClass; } }
public class class_name { public EClass getIfcProjectionElement() { if (ifcProjectionElementEClass == null) { ifcProjectionElementEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(396); // depends on control dependency: [if], data = [none] } return ifcProjectionElementEClass; } }
public class class_name { public void setOfferingTransactions(java.util.Collection<OfferingTransaction> offeringTransactions) { if (offeringTransactions == null) { this.offeringTransactions = null; return; } this.offeringTransactions = new java.util.ArrayList<OfferingTransaction>(offeringTransactions); } }
public class class_name { public void setOfferingTransactions(java.util.Collection<OfferingTransaction> offeringTransactions) { if (offeringTransactions == null) { this.offeringTransactions = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.offeringTransactions = new java.util.ArrayList<OfferingTransaction>(offeringTransactions); } }
public class class_name { private void applyChangeToGalleryTree(CmsSitemapChangeEvent changeEvent) { CmsSitemapChange change = changeEvent.getChange(); switch (change.getChangeType()) { case delete: CmsGalleryTreeItem deleteItem = m_galleryTreeItems.get(change.getEntryId()); if (deleteItem != null) { deleteItem.removeFromParent(); } break; case undelete: case create: String typeName = m_controller.getGalleryType( new Integer(change.getNewResourceTypeId())).getResourceType(); if (typeName != null) { CmsGalleryFolderEntry galleryFolder = new CmsGalleryFolderEntry(); galleryFolder.setSitePath(change.getSitePath()); galleryFolder.setResourceType(typeName); galleryFolder.setStructureId(change.getEntryId()); galleryFolder.setOwnProperties(change.getOwnProperties()); galleryFolder.setIconClasses( m_controller.getGalleryType(new Integer(change.getNewResourceTypeId())).getBigIconClasses()); CmsGalleryTreeItem folderItem = new CmsGalleryTreeItem(galleryFolder); CmsSitemapHoverbar.installOn(m_controller, folderItem, galleryFolder.getStructureId()); m_galleryTypeItems.get(typeName).addChild(folderItem); m_galleryTreeItems.put(galleryFolder.getStructureId(), folderItem); } break; case modify: CmsGalleryTreeItem changeItem = m_galleryTreeItems.get(change.getEntryId()); if (changeItem != null) { CmsListItemWidget widget = changeItem.getListItemWidget(); for (CmsPropertyModification mod : change.getPropertyChanges()) { if (mod.getName().equals(CmsClientProperty.PROPERTY_TITLE)) { widget.setTitleLabel(mod.getValue()); } } String oldPath = changeItem.getSitePath(); if ((change.getName() != null) && !oldPath.endsWith("/" + change.getName())) { String newPath = CmsResource.getParentFolder(oldPath) + change.getName() + "/"; changeItem.updateSitePath(newPath); } } break; case bumpDetailPage: case clipboardOnly: case remove: default: // nothing to do } } }
public class class_name { private void applyChangeToGalleryTree(CmsSitemapChangeEvent changeEvent) { CmsSitemapChange change = changeEvent.getChange(); switch (change.getChangeType()) { case delete: CmsGalleryTreeItem deleteItem = m_galleryTreeItems.get(change.getEntryId()); if (deleteItem != null) { deleteItem.removeFromParent(); // depends on control dependency: [if], data = [none] } break; case undelete: case create: String typeName = m_controller.getGalleryType( new Integer(change.getNewResourceTypeId())).getResourceType(); if (typeName != null) { CmsGalleryFolderEntry galleryFolder = new CmsGalleryFolderEntry(); galleryFolder.setSitePath(change.getSitePath()); // depends on control dependency: [if], data = [none] galleryFolder.setResourceType(typeName); // depends on control dependency: [if], data = [(typeName] galleryFolder.setStructureId(change.getEntryId()); // depends on control dependency: [if], data = [none] galleryFolder.setOwnProperties(change.getOwnProperties()); // depends on control dependency: [if], data = [none] galleryFolder.setIconClasses( m_controller.getGalleryType(new Integer(change.getNewResourceTypeId())).getBigIconClasses()); // depends on control dependency: [if], data = [none] CmsGalleryTreeItem folderItem = new CmsGalleryTreeItem(galleryFolder); CmsSitemapHoverbar.installOn(m_controller, folderItem, galleryFolder.getStructureId()); // depends on control dependency: [if], data = [none] m_galleryTypeItems.get(typeName).addChild(folderItem); // depends on control dependency: [if], data = [(typeName] m_galleryTreeItems.put(galleryFolder.getStructureId(), folderItem); // depends on control dependency: [if], data = [none] } break; case modify: CmsGalleryTreeItem changeItem = m_galleryTreeItems.get(change.getEntryId()); if (changeItem != null) { CmsListItemWidget widget = changeItem.getListItemWidget(); for (CmsPropertyModification mod : change.getPropertyChanges()) { if (mod.getName().equals(CmsClientProperty.PROPERTY_TITLE)) { widget.setTitleLabel(mod.getValue()); // depends on control dependency: [if], data = [none] } } String oldPath = changeItem.getSitePath(); if ((change.getName() != null) && !oldPath.endsWith("/" + change.getName())) { String newPath = CmsResource.getParentFolder(oldPath) + change.getName() + "/"; changeItem.updateSitePath(newPath); // depends on control dependency: [if], data = [none] } } break; case bumpDetailPage: case clipboardOnly: case remove: default: // nothing to do } } }
public class class_name { void update(boolean[] visit, int[] vs, int n) { int len = atoms.length; double subtract = 0; for (int i = 0; i < n; i++) { final int v = vs[i]; final Point2d p1 = atoms[v].getPoint2d(); for (int w = 0; w < len; w++) { if (visit[w] || contribution[v][w] < 0) continue; subtract += contribution[v][w]; final Point2d p2 = atoms[w].getPoint2d(); final double x = p1.x - p2.x; final double y = p1.y - p2.y; final double len2 = x * x + y * y; score += contribution[w][v] = contribution[v][w] = 1 / Math.max(len2, MIN_SCORE); } } score -= subtract; } }
public class class_name { void update(boolean[] visit, int[] vs, int n) { int len = atoms.length; double subtract = 0; for (int i = 0; i < n; i++) { final int v = vs[i]; final Point2d p1 = atoms[v].getPoint2d(); for (int w = 0; w < len; w++) { if (visit[w] || contribution[v][w] < 0) continue; subtract += contribution[v][w]; // depends on control dependency: [for], data = [w] final Point2d p2 = atoms[w].getPoint2d(); final double x = p1.x - p2.x; final double y = p1.y - p2.y; final double len2 = x * x + y * y; score += contribution[w][v] = contribution[v][w] = 1 / Math.max(len2, MIN_SCORE); // depends on control dependency: [for], data = [w] } } score -= subtract; } }
public class class_name { public void setInsightArns(java.util.Collection<String> insightArns) { if (insightArns == null) { this.insightArns = null; return; } this.insightArns = new java.util.ArrayList<String>(insightArns); } }
public class class_name { public void setInsightArns(java.util.Collection<String> insightArns) { if (insightArns == null) { this.insightArns = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.insightArns = new java.util.ArrayList<String>(insightArns); } }
public class class_name { private void setFileMarkers(IFile file, ParseResult result) throws CoreException { if (file != null) { FileUtility.deleteMarker(file, IMarker.PROBLEM, ICoreConstants.PLUGIN_ID); if (result.hasParseErrors()) { file.deleteMarkers(IMarker.PROBLEM, true, IResource.DEPTH_INFINITE); int previousErrorNumber = -1; for (VDMError error : result.getErrors()) { if (previousErrorNumber == error.number) {// this check is // done to avoid // error fall // through continue; } else { previousErrorNumber = error.number; } FileUtility.addMarker(file, error.toProblemString(), error.location, IMarker.SEVERITY_ERROR, ICoreConstants.PLUGIN_ID, -1); } } IVdmProject vdmProject = (IVdmProject) project.getAdapter(IVdmProject.class); if (result.getWarnings().size() > 0 && vdmProject != null && vdmProject.hasSuppressWarnings()) { for (VDMWarning warning : result.getWarnings()) { FileUtility.addMarker(file, warning.toProblemString(), warning.location, IMarker.SEVERITY_WARNING, ICoreConstants.PLUGIN_ID, -1); } } } } }
public class class_name { private void setFileMarkers(IFile file, ParseResult result) throws CoreException { if (file != null) { FileUtility.deleteMarker(file, IMarker.PROBLEM, ICoreConstants.PLUGIN_ID); if (result.hasParseErrors()) { file.deleteMarkers(IMarker.PROBLEM, true, IResource.DEPTH_INFINITE); int previousErrorNumber = -1; for (VDMError error : result.getErrors()) { if (previousErrorNumber == error.number) {// this check is // done to avoid // error fall // through continue; } else { previousErrorNumber = error.number; // depends on control dependency: [if], data = [none] } FileUtility.addMarker(file, error.toProblemString(), error.location, IMarker.SEVERITY_ERROR, ICoreConstants.PLUGIN_ID, -1); } } IVdmProject vdmProject = (IVdmProject) project.getAdapter(IVdmProject.class); if (result.getWarnings().size() > 0 && vdmProject != null && vdmProject.hasSuppressWarnings()) { for (VDMWarning warning : result.getWarnings()) { FileUtility.addMarker(file, warning.toProblemString(), warning.location, IMarker.SEVERITY_WARNING, ICoreConstants.PLUGIN_ID, -1); } } } } }
public class class_name { private ConfListVo convert(Config config, String appNameString, String envName, ZkDisconfData zkDisconfData) { ConfListVo confListVo = new ConfListVo(); confListVo.setConfigId(config.getId()); confListVo.setAppId(config.getAppId()); confListVo.setAppName(appNameString); confListVo.setEnvName(envName); confListVo.setEnvId(config.getEnvId()); confListVo.setCreateTime(config.getCreateTime()); confListVo.setModifyTime(config.getUpdateTime().substring(0, 12)); confListVo.setKey(config.getName()); // StringEscapeUtils.escapeHtml escape confListVo.setValue(CodeUtils.unicodeToUtf8(config.getValue())); confListVo.setVersion(config.getVersion()); confListVo.setType(DisConfigTypeEnum.getByType(config.getType()).getModelName()); confListVo.setTypeId(config.getType()); // // // if (zkDisconfData != null) { confListVo.setMachineSize(zkDisconfData.getData().size()); List<ZkDisconfDataItem> datalist = zkDisconfData.getData(); MachineListVo machineListVo = getZkData(datalist, config); confListVo.setErrorNum(machineListVo.getErrorNum()); confListVo.setMachineList(machineListVo.getDatalist()); confListVo.setMachineSize(machineListVo.getMachineSize()); } return confListVo; } }
public class class_name { private ConfListVo convert(Config config, String appNameString, String envName, ZkDisconfData zkDisconfData) { ConfListVo confListVo = new ConfListVo(); confListVo.setConfigId(config.getId()); confListVo.setAppId(config.getAppId()); confListVo.setAppName(appNameString); confListVo.setEnvName(envName); confListVo.setEnvId(config.getEnvId()); confListVo.setCreateTime(config.getCreateTime()); confListVo.setModifyTime(config.getUpdateTime().substring(0, 12)); confListVo.setKey(config.getName()); // StringEscapeUtils.escapeHtml escape confListVo.setValue(CodeUtils.unicodeToUtf8(config.getValue())); confListVo.setVersion(config.getVersion()); confListVo.setType(DisConfigTypeEnum.getByType(config.getType()).getModelName()); confListVo.setTypeId(config.getType()); // // // if (zkDisconfData != null) { confListVo.setMachineSize(zkDisconfData.getData().size()); // depends on control dependency: [if], data = [(zkDisconfData] List<ZkDisconfDataItem> datalist = zkDisconfData.getData(); MachineListVo machineListVo = getZkData(datalist, config); confListVo.setErrorNum(machineListVo.getErrorNum()); // depends on control dependency: [if], data = [none] confListVo.setMachineList(machineListVo.getDatalist()); // depends on control dependency: [if], data = [none] confListVo.setMachineSize(machineListVo.getMachineSize()); // depends on control dependency: [if], data = [none] } return confListVo; } }
public class class_name { public static String createResultCountQuery(String query) { String resultCountQueryString = null; int select = query.toLowerCase().indexOf("select"); int from = query.toLowerCase().indexOf("from"); if (select == -1 || from == -1) { return null; } resultCountQueryString = "select count(" + query.substring(select + 6, from).trim() + ") " + query.substring(from); // remove order by // TODO: remove more parts if (resultCountQueryString.toLowerCase().contains("order by")) { resultCountQueryString = resultCountQueryString.substring(0, resultCountQueryString.toLowerCase().indexOf("order by")); } log.debug("Created query for counting results '{}'", resultCountQueryString); return resultCountQueryString; } }
public class class_name { public static String createResultCountQuery(String query) { String resultCountQueryString = null; int select = query.toLowerCase().indexOf("select"); int from = query.toLowerCase().indexOf("from"); if (select == -1 || from == -1) { return null; // depends on control dependency: [if], data = [none] } resultCountQueryString = "select count(" + query.substring(select + 6, from).trim() + ") " + query.substring(from); // remove order by // TODO: remove more parts if (resultCountQueryString.toLowerCase().contains("order by")) { resultCountQueryString = resultCountQueryString.substring(0, resultCountQueryString.toLowerCase().indexOf("order by")); // depends on control dependency: [if], data = [none] } log.debug("Created query for counting results '{}'", resultCountQueryString); return resultCountQueryString; } }
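For readers tracing the count-query rewrite above, the following standalone sketch applies the same string transformation to a sample query and prints the result. The class name, the sample query, and the expected output are illustrative assumptions, not part of the dataset.

public class CountQuerySketch {
    public static void main(String[] args) {
        String query = "select e from Employee e where e.active = true order by e.name";
        int select = query.toLowerCase().indexOf("select");
        int from = query.toLowerCase().indexOf("from");
        // wrap the select list in count(...) and keep everything from "from" onwards
        String countQuery = "select count(" + query.substring(select + 6, from).trim() + ") "
                + query.substring(from);
        // drop a trailing "order by" clause, mirroring the method above
        int orderBy = countQuery.toLowerCase().indexOf("order by");
        if (orderBy >= 0) {
            countQuery = countQuery.substring(0, orderBy);
        }
        System.out.println(countQuery);
        // prints roughly: select count(e) from Employee e where e.active = true
    }
}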
public class class_name { @Override public void onFileCreate(File file) { try { engine.addTemplate(context.getBundle(0), file.toURI().toURL()); } catch (MalformedURLException e) { LOGGER.error("Cannot compute the url of file {}", file.getAbsolutePath(), e); } } }
public class class_name { @Override public void onFileCreate(File file) { try { engine.addTemplate(context.getBundle(0), file.toURI().toURL()); // depends on control dependency: [try], data = [none] } catch (MalformedURLException e) { LOGGER.error("Cannot compute the url of file {}", file.getAbsolutePath(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void parseNesting(Configuration configuration, Document dc) { NodeList list = dc.getElementsByTagNameNS(SCHEMA_LOCATION, TAG_NESTING); if (list.getLength() > 0) { Node el = list.item(0); configuration.setNestingLimit(nodeAttribute(el, TAG_NESTING_ATTR_LIMIT, Configuration.DEFAULT_NESTING_LIMIT)); configuration.setPropagateNestingException( nodeAttribute(el, TAG_NESTING_ATTR_EXCEPTION, Configuration.DEFAULT_PROPAGATE_NESTING_EXCEPTION) ); } } }
public class class_name { private void parseNesting(Configuration configuration, Document dc) { NodeList list = dc.getElementsByTagNameNS(SCHEMA_LOCATION, TAG_NESTING); if (list.getLength() > 0) { Node el = list.item(0); configuration.setNestingLimit(nodeAttribute(el, TAG_NESTING_ATTR_LIMIT, Configuration.DEFAULT_NESTING_LIMIT)); // depends on control dependency: [if], data = [none] configuration.setPropagateNestingException( nodeAttribute(el, TAG_NESTING_ATTR_EXCEPTION, Configuration.DEFAULT_PROPAGATE_NESTING_EXCEPTION) ); // depends on control dependency: [if], data = [none] } } }
public class class_name { private Set<String> searchForIds(Context context, TermQuery query, ConsistencyLevel readConsistency) { Set<String> ids = Sets.newTreeSet(); BoundStatement bindStatement = m_searchStatement.bind(); bindStatement.setString(Schema.C_TERMS_CONTEXT, context.getId()); bindStatement.setString(Schema.C_TERMS_FIELD, query.getTerm().getField(Constants.DEFAULT_TERM_FIELD)); bindStatement.setString(Schema.C_TERMS_VALUE, query.getTerm().getValue()); bindStatement.setConsistencyLevel(readConsistency); for (Row row : m_session.execute(bindStatement)) { ids.add(row.getString(Constants.Schema.C_TERMS_RESOURCE)); } return ids; } }
public class class_name { private Set<String> searchForIds(Context context, TermQuery query, ConsistencyLevel readConsistency) { Set<String> ids = Sets.newTreeSet(); BoundStatement bindStatement = m_searchStatement.bind(); bindStatement.setString(Schema.C_TERMS_CONTEXT, context.getId()); bindStatement.setString(Schema.C_TERMS_FIELD, query.getTerm().getField(Constants.DEFAULT_TERM_FIELD)); bindStatement.setString(Schema.C_TERMS_VALUE, query.getTerm().getValue()); bindStatement.setConsistencyLevel(readConsistency); for (Row row : m_session.execute(bindStatement)) { ids.add(row.getString(Constants.Schema.C_TERMS_RESOURCE)); // depends on control dependency: [for], data = [row] } return ids; } }
public class class_name { public CalendarWeek plus(Years<Weekcycle> years) { if (years.isEmpty()) { return this; } int y = MathUtils.safeAdd(this.year, years.getAmount()); int effectiveWeek = this.week; if ((this.week == 53) && (maximumOfWeek(y) < 53)) { effectiveWeek = 52; } return CalendarWeek.of(y, effectiveWeek); } }
public class class_name { public CalendarWeek plus(Years<Weekcycle> years) { if (years.isEmpty()) { return this; // depends on control dependency: [if], data = [none] } int y = MathUtils.safeAdd(this.year, years.getAmount()); int effectiveWeek = this.week; if ((this.week == 53) && (maximumOfWeek(y) < 53)) { effectiveWeek = 52; // depends on control dependency: [if], data = [none] } return CalendarWeek.of(y, effectiveWeek); } }
public class class_name { public ValidationResult check(Entry entry) { result = new ValidationResult(); if (entry == null) { return result; } //collect all gene features Collection<Feature> gapFeatures = SequenceEntryUtils.getFeatures(Feature.GAP_FEATURE_NAME, entry); gapFeatures.addAll(SequenceEntryUtils.getFeatures(Feature.ASSEMBLY_GAP_FEATURE_NAME, entry)); if (gapFeatures.isEmpty()) { return result; } if (entry.getSequence() == null || entry.getSequence().getSequenceByte() == null) { return result; } for (Feature gapFeature : gapFeatures) { CompoundLocation<Location> compoundLocation = gapFeature.getLocations(); if (compoundLocation == null || compoundLocation.getLocations() == null || compoundLocation.getLocations().size() != 1) { //if there is more than 1 location, just bail, there are other checks to complain if the location is //not a single location return result; } Location location = compoundLocation.getLocations().get(0); Long start = location.getBeginPosition(); Long end = location.getEndPosition(); byte[] sequenceByte=entry.getSequence().getSequenceByte(); if(sequenceByte==null||start==null||end==null) return result; if(start<0||end>sequenceByte.length) { return result; } int beginPosition=start.intValue(); int endPosition=end.intValue(); for (int i=beginPosition;i<endPosition;i++) { if ('n' != (char) sequenceByte[i]) { ValidationMessage<Origin> message = reportError(gapFeature.getOrigin(), MESSAGE_ID); String report = ValidationMessageManager.getString(FAULTY_SEQUENCE_MESSAGE, start, end); message.setReportMessage(report); return result; } } } return result; } }
public class class_name { public ValidationResult check(Entry entry) { result = new ValidationResult(); if (entry == null) { return result; // depends on control dependency: [if], data = [none] } //collect all gene features Collection<Feature> gapFeatures = SequenceEntryUtils.getFeatures(Feature.GAP_FEATURE_NAME, entry); gapFeatures.addAll(SequenceEntryUtils.getFeatures(Feature.ASSEMBLY_GAP_FEATURE_NAME, entry)); if (gapFeatures.isEmpty()) { return result; // depends on control dependency: [if], data = [none] } if (entry.getSequence() == null || entry.getSequence().getSequenceByte() == null) { return result; // depends on control dependency: [if], data = [none] } for (Feature gapFeature : gapFeatures) { CompoundLocation<Location> compoundLocation = gapFeature.getLocations(); if (compoundLocation == null || compoundLocation.getLocations() == null || compoundLocation.getLocations().size() != 1) { //if there is more than 1 location, just bail, there are other checks to complain if the location is //not a single location return result; // depends on control dependency: [if], data = [none] } Location location = compoundLocation.getLocations().get(0); Long start = location.getBeginPosition(); Long end = location.getEndPosition(); byte[] sequenceByte=entry.getSequence().getSequenceByte(); if(sequenceByte==null||start==null||end==null) return result; if(start<0||end>sequenceByte.length) { return result; // depends on control dependency: [if], data = [none] } int beginPosition=start.intValue(); int endPosition=end.intValue(); for (int i=beginPosition;i<endPosition;i++) { if ('n' != (char) sequenceByte[i]) { ValidationMessage<Origin> message = reportError(gapFeature.getOrigin(), MESSAGE_ID); String report = ValidationMessageManager.getString(FAULTY_SEQUENCE_MESSAGE, start, end); message.setReportMessage(report); // depends on control dependency: [if], data = [none] return result; // depends on control dependency: [if], data = [none] } } } return result; } }
public class class_name { private Map<String,Long> getCalculatedMetrics(Map<String,Object> metrics) { long errorsPerMinVal = 0; long callsPerMinVal = 0; Map<String,Long> calculatedMetrics = new HashMap<>(); Iterator it = metrics.keySet().iterator(); while(it.hasNext()){ String key = (String)it.next(); if(key.equals(ERRORS_PER_MINUTE)){ errorsPerMinVal = (long) metrics.get(key); } if(key.equals(CALLS_PER_MINUTE)){ callsPerMinVal = (long) metrics.get(key); } } // Total Errors // Right now the timeframe is hard-coded to 15 min. Change this if that changes. calculatedMetrics.put(TOTAL_ERRORS,errorsPerMinVal * 15); // Total Calls // Right now the timeframe is hard-coded to 15 min. Change this if that changes. calculatedMetrics.put(TOTAL_CALLS,callsPerMinVal * 15); return calculatedMetrics; } }
public class class_name { private Map<String,Long> getCalculatedMetrics(Map<String,Object> metrics) { long errorsPerMinVal = 0; long callsPerMinVal = 0; Map<String,Long> calculatedMetrics = new HashMap<>(); Iterator it = metrics.keySet().iterator(); while(it.hasNext()){ String key = (String)it.next(); if(key.equals(ERRORS_PER_MINUTE)){ errorsPerMinVal = (long) metrics.get(key); // depends on control dependency: [if], data = [none] } if(key.equals(CALLS_PER_MINUTE)){ callsPerMinVal = (long) metrics.get(key); // depends on control dependency: [if], data = [none] } } // Total Errors // Right now the timeframe is hard-coded to 15 min. Change this if that changes. calculatedMetrics.put(TOTAL_ERRORS,errorsPerMinVal * 15); // Total Calls // Right now the timeframe is hard-coded to 15 min. Change this if that changes. calculatedMetrics.put(TOTAL_CALLS,callsPerMinVal * 15); return calculatedMetrics; } }
public class class_name { public void marshall(ResultAttribute resultAttribute, ProtocolMarshaller protocolMarshaller) { if (resultAttribute == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(resultAttribute.getTypeName(), TYPENAME_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ResultAttribute resultAttribute, ProtocolMarshaller protocolMarshaller) { if (resultAttribute == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(resultAttribute.getTypeName(), TYPENAME_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public java.util.List<String> getConnectIps() { if (connectIps == null) { connectIps = new com.amazonaws.internal.SdkInternalList<String>(); } return connectIps; } }
public class class_name { public java.util.List<String> getConnectIps() { if (connectIps == null) { connectIps = new com.amazonaws.internal.SdkInternalList<String>(); // depends on control dependency: [if], data = [none] } return connectIps; } }
public class class_name { private boolean isWebstartAvailable() { try { Class.forName("javax.jnlp.ServiceManager"); // this causes to go and see if the service is available ServiceManager.lookup("javax.jnlp.PersistenceService"); Log.info("Webstart detected using Muffins"); } catch (Exception e) { Log.info("Using Local File System"); return false; } return true; } }
public class class_name { private boolean isWebstartAvailable() { try { Class.forName("javax.jnlp.ServiceManager"); // this causes to go and see if the service is available ServiceManager.lookup("javax.jnlp.PersistenceService"); // depends on control dependency: [try], data = [none] Log.info("Webstart detected using Muffins"); // depends on control dependency: [try], data = [none] } catch (Exception e) { Log.info("Using Local File System"); return false; } // depends on control dependency: [catch], data = [none] return true; } }
public class class_name { @Override Nullness visitMethodInvocation( MethodInvocationNode node, Updates thenUpdates, Updates elseUpdates, Updates bothUpdates) { ClassAndMethod callee = tryGetMethodSymbol(node.getTree(), Types.instance(context)); if (callee != null && !callee.isStatic) { setNonnullIfTrackable(bothUpdates, node.getTarget().getReceiver()); } setUnconditionalArgumentNullness(bothUpdates, node.getArguments(), callee); setConditionalArgumentNullness( thenUpdates, elseUpdates, node.getArguments(), callee, Types.instance(context), Symtab.instance(context)); return returnValueNullness(node, callee); } }
public class class_name { @Override Nullness visitMethodInvocation( MethodInvocationNode node, Updates thenUpdates, Updates elseUpdates, Updates bothUpdates) { ClassAndMethod callee = tryGetMethodSymbol(node.getTree(), Types.instance(context)); if (callee != null && !callee.isStatic) { setNonnullIfTrackable(bothUpdates, node.getTarget().getReceiver()); // depends on control dependency: [if], data = [none] } setUnconditionalArgumentNullness(bothUpdates, node.getArguments(), callee); setConditionalArgumentNullness( thenUpdates, elseUpdates, node.getArguments(), callee, Types.instance(context), Symtab.instance(context)); return returnValueNullness(node, callee); } }
public class class_name { public AiTextureMapMode getTextureMapModeW(AiTextureType type, int index) { checkTexRange(type, index); Property p = getProperty(PropertyKey.TEX_MAP_MODE_W.m_key); if (null == p || null == p.getData()) { return (AiTextureMapMode) m_defaults.get( PropertyKey.TEX_MAP_MODE_W); } return AiTextureMapMode.fromRawValue(p.getData()); } }
public class class_name { public AiTextureMapMode getTextureMapModeW(AiTextureType type, int index) { checkTexRange(type, index); Property p = getProperty(PropertyKey.TEX_MAP_MODE_W.m_key); if (null == p || null == p.getData()) { return (AiTextureMapMode) m_defaults.get( PropertyKey.TEX_MAP_MODE_W); // depends on control dependency: [if], data = [none] } return AiTextureMapMode.fromRawValue(p.getData()); } }
public class class_name { private void readAsync() throws IOException { stateChangeLock.lock(); final byte[] arr = readAheadBuffer.array(); try { if (endOfStream || readInProgress) { return; } checkReadException(); readAheadBuffer.position(0); readAheadBuffer.flip(); readInProgress = true; } finally { stateChangeLock.unlock(); } executorService.execute(() -> { stateChangeLock.lock(); try { if (isClosed) { readInProgress = false; return; } // Flip this so that the close method will not close the underlying input stream when we // are reading. isReading = true; } finally { stateChangeLock.unlock(); } // Please note that it is safe to release the lock and read into the read ahead buffer // because either of following two conditions will hold - 1. The active buffer has // data available to read so the reader will not read from the read ahead buffer. // 2. This is the first time read is called or the active buffer is exhausted, // in that case the reader waits for this async read to complete. // So there is no race condition in both the situations. int read = 0; int off = 0, len = arr.length; Throwable exception = null; try { // try to fill the read ahead buffer. // if a reader is waiting, possibly return early. do { read = underlyingInputStream.read(arr, off, len); if (read <= 0) break; off += read; len -= read; } while (len > 0 && !isWaiting.get()); } catch (Throwable ex) { exception = ex; if (ex instanceof Error) { // `readException` may not be reported to the user. Rethrow Error to make sure at least // The user can see Error in UncaughtExceptionHandler. throw (Error) ex; } } finally { stateChangeLock.lock(); readAheadBuffer.limit(off); if (read < 0 || (exception instanceof EOFException)) { endOfStream = true; } else if (exception != null) { readAborted = true; readException = exception; } readInProgress = false; signalAsyncReadComplete(); stateChangeLock.unlock(); closeUnderlyingInputStreamIfNecessary(); } }); } }
public class class_name { private void readAsync() throws IOException { stateChangeLock.lock(); final byte[] arr = readAheadBuffer.array(); try { if (endOfStream || readInProgress) { return; // depends on control dependency: [if], data = [none] } checkReadException(); readAheadBuffer.position(0); readAheadBuffer.flip(); readInProgress = true; } finally { stateChangeLock.unlock(); } executorService.execute(() -> { stateChangeLock.lock(); try { if (isClosed) { readInProgress = false; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // Flip this so that the close method will not close the underlying input stream when we // are reading. isReading = true; } finally { stateChangeLock.unlock(); } // Please note that it is safe to release the lock and read into the read ahead buffer // because either of following two conditions will hold - 1. The active buffer has // data available to read so the reader will not read from the read ahead buffer. // 2. This is the first time read is called or the active buffer is exhausted, // in that case the reader waits for this async read to complete. // So there is no race condition in both the situations. int read = 0; int off = 0, len = arr.length; Throwable exception = null; try { // try to fill the read ahead buffer. // if a reader is waiting, possibly return early. do { read = underlyingInputStream.read(arr, off, len); if (read <= 0) break; off += read; len -= read; } while (len > 0 && !isWaiting.get()); } catch (Throwable ex) { exception = ex; if (ex instanceof Error) { // `readException` may not be reported to the user. Rethrow Error to make sure at least // The user can see Error in UncaughtExceptionHandler. throw (Error) ex; } } finally { stateChangeLock.lock(); readAheadBuffer.limit(off); if (read < 0 || (exception instanceof EOFException)) { endOfStream = true; // depends on control dependency: [if], data = [none] } else if (exception != null) { readAborted = true; // depends on control dependency: [if], data = [none] readException = exception; // depends on control dependency: [if], data = [none] } readInProgress = false; signalAsyncReadComplete(); stateChangeLock.unlock(); closeUnderlyingInputStreamIfNecessary(); } }); } }
public class class_name { public void addMessages(CmsMessages messages) throws CmsIllegalArgumentException { Locale locale = messages.getLocale(); if (!getLocale().equals(locale)) { // not the same locale, try to change the locale if this is a simple CmsMessage object if (!(messages instanceof CmsMultiMessages)) { // match locale of multi bundle String bundleName = messages.getBundleName(); messages = new CmsMessages(bundleName, getLocale()); } else { // multi bundles with wrong locales can't be added this way throw new CmsIllegalArgumentException(Messages.get().container( Messages.ERR_MULTIMSG_LOCALE_DOES_NOT_MATCH_2, messages.getLocale(), getLocale())); } } if (!m_messages.contains(messages)) { if ((m_messageCache != null) && (m_messageCache.size() > 0)) { // cache has already been used, must flush because of newly added keys m_messageCache = new Hashtable<String, String>(); } m_messages.add(messages); } } }
public class class_name { public void addMessages(CmsMessages messages) throws CmsIllegalArgumentException { Locale locale = messages.getLocale(); if (!getLocale().equals(locale)) { // not the same locale, try to change the locale if this is a simple CmsMessage object if (!(messages instanceof CmsMultiMessages)) { // match locale of multi bundle String bundleName = messages.getBundleName(); messages = new CmsMessages(bundleName, getLocale()); // depends on control dependency: [if], data = [none] } else { // multi bundles with wrong locales can't be added this way throw new CmsIllegalArgumentException(Messages.get().container( Messages.ERR_MULTIMSG_LOCALE_DOES_NOT_MATCH_2, messages.getLocale(), getLocale())); } } if (!m_messages.contains(messages)) { if ((m_messageCache != null) && (m_messageCache.size() > 0)) { // cache has already been used, must flush because of newly added keys m_messageCache = new Hashtable<String, String>(); // depends on control dependency: [if], data = [none] } m_messages.add(messages); } } }
public class class_name { public static String getDisplayName( TZID tzid, NameStyle style, Locale locale ) { String canonical = tzid.canonical(); int index = canonical.indexOf('~'); ZoneModelProvider provider = DEFAULT_PROVIDER; String zoneID = canonical; if (index >= 0) { String pname = canonical.substring(0, index); if (!pname.equals(NAME_DEFAULT)) { provider = PROVIDERS.get(pname); if (provider == null) { return canonical; } } zoneID = canonical.substring(index + 1); } ZoneNameProvider np = provider.getSpecificZoneNameRepository(); if (np == null) { np = NAME_PROVIDER; } String name = np.getDisplayName(zoneID, style, locale); if (name.isEmpty()) { if (np != NAME_PROVIDER) { name = NAME_PROVIDER.getDisplayName(zoneID, style, locale); } name = (name.isEmpty() ? canonical : name); } return name; } }
public class class_name { public static String getDisplayName( TZID tzid, NameStyle style, Locale locale ) { String canonical = tzid.canonical(); int index = canonical.indexOf('~'); ZoneModelProvider provider = DEFAULT_PROVIDER; String zoneID = canonical; if (index >= 0) { String pname = canonical.substring(0, index); if (!pname.equals(NAME_DEFAULT)) { provider = PROVIDERS.get(pname); // depends on control dependency: [if], data = [none] if (provider == null) { return canonical; // depends on control dependency: [if], data = [none] } } zoneID = canonical.substring(index + 1); // depends on control dependency: [if], data = [(index] } ZoneNameProvider np = provider.getSpecificZoneNameRepository(); if (np == null) { np = NAME_PROVIDER; // depends on control dependency: [if], data = [none] } String name = np.getDisplayName(zoneID, style, locale); if (name.isEmpty()) { if (np != NAME_PROVIDER) { name = NAME_PROVIDER.getDisplayName(zoneID, style, locale); // depends on control dependency: [if], data = [none] } name = (name.isEmpty() ? canonical : name); // depends on control dependency: [if], data = [none] } return name; } }
public class class_name { private void recycleByLayoutStateExpose(RecyclerView.Recycler recycler, LayoutState layoutState) { if (!layoutState.mRecycle) { return; } if (layoutState.mLayoutDirection == LayoutState.LAYOUT_START) { recycleViewsFromEndExpose(recycler, layoutState.mScrollingOffset); } else { recycleViewsFromStartExpose(recycler, layoutState.mScrollingOffset); } } }
public class class_name { private void recycleByLayoutStateExpose(RecyclerView.Recycler recycler, LayoutState layoutState) { if (!layoutState.mRecycle) { return; // depends on control dependency: [if], data = [none] } if (layoutState.mLayoutDirection == LayoutState.LAYOUT_START) { recycleViewsFromEndExpose(recycler, layoutState.mScrollingOffset); // depends on control dependency: [if], data = [none] } else { recycleViewsFromStartExpose(recycler, layoutState.mScrollingOffset); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public Map<String, String> values() { Map<String, String> values = new LinkedHashMap<>(); for (Row each : delegates) { for (Entry<String, String> entry : each.values().entrySet()) { String name = entry.getKey(); if (!values.containsKey(name)) { values.put(name, entry.getValue()); } } } return values; } }
public class class_name { @Override public Map<String, String> values() { Map<String, String> values = new LinkedHashMap<>(); for (Row each : delegates) { for (Entry<String, String> entry : each.values().entrySet()) { String name = entry.getKey(); if (!values.containsKey(name)) { values.put(name, entry.getValue()); // depends on control dependency: [if], data = [none] } } } return values; } }
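The values() override above performs a first-wins merge across delegate rows while keeping the order in which keys are first encountered. Below is a minimal, self-contained illustration of that merge policy; the class name and sample rows are made up, and List.of/Map.of assume Java 9 or later.

import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class FirstWinsMergeSketch {
    public static void main(String[] args) {
        List<Map<String, String>> rows = List.of(
                Map.of("id", "1", "name", "alpha"),
                Map.of("id", "2", "comment", "beta"));
        Map<String, String> merged = new LinkedHashMap<>();
        for (Map<String, String> row : rows) {
            for (Map.Entry<String, String> e : row.entrySet()) {
                merged.putIfAbsent(e.getKey(), e.getValue()); // keep the first value seen for each key
            }
        }
        System.out.println(merged); // "id" stays "1" because the first row wins
    }
}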
public class class_name { private MasterEntry updateLpMaps(LWMConfig lp) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "updateLpMaps", lp); } SIBLocalizationPoint lpConfig = ((SIBLocalizationPoint) lp); // Create a new LocalizationDefinition and update the lpMap with it String lpName = lpConfig.getIdentifier(); if (lpMap.containsKey(lpName)) { lpMap.remove(lpName); } LocalizationDefinition ld = ((JsAdminFactoryImpl) jsaf).createLocalizationDefinition(lpConfig); LocalizationEntry lEntry = new LocalizationEntry(ld); lpMap.put(lpName, lEntry); String destName = lpName.substring(0, lpName.indexOf("@")); MasterEntry masterEntry = masterMap.get(destName); if (masterEntry == null) { masterEntry = new MasterEntry(); } masterEntry.setDestinationLocalization(ld); masterMap.put(destName, masterEntry); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "updateLpMaps", lpMap); } return masterEntry; } }
public class class_name { private MasterEntry updateLpMaps(LWMConfig lp) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.entry(tc, "updateLpMaps", lp); // depends on control dependency: [if], data = [none] } SIBLocalizationPoint lpConfig = ((SIBLocalizationPoint) lp); // Create a new LocalizationDefinition and update the lpMap with it String lpName = lpConfig.getIdentifier(); if (lpMap.containsKey(lpName)) { lpMap.remove(lpName); // depends on control dependency: [if], data = [none] } LocalizationDefinition ld = ((JsAdminFactoryImpl) jsaf).createLocalizationDefinition(lpConfig); LocalizationEntry lEntry = new LocalizationEntry(ld); lpMap.put(lpName, lEntry); String destName = lpName.substring(0, lpName.indexOf("@")); MasterEntry masterEntry = masterMap.get(destName); if (masterEntry == null) { masterEntry = new MasterEntry(); // depends on control dependency: [if], data = [none] } masterEntry.setDestinationLocalization(ld); masterMap.put(destName, masterEntry); if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) { SibTr.exit(tc, "updateLpMaps", lpMap); // depends on control dependency: [if], data = [none] } return masterEntry; } }
public class class_name { public X500Principal asX500Principal() { if (x500Principal == null) { try { Object[] args = new Object[] {this}; x500Principal = (X500Principal)principalConstructor.newInstance(args); } catch (Exception e) { throw new RuntimeException("Unexpected exception", e); } } return x500Principal; } }
public class class_name { public X500Principal asX500Principal() { if (x500Principal == null) { try { Object[] args = new Object[] {this}; x500Principal = (X500Principal)principalConstructor.newInstance(args); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new RuntimeException("Unexpected exception", e); } // depends on control dependency: [catch], data = [none] } return x500Principal; } }
public class class_name { public synchronized boolean ifAcceptingWorkIncrementActivityCount() { if (tc.isDebugEnabled()) Tr.debug(tc, "isAcceptingWork", new Boolean(!_disabled)); if(!_disabled) { _activityCount++; if (tc.isDebugEnabled()) Tr.debug(tc, "_activityCount", new Object[]{this, new Integer(_activityCount)}); return true; } return false; } }
public class class_name { public synchronized boolean ifAcceptingWorkIncrementActivityCount() { if (tc.isDebugEnabled()) Tr.debug(tc, "isAcceptingWork", new Boolean(!_disabled)); if(!_disabled) { _activityCount++; // depends on control dependency: [if], data = [none] if (tc.isDebugEnabled()) Tr.debug(tc, "_activityCount", new Object[]{this, new Integer(_activityCount)}); return true; // depends on control dependency: [if], data = [none] } return false; } }
public class class_name { public void addMean( double[] point , double responsibility ) { for (int i = 0; i < mean.numRows; i++) { mean.data[i] += responsibility*point[i]; } weight += responsibility; } }
public class class_name { public void addMean( double[] point , double responsibility ) { for (int i = 0; i < mean.numRows; i++) { mean.data[i] += responsibility*point[i]; // depends on control dependency: [for], data = [i] } weight += responsibility; } }
public class class_name { public SpecTopic getClosestTopicByDBId(final Integer DBId, final SpecNode callerNode, final boolean checkParentNode) { final SpecTopic retValue = super.getClosestTopicByDBId(DBId, callerNode, checkParentNode); if (retValue != null) { return retValue; } else { // Look up the metadata topics final ContentSpec contentSpec = getContentSpec(); for (final Node contentSpecNode : contentSpec.getNodes()) { if (contentSpecNode instanceof KeyValueNode && ((KeyValueNode) contentSpecNode).getValue() instanceof SpecTopic) { final SpecTopic childTopic = (SpecTopic) ((KeyValueNode) contentSpecNode).getValue(); if (childTopic.getDBId().equals(DBId)) { return childTopic; } } } return null; } } }
public class class_name { public SpecTopic getClosestTopicByDBId(final Integer DBId, final SpecNode callerNode, final boolean checkParentNode) { final SpecTopic retValue = super.getClosestTopicByDBId(DBId, callerNode, checkParentNode); if (retValue != null) { return retValue; // depends on control dependency: [if], data = [none] } else { // Look up the metadata topics final ContentSpec contentSpec = getContentSpec(); for (final Node contentSpecNode : contentSpec.getNodes()) { if (contentSpecNode instanceof KeyValueNode && ((KeyValueNode) contentSpecNode).getValue() instanceof SpecTopic) { final SpecTopic childTopic = (SpecTopic) ((KeyValueNode) contentSpecNode).getValue(); if (childTopic.getDBId().equals(DBId)) { return childTopic; // depends on control dependency: [if], data = [none] } } } return null; // depends on control dependency: [if], data = [none] } } }
public class class_name { public Long getTimestamp() { Object timestamp = getHeader(KafkaMessageHeaders.TIMESTAMP); if (timestamp != null) { return Long.valueOf(timestamp.toString()); } return null; } }
public class class_name { public Long getTimestamp() { Object timestamp = getHeader(KafkaMessageHeaders.TIMESTAMP); if (timestamp != null) { return Long.valueOf(timestamp.toString()); // depends on control dependency: [if], data = [(timestamp] } return null; } }
public class class_name { public void removeExtension() { if (geoPackage.isTable(MetadataReference.TABLE_NAME)) { geoPackage.dropTable(MetadataReference.TABLE_NAME); } if (geoPackage.isTable(Metadata.TABLE_NAME)) { geoPackage.dropTable(Metadata.TABLE_NAME); } try { if (extensionsDao.isTableExists()) { extensionsDao.deleteByExtension(EXTENSION_NAME); } } catch (SQLException e) { throw new GeoPackageException( "Failed to delete Metadata extension. GeoPackage: " + geoPackage.getName(), e); } } }
public class class_name { public void removeExtension() { if (geoPackage.isTable(MetadataReference.TABLE_NAME)) { geoPackage.dropTable(MetadataReference.TABLE_NAME); // depends on control dependency: [if], data = [none] } if (geoPackage.isTable(Metadata.TABLE_NAME)) { geoPackage.dropTable(Metadata.TABLE_NAME); // depends on control dependency: [if], data = [none] } try { if (extensionsDao.isTableExists()) { extensionsDao.deleteByExtension(EXTENSION_NAME); // depends on control dependency: [if], data = [none] } } catch (SQLException e) { throw new GeoPackageException( "Failed to delete Metadata extension. GeoPackage: " + geoPackage.getName(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public ProjectCalendarHours getHours(Day day) { ProjectCalendarHours result = getCalendarHours(day); if (result == null) { // // If this is a base calendar and we have no hours, then we // have a problem - so we add the default hours and try again // if (m_parent == null) { // Only add default hours for the day that is 'missing' to avoid overwriting real calendar hours addDefaultCalendarHours(day); result = getCalendarHours(day); } else { result = m_parent.getHours(day); } } return result; } }
public class class_name { public ProjectCalendarHours getHours(Day day) { ProjectCalendarHours result = getCalendarHours(day); if (result == null) { // // If this is a base calendar and we have no hours, then we // have a problem - so we add the default hours and try again // if (m_parent == null) { // Only add default hours for the day that is 'missing' to avoid overwriting real calendar hours addDefaultCalendarHours(day); // depends on control dependency: [if], data = [none] result = getCalendarHours(day); // depends on control dependency: [if], data = [none] } else { result = m_parent.getHours(day); // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { public void download() {
        DataKeeperService service = DataKeeperUtils.getService();
        setDocumentId(Integer.parseInt(FacesUtils.getRequest().getParameter("documentId")));
        DocumentInfo document = service.findDocumentByDocumentId(getDocumentId());
        final HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance()
                .getExternalContext().getResponse();
        ServletOutputStream out = null;
        try {
            out = response.getOutputStream();
            // write the whole document instead of a fixed 4096-byte slice
            out.write(document.getData());
        } catch (IOException e) {
        } finally {
            if (out != null) {
                try {
                    out.flush();
                    out.close();
                } catch (IOException e) {
                }
            }
        }
        FacesContext.getCurrentInstance().responseComplete();
    } }

public class class_name { public void download() {
        DataKeeperService service = DataKeeperUtils.getService();
        setDocumentId(Integer.parseInt(FacesUtils.getRequest().getParameter("documentId")));
        DocumentInfo document = service.findDocumentByDocumentId(getDocumentId());
        final HttpServletResponse response = (HttpServletResponse) FacesContext.getCurrentInstance()
                .getExternalContext().getResponse();
        ServletOutputStream out = null;
        try {
            out = response.getOutputStream(); // depends on control dependency: [try], data = [none]
            // write the whole document instead of a fixed 4096-byte slice
            out.write(document.getData()); // depends on control dependency: [try], data = [none]
        } catch (IOException e) {
        } // depends on control dependency: [catch], data = [none]
        finally {
            if (out != null) {
                try {
                    out.flush(); // depends on control dependency: [try], data = [none]
                    out.close(); // depends on control dependency: [try], data = [none]
                } catch (IOException e) {
                } // depends on control dependency: [catch], data = [none]
            }
        }
        FacesContext.getCurrentInstance().responseComplete();
    } }
public class class_name { private void handleReadResult(ReadSegment request, ReadResult result) { String segment = request.getSegment(); ArrayList<ReadResultEntryContents> cachedEntries = new ArrayList<>(); ReadResultEntry nonCachedEntry = collectCachedEntries(request.getOffset(), result, cachedEntries); final String operation = "readSegment"; boolean truncated = nonCachedEntry != null && nonCachedEntry.getType() == Truncated; boolean endOfSegment = nonCachedEntry != null && nonCachedEntry.getType() == EndOfStreamSegment; boolean atTail = nonCachedEntry != null && nonCachedEntry.getType() == Future; if (!cachedEntries.isEmpty() || endOfSegment) { // We managed to collect some data. Send it. ByteBuffer data = copyData(cachedEntries); SegmentRead reply = new SegmentRead(segment, request.getOffset(), atTail, endOfSegment, data, request.getRequestId()); connection.send(reply); this.statsRecorder.read(segment, reply.getData().array().length); } else if (truncated) { // We didn't collect any data, instead we determined that the current read offset was truncated. // Determine the current Start Offset and send that back. segmentStore.getStreamSegmentInfo(segment, TIMEOUT) .thenAccept(info -> connection.send(new SegmentIsTruncated(request.getRequestId(), segment, info.getStartOffset(), EMPTY_STACK_TRACE, nonCachedEntry.getStreamSegmentOffset()))) .exceptionally(e -> handleException(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), operation, wrapCancellationException(e))); } else { Preconditions.checkState(nonCachedEntry != null, "No ReadResultEntries returned from read!?"); nonCachedEntry.requestContent(TIMEOUT); nonCachedEntry.getContent() .thenAccept(contents -> { ByteBuffer data = copyData(Collections.singletonList(contents)); SegmentRead reply = new SegmentRead(segment, nonCachedEntry.getStreamSegmentOffset(), false, endOfSegment, data, request.getRequestId()); connection.send(reply); this.statsRecorder.read(segment, reply.getData().array().length); }) .exceptionally(e -> { if (Exceptions.unwrap(e) instanceof StreamSegmentTruncatedException) { // The Segment may have been truncated in Storage after we got this entry but before we managed // to make a read. In that case, send the appropriate error back. final String clientReplyStackTrace = replyWithStackTraceOnError ? e.getMessage() : EMPTY_STACK_TRACE; connection.send(new SegmentIsTruncated(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), clientReplyStackTrace, nonCachedEntry.getStreamSegmentOffset())); } else { handleException(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), operation, wrapCancellationException(e)); } return null; }) .exceptionally(e -> handleException(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), operation, wrapCancellationException(e))); } } }
public class class_name { private void handleReadResult(ReadSegment request, ReadResult result) { String segment = request.getSegment(); ArrayList<ReadResultEntryContents> cachedEntries = new ArrayList<>(); ReadResultEntry nonCachedEntry = collectCachedEntries(request.getOffset(), result, cachedEntries); final String operation = "readSegment"; boolean truncated = nonCachedEntry != null && nonCachedEntry.getType() == Truncated; boolean endOfSegment = nonCachedEntry != null && nonCachedEntry.getType() == EndOfStreamSegment; boolean atTail = nonCachedEntry != null && nonCachedEntry.getType() == Future; if (!cachedEntries.isEmpty() || endOfSegment) { // We managed to collect some data. Send it. ByteBuffer data = copyData(cachedEntries); SegmentRead reply = new SegmentRead(segment, request.getOffset(), atTail, endOfSegment, data, request.getRequestId()); connection.send(reply); // depends on control dependency: [if], data = [none] this.statsRecorder.read(segment, reply.getData().array().length); // depends on control dependency: [if], data = [none] } else if (truncated) { // We didn't collect any data, instead we determined that the current read offset was truncated. // Determine the current Start Offset and send that back. segmentStore.getStreamSegmentInfo(segment, TIMEOUT) .thenAccept(info -> connection.send(new SegmentIsTruncated(request.getRequestId(), segment, info.getStartOffset(), EMPTY_STACK_TRACE, nonCachedEntry.getStreamSegmentOffset()))) .exceptionally(e -> handleException(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), operation, wrapCancellationException(e))); // depends on control dependency: [if], data = [none] } else { Preconditions.checkState(nonCachedEntry != null, "No ReadResultEntries returned from read!?"); // depends on control dependency: [if], data = [none] nonCachedEntry.requestContent(TIMEOUT); // depends on control dependency: [if], data = [none] nonCachedEntry.getContent() .thenAccept(contents -> { ByteBuffer data = copyData(Collections.singletonList(contents)); // depends on control dependency: [if], data = [none] SegmentRead reply = new SegmentRead(segment, nonCachedEntry.getStreamSegmentOffset(), false, endOfSegment, data, request.getRequestId()); connection.send(reply); // depends on control dependency: [if], data = [none] this.statsRecorder.read(segment, reply.getData().array().length); // depends on control dependency: [if], data = [none] }) .exceptionally(e -> { if (Exceptions.unwrap(e) instanceof StreamSegmentTruncatedException) { // The Segment may have been truncated in Storage after we got this entry but before we managed // to make a read. In that case, send the appropriate error back. final String clientReplyStackTrace = replyWithStackTraceOnError ? e.getMessage() : EMPTY_STACK_TRACE; connection.send(new SegmentIsTruncated(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), clientReplyStackTrace, nonCachedEntry.getStreamSegmentOffset())); // depends on control dependency: [if], data = [none] } else { handleException(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), operation, wrapCancellationException(e)); // depends on control dependency: [if], data = [none] } return null; }) .exceptionally(e -> handleException(request.getRequestId(), segment, nonCachedEntry.getStreamSegmentOffset(), operation, wrapCancellationException(e))); } } }
public class class_name { private void buttonPasteActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_buttonPasteActionPerformed try { this.editorPane.replaceSelection((String) Toolkit.getDefaultToolkit().getSystemClipboard().getData(DataFlavor.stringFlavor)); } catch (UnsupportedFlavorException ex) { // no text data in clipboard } catch (IOException ex) { LOGGER.error("Error during paste from clipboard", ex); //NOI18N } } }
public class class_name { private void buttonPasteActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_buttonPasteActionPerformed try { this.editorPane.replaceSelection((String) Toolkit.getDefaultToolkit().getSystemClipboard().getData(DataFlavor.stringFlavor)); // depends on control dependency: [try], data = [none] } catch (UnsupportedFlavorException ex) { // no text data in clipboard } catch (IOException ex) { // depends on control dependency: [catch], data = [none] LOGGER.error("Error during paste from clipboard", ex); //NOI18N } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void checkAndReleaseAllocatedResource(final ExecutionGraph executionGraph, final AllocatedResource allocatedResource) { if (allocatedResource == null) { LOG.error("Resource to lock is null!"); return; } if (allocatedResource.getInstance() instanceof DummyInstance) { LOG.debug("Available instance is of type DummyInstance!"); return; } boolean resourceCanBeReleased = true; final Iterator<ExecutionVertex> it = allocatedResource.assignedVertices(); while (it.hasNext()) { final ExecutionVertex vertex = it.next(); final ExecutionState state = vertex.getExecutionState(); if (state != ExecutionState.CREATED && state != ExecutionState.FINISHED && state != ExecutionState.FAILED && state != ExecutionState.CANCELED) { resourceCanBeReleased = false; break; } } if (resourceCanBeReleased) { LOG.info("Releasing instance " + allocatedResource.getInstance()); try { getInstanceManager().releaseAllocatedResource(executionGraph.getJobID(), executionGraph .getJobConfiguration(), allocatedResource); } catch (InstanceException e) { LOG.error(StringUtils.stringifyException(e)); } } } }
public class class_name { public void checkAndReleaseAllocatedResource(final ExecutionGraph executionGraph, final AllocatedResource allocatedResource) { if (allocatedResource == null) { LOG.error("Resource to lock is null!"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if (allocatedResource.getInstance() instanceof DummyInstance) { LOG.debug("Available instance is of type DummyInstance!"); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } boolean resourceCanBeReleased = true; final Iterator<ExecutionVertex> it = allocatedResource.assignedVertices(); while (it.hasNext()) { final ExecutionVertex vertex = it.next(); final ExecutionState state = vertex.getExecutionState(); if (state != ExecutionState.CREATED && state != ExecutionState.FINISHED && state != ExecutionState.FAILED && state != ExecutionState.CANCELED) { resourceCanBeReleased = false; // depends on control dependency: [if], data = [none] break; } } if (resourceCanBeReleased) { LOG.info("Releasing instance " + allocatedResource.getInstance()); // depends on control dependency: [if], data = [none] try { getInstanceManager().releaseAllocatedResource(executionGraph.getJobID(), executionGraph .getJobConfiguration(), allocatedResource); // depends on control dependency: [try], data = [none] } catch (InstanceException e) { LOG.error(StringUtils.stringifyException(e)); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { private static long remainder(long dividend, long divisor) { if (divisor < 0) { // i.e., divisor >= 2^63: if (compareUnsigned(dividend, divisor) < 0) { return dividend; // dividend < divisor } else { return dividend - divisor; // dividend >= divisor } } // Optimization - use signed modulus if dividend < 2^63 if (dividend >= 0) { return dividend % divisor; } /* * Otherwise, approximate the quotient, check, and correct if necessary. Our approximation is guaranteed to be * either exact or one less than the correct value. This follows from fact that floor(floor(x)/i) == floor(x/i) * for any real x and integer i != 0. The proof is not quite trivial. */ long quotient = ((dividend >>> 1) / divisor) << 1; long rem = dividend - quotient * divisor; return rem - (compareUnsigned(rem, divisor) >= 0 ? divisor : 0); } }
public class class_name { private static long remainder(long dividend, long divisor) { if (divisor < 0) { // i.e., divisor >= 2^63: if (compareUnsigned(dividend, divisor) < 0) { return dividend; // dividend < divisor // depends on control dependency: [if], data = [none] } else { return dividend - divisor; // dividend >= divisor // depends on control dependency: [if], data = [none] } } // Optimization - use signed modulus if dividend < 2^63 if (dividend >= 0) { return dividend % divisor; // depends on control dependency: [if], data = [none] } /* * Otherwise, approximate the quotient, check, and correct if necessary. Our approximation is guaranteed to be * either exact or one less than the correct value. This follows from fact that floor(floor(x)/i) == floor(x/i) * for any real x and integer i != 0. The proof is not quite trivial. */ long quotient = ((dividend >>> 1) / divisor) << 1; long rem = dividend - quotient * divisor; return rem - (compareUnsigned(rem, divisor) >= 0 ? divisor : 0); } }
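Since the remainder method above re-implements unsigned 64-bit remainder (essentially the approach used by Guava's UnsignedLongs), a quick way to sanity-check it is to compare against java.lang.Long.remainderUnsigned, available since Java 8. The sketch below inlines the same algorithm, with Long.compareUnsigned standing in for the original compareUnsigned helper; the class name and test values are illustrative.

public class UnsignedRemainderSketch {
    static long remainder(long dividend, long divisor) {
        if (divisor < 0) { // unsigned divisor >= 2^63
            return Long.compareUnsigned(dividend, divisor) < 0 ? dividend : dividend - divisor;
        }
        if (dividend >= 0) {
            return dividend % divisor; // plain signed remainder is safe here
        }
        long quotient = ((dividend >>> 1) / divisor) << 1; // exact or one too small
        long rem = dividend - quotient * divisor;
        return rem - (Long.compareUnsigned(rem, divisor) >= 0 ? divisor : 0);
    }

    public static void main(String[] args) {
        long[] dividends = {-1L, Long.MIN_VALUE, 1234567890123456789L};
        long[] divisors = {3L, 10L, Long.MIN_VALUE + 5};
        for (long a : dividends) {
            for (long b : divisors) {
                System.out.println(remainder(a, b) == Long.remainderUnsigned(a, b)); // expect true
            }
        }
    }
}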
public class class_name { public PutInstancePublicPortsRequest withPortInfos(PortInfo... portInfos) { if (this.portInfos == null) { setPortInfos(new java.util.ArrayList<PortInfo>(portInfos.length)); } for (PortInfo ele : portInfos) { this.portInfos.add(ele); } return this; } }
public class class_name { public PutInstancePublicPortsRequest withPortInfos(PortInfo... portInfos) { if (this.portInfos == null) { setPortInfos(new java.util.ArrayList<PortInfo>(portInfos.length)); // depends on control dependency: [if], data = [none] } for (PortInfo ele : portInfos) { this.portInfos.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public static String capitalize(String pString, int pIndex) { if (pIndex < 0) { throw new IndexOutOfBoundsException("Negative index not allowed: " + pIndex); } if (pString == null || pString.length() <= pIndex) { return pString; } // This is the fastest method, according to my tests // Skip array duplication if allready capitalized if (Character.isUpperCase(pString.charAt(pIndex))) { return pString; } // Convert to char array, capitalize and create new String char[] charArray = pString.toCharArray(); charArray[pIndex] = Character.toUpperCase(charArray[pIndex]); return new String(charArray); /** StringBuilder buf = new StringBuilder(pString); buf.setCharAt(pIndex, Character.toUpperCase(buf.charAt(pIndex))); return buf.toString(); //*/ /** return pString.substring(0, pIndex) + Character.toUpperCase(pString.charAt(pIndex)) + pString.substring(pIndex + 1); //*/ } }
public class class_name { public static String capitalize(String pString, int pIndex) { if (pIndex < 0) { throw new IndexOutOfBoundsException("Negative index not allowed: " + pIndex); } if (pString == null || pString.length() <= pIndex) { return pString; // depends on control dependency: [if], data = [none] } // This is the fastest method, according to my tests // Skip array duplication if allready capitalized if (Character.isUpperCase(pString.charAt(pIndex))) { return pString; // depends on control dependency: [if], data = [none] } // Convert to char array, capitalize and create new String char[] charArray = pString.toCharArray(); charArray[pIndex] = Character.toUpperCase(charArray[pIndex]); return new String(charArray); /** StringBuilder buf = new StringBuilder(pString); buf.setCharAt(pIndex, Character.toUpperCase(buf.charAt(pIndex))); return buf.toString(); //*/ /** return pString.substring(0, pIndex) + Character.toUpperCase(pString.charAt(pIndex)) + pString.substring(pIndex + 1); //*/ } }
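A tiny usage sketch of the index-based capitalization shown above. This simplified version drops the negative-index guard and the commented-out alternatives, and the class name is made up.

public class CapitalizeSketch {
    static String capitalize(String s, int index) {
        if (s == null || s.length() <= index || Character.isUpperCase(s.charAt(index))) {
            return s; // nothing to do
        }
        char[] chars = s.toCharArray();
        chars[index] = Character.toUpperCase(chars[index]);
        return new String(chars);
    }

    public static void main(String[] args) {
        System.out.println(capitalize("hello world", 0)); // Hello world
        System.out.println(capitalize("hello world", 6)); // hello World
        System.out.println(capitalize("Hello world", 0)); // unchanged: Hello world
    }
}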
public class class_name { public QueryScopeInfo getQueryServiceMetricInfo(CharacterFilter filter) { if (queryServiceScopeInfo == null) { queryServiceScopeInfo = createQueryServiceMetricInfo(filter); } return queryServiceScopeInfo; } }
public class class_name { public QueryScopeInfo getQueryServiceMetricInfo(CharacterFilter filter) { if (queryServiceScopeInfo == null) { queryServiceScopeInfo = createQueryServiceMetricInfo(filter); // depends on control dependency: [if], data = [none] } return queryServiceScopeInfo; } }
public class class_name { @Override @FFDCIgnore(Exception.class) //manually logged protected int overQualLastAccessTimeUpdate(BackedSession sess, long nowTime) { final boolean trace = TraceComponent.isAnyTracingEnabled(); String id = sess.getId(); int updateCount; try { if (trace && tc.isDebugEnabled()) tcInvoke(tcSessionMetaCache, "get", id); synchronized (sess) { ArrayList<?> oldValue = sessionMetaCache.get(id); if (trace && tc.isDebugEnabled()) tcReturn(tcSessionMetaCache, "get", oldValue); SessionInfo sessionInfo = oldValue == null ? null : new SessionInfo(oldValue).clone(); long curAccessTime = sess.getCurrentAccessTime(); if (sessionInfo == null || sessionInfo.getLastAccess() != curAccessTime) { if (trace && tc.isDebugEnabled()) Tr.debug(this, tc, "session current access time: " + curAccessTime); updateCount = 0; } else if (sessionInfo.getLastAccess() >= nowTime) { // avoid setting last access when the cache already has a later time updateCount = 1; // be consistent with Statement.executeUpdate which returns 1 when the row matches but no changes are made } else { sessionInfo.setLastAccess(nowTime); ArrayList<?> newValue = sessionInfo.getArrayList(); if (trace && tc.isDebugEnabled()) tcInvoke(tcSessionMetaCache, "replace", id, oldValue, newValue); boolean replaced = sessionMetaCache.replace(id, oldValue, newValue); if (trace && tc.isDebugEnabled()) tcReturn(tcSessionMetaCache, "replace", replaced); if (replaced) { sess.updateLastAccessTime(nowTime); updateCount = 1; } else { updateCount = 0; } } } } catch(Exception ex) { FFDCFilter.processException(ex, "com.ibm.ws.session.store.cache.CacheHashMap.overQualLastAccessTimeUpdate", "859", this, new Object[] { sess }); Tr.error(tc, "ERROR_CACHE_ACCESS", ex); throw new RuntimeException(Tr.formatMessage(tc, "INTERNAL_SERVER_ERROR")); } return updateCount; } }
public class class_name { @Override @FFDCIgnore(Exception.class) //manually logged protected int overQualLastAccessTimeUpdate(BackedSession sess, long nowTime) { final boolean trace = TraceComponent.isAnyTracingEnabled(); String id = sess.getId(); int updateCount; try { if (trace && tc.isDebugEnabled()) tcInvoke(tcSessionMetaCache, "get", id); synchronized (sess) { // depends on control dependency: [try], data = [none] ArrayList<?> oldValue = sessionMetaCache.get(id); if (trace && tc.isDebugEnabled()) tcReturn(tcSessionMetaCache, "get", oldValue); SessionInfo sessionInfo = oldValue == null ? null : new SessionInfo(oldValue).clone(); long curAccessTime = sess.getCurrentAccessTime(); if (sessionInfo == null || sessionInfo.getLastAccess() != curAccessTime) { if (trace && tc.isDebugEnabled()) Tr.debug(this, tc, "session current access time: " + curAccessTime); updateCount = 0; // depends on control dependency: [if], data = [none] } else if (sessionInfo.getLastAccess() >= nowTime) { // avoid setting last access when the cache already has a later time updateCount = 1; // be consistent with Statement.executeUpdate which returns 1 when the row matches but no changes are made // depends on control dependency: [if], data = [none] } else { sessionInfo.setLastAccess(nowTime); // depends on control dependency: [if], data = [none] ArrayList<?> newValue = sessionInfo.getArrayList(); if (trace && tc.isDebugEnabled()) tcInvoke(tcSessionMetaCache, "replace", id, oldValue, newValue); boolean replaced = sessionMetaCache.replace(id, oldValue, newValue); if (trace && tc.isDebugEnabled()) tcReturn(tcSessionMetaCache, "replace", replaced); if (replaced) { sess.updateLastAccessTime(nowTime); // depends on control dependency: [if], data = [none] updateCount = 1; // depends on control dependency: [if], data = [none] } else { updateCount = 0; // depends on control dependency: [if], data = [none] } } } } catch(Exception ex) { FFDCFilter.processException(ex, "com.ibm.ws.session.store.cache.CacheHashMap.overQualLastAccessTimeUpdate", "859", this, new Object[] { sess }); Tr.error(tc, "ERROR_CACHE_ACCESS", ex); throw new RuntimeException(Tr.formatMessage(tc, "INTERNAL_SERVER_ERROR")); } // depends on control dependency: [catch], data = [none] return updateCount; } }
public class class_name { public static void main(final String[] args) {
        // describe all instances in aws-mock
        List<Instance> allInstances = describeAllInstances();
        for (Instance i : allInstances) {
            System.out.println(i.getInstanceId() + " - " + i.getState().getName());
        }
        // describe specified instances in aws-mock
        List<Instance> someInstances = describeInstances(Arrays.asList("i-12345678", "i-abcdef00"));
        for (Instance i : someInstances) {
            System.out.println(i.getInstanceId() + " - " + i.getState().getName());
        }
    } }
public class class_name { public static void main(final String[] args) {
        // describe all instances in aws-mock
        List<Instance> allInstances = describeAllInstances();
        for (Instance i : allInstances) {
            System.out.println(i.getInstanceId() + " - " + i.getState().getName()); // depends on control dependency: [for], data = [i]
        }
        // describe specified instances in aws-mock
        List<Instance> someInstances = describeInstances(Arrays.asList("i-12345678", "i-abcdef00"));
        for (Instance i : someInstances) {
            System.out.println(i.getInstanceId() + " - " + i.getState().getName()); // depends on control dependency: [for], data = [i]
        }
    } }