code
stringlengths 130
281k
| code_dependency
stringlengths 182
306k
|
---|---|
public class class_name {
private void makePS() {
String nproj = gds.getParam(GridDefRecord.NPPROJ);
double latOrigin = (nproj == null || nproj.equalsIgnoreCase("true")) ? 90.0 : -90.0;
// Why the scale factor?. according to GRIB docs:
// "Grid lengths are in units of meters, at the 60 degree latitude circle nearest to the pole"
// since the scale factor at 60 degrees = k = 2*k0/(1+sin(60)) [Snyder,Working Manual p157]
// then to make scale = 1 at 60 degrees, k0 = (1+sin(60))/2 = .933
double scale;
double lad = gds.getDouble(GridDefRecord.LAD);
if (Double.isNaN(lad)) {
scale = .933;
} else {
scale = (1.0+Math.sin( Math.toRadians( Math.abs(lad)) ))/2;
}
proj = new Stereographic(latOrigin, gds.getDouble(GridDefRecord.LOV), scale);
// we have to project in order to find the origin
ProjectionPointImpl start = (ProjectionPointImpl) proj.latLonToProj(
new LatLonPointImpl( gds.getDouble(GridDefRecord.LA1), gds.getDouble(GridDefRecord.LO1)));
startx = start.getX();
starty = start.getY();
if (Double.isNaN(getDxInKm()))
setDxDy(startx, starty, proj);
if (GridServiceProvider.debugProj) {
System.out.printf("starting proj coord %s lat/lon %s%n", start, proj.projToLatLon(start));
System.out.println(" should be LA1=" + gds.getDouble(GridDefRecord.LA1) + " l)1=" + gds.getDouble(GridDefRecord.LO1));
}
attributes.add(new Attribute(GridCF.GRID_MAPPING_NAME, "polar_stereographic"));
//attributes.add(new Attribute("longitude_of_projection_origin",
attributes.add(new Attribute(GridCF.LONGITUDE_OF_PROJECTION_ORIGIN, gds.getDouble(GridDefRecord.LOV)));
//attributes.add(new Attribute("straight_vertical_longitude_from_pole",
attributes.add(new Attribute( GridCF.STRAIGHT_VERTICAL_LONGITUDE_FROM_POLE, gds.getDouble(GridDefRecord.LOV)));
//attributes.add(new Attribute("scale_factor_at_projection_origin",
attributes.add(new Attribute(GridCF.SCALE_FACTOR_AT_PROJECTION_ORIGIN, scale));
attributes.add(new Attribute(GridCF.LATITUDE_OF_PROJECTION_ORIGIN, latOrigin));
} } | public class class_name {
private void makePS() {
String nproj = gds.getParam(GridDefRecord.NPPROJ);
double latOrigin = (nproj == null || nproj.equalsIgnoreCase("true")) ? 90.0 : -90.0;
// Why the scale factor?. according to GRIB docs:
// "Grid lengths are in units of meters, at the 60 degree latitude circle nearest to the pole"
// since the scale factor at 60 degrees = k = 2*k0/(1+sin(60)) [Snyder,Working Manual p157]
// then to make scale = 1 at 60 degrees, k0 = (1+sin(60))/2 = .933
double scale;
double lad = gds.getDouble(GridDefRecord.LAD);
if (Double.isNaN(lad)) {
scale = .933; // depends on control dependency: [if], data = [none]
} else {
scale = (1.0+Math.sin( Math.toRadians( Math.abs(lad)) ))/2; // depends on control dependency: [if], data = [none]
}
proj = new Stereographic(latOrigin, gds.getDouble(GridDefRecord.LOV), scale);
// we have to project in order to find the origin
ProjectionPointImpl start = (ProjectionPointImpl) proj.latLonToProj(
new LatLonPointImpl( gds.getDouble(GridDefRecord.LA1), gds.getDouble(GridDefRecord.LO1)));
startx = start.getX();
starty = start.getY();
if (Double.isNaN(getDxInKm()))
setDxDy(startx, starty, proj);
if (GridServiceProvider.debugProj) {
System.out.printf("starting proj coord %s lat/lon %s%n", start, proj.projToLatLon(start)); // depends on control dependency: [if], data = [none]
System.out.println(" should be LA1=" + gds.getDouble(GridDefRecord.LA1) + " l)1=" + gds.getDouble(GridDefRecord.LO1)); // depends on control dependency: [if], data = [none]
}
attributes.add(new Attribute(GridCF.GRID_MAPPING_NAME, "polar_stereographic"));
//attributes.add(new Attribute("longitude_of_projection_origin",
attributes.add(new Attribute(GridCF.LONGITUDE_OF_PROJECTION_ORIGIN, gds.getDouble(GridDefRecord.LOV)));
//attributes.add(new Attribute("straight_vertical_longitude_from_pole",
attributes.add(new Attribute( GridCF.STRAIGHT_VERTICAL_LONGITUDE_FROM_POLE, gds.getDouble(GridDefRecord.LOV)));
//attributes.add(new Attribute("scale_factor_at_projection_origin",
attributes.add(new Attribute(GridCF.SCALE_FACTOR_AT_PROJECTION_ORIGIN, scale));
attributes.add(new Attribute(GridCF.LATITUDE_OF_PROJECTION_ORIGIN, latOrigin));
} } |
public class class_name {
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
WValidationErrors errors = (WValidationErrors) component;
XmlStringBuilder xml = renderContext.getWriter();
if (errors.hasErrors()) {
xml.appendTagOpen("ui:validationerrors");
xml.appendAttribute("id", component.getId());
xml.appendOptionalAttribute("class", component.getHtmlClass());
xml.appendOptionalAttribute("track", component.isTracking(), "true");
xml.appendOptionalAttribute("title", errors.getTitleText());
xml.appendClose();
for (GroupedDiagnositcs nextGroup : errors.getGroupedErrors()) {
// Render each diagnostic message in this group.
for (Diagnostic nextMessage : nextGroup.getDiagnostics()) {
xml.appendTagOpen("ui:error");
WComponent forComponent = nextMessage.getComponent();
if (forComponent != null) {
UIContextHolder.pushContext(nextMessage.getContext());
try {
xml.appendAttribute("for", forComponent.getId());
} finally {
UIContextHolder.popContext();
}
}
xml.appendClose();
// DiagnosticImpl has been extended to support rendering
// of a WComponent as the message.
if (nextMessage instanceof DiagnosticImpl) {
WComponent messageComponent = ((DiagnosticImpl) nextMessage)
.createDiagnosticErrorComponent();
// We add the component to a throw-away container so that it renders with the correct ID.
WContainer container = new WContainer() {
@Override
public String getId() {
return component.getId();
}
};
container.add(messageComponent);
messageComponent.paint(renderContext);
container.remove(messageComponent);
container.reset();
} else {
xml.append(nextMessage.getDescription());
}
xml.appendEndTag("ui:error");
}
}
xml.appendEndTag("ui:validationerrors");
}
} } | public class class_name {
@Override
public void doRender(final WComponent component, final WebXmlRenderContext renderContext) {
WValidationErrors errors = (WValidationErrors) component;
XmlStringBuilder xml = renderContext.getWriter();
if (errors.hasErrors()) {
xml.appendTagOpen("ui:validationerrors"); // depends on control dependency: [if], data = [none]
xml.appendAttribute("id", component.getId()); // depends on control dependency: [if], data = [none]
xml.appendOptionalAttribute("class", component.getHtmlClass()); // depends on control dependency: [if], data = [none]
xml.appendOptionalAttribute("track", component.isTracking(), "true"); // depends on control dependency: [if], data = [none]
xml.appendOptionalAttribute("title", errors.getTitleText()); // depends on control dependency: [if], data = [none]
xml.appendClose(); // depends on control dependency: [if], data = [none]
for (GroupedDiagnositcs nextGroup : errors.getGroupedErrors()) {
// Render each diagnostic message in this group.
for (Diagnostic nextMessage : nextGroup.getDiagnostics()) {
xml.appendTagOpen("ui:error"); // depends on control dependency: [for], data = [none]
WComponent forComponent = nextMessage.getComponent();
if (forComponent != null) {
UIContextHolder.pushContext(nextMessage.getContext()); // depends on control dependency: [if], data = [none]
try {
xml.appendAttribute("for", forComponent.getId()); // depends on control dependency: [try], data = [none]
} finally {
UIContextHolder.popContext();
}
}
xml.appendClose(); // depends on control dependency: [for], data = [none]
// DiagnosticImpl has been extended to support rendering
// of a WComponent as the message.
if (nextMessage instanceof DiagnosticImpl) {
WComponent messageComponent = ((DiagnosticImpl) nextMessage)
.createDiagnosticErrorComponent();
// We add the component to a throw-away container so that it renders with the correct ID.
WContainer container = new WContainer() {
@Override
public String getId() {
return component.getId();
}
};
container.add(messageComponent); // depends on control dependency: [if], data = [none]
messageComponent.paint(renderContext); // depends on control dependency: [if], data = [none]
container.remove(messageComponent); // depends on control dependency: [if], data = [none]
container.reset(); // depends on control dependency: [if], data = [none]
} else {
xml.append(nextMessage.getDescription()); // depends on control dependency: [if], data = [none]
}
xml.appendEndTag("ui:error"); // depends on control dependency: [for], data = [none]
}
}
xml.appendEndTag("ui:validationerrors"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String[] removeEmpties(final String... values) {
if (values == null || values.length == 0){
return new String[0];
}
List<String> validValues = new ArrayList<String>();
for (String value : values){
if (value != null && value.length() > 0){
validValues.add(value);
}
}
return validValues.toArray(new String[validValues.size()]);
} } | public class class_name {
public static String[] removeEmpties(final String... values) {
if (values == null || values.length == 0){
return new String[0]; // depends on control dependency: [if], data = [none]
}
List<String> validValues = new ArrayList<String>();
for (String value : values){
if (value != null && value.length() > 0){
validValues.add(value); // depends on control dependency: [if], data = [(value]
}
}
return validValues.toArray(new String[validValues.size()]);
} } |
public class class_name {
public static int priorityOf(Class<?> someClass) {
while (someClass != null) {
Priority annotation = someClass.getAnnotation(Priority.class);
if (annotation != null) {
return annotation.value();
}
someClass = someClass.getSuperclass();
}
return 0;
} } | public class class_name {
public static int priorityOf(Class<?> someClass) {
while (someClass != null) {
Priority annotation = someClass.getAnnotation(Priority.class);
if (annotation != null) {
return annotation.value(); // depends on control dependency: [if], data = [none]
}
someClass = someClass.getSuperclass(); // depends on control dependency: [while], data = [none]
}
return 0;
} } |
public class class_name {
public String getQualifiedName() {
if (this.module == null || this.module.getName() == null) {
return name;
}
if (this.module.getName().equals("core") || this.module.getName().equals("filters")
|| this.module.getName().equals("elements")) {
return "kurento." + name;
} else {
return this.module.getName() + "." + name;
}
} } | public class class_name {
public String getQualifiedName() {
if (this.module == null || this.module.getName() == null) {
return name; // depends on control dependency: [if], data = [none]
}
if (this.module.getName().equals("core") || this.module.getName().equals("filters")
|| this.module.getName().equals("elements")) {
return "kurento." + name; // depends on control dependency: [if], data = [none]
} else {
return this.module.getName() + "." + name; // depends on control dependency: [if], data = [this]
}
} } |
public class class_name {
private void createCMaps() {
cmaps = new java.util.ArrayList();
TTFCmapEntry tce = new TTFCmapEntry();
Iterator e = unicodeMapping.listIterator();
UnicodeMapping um = (UnicodeMapping)e.next();
UnicodeMapping lastMapping = um;
tce.setUnicodeStart(um.getUnicodeIndex());
tce.setGlyphStartIndex(um.getGlyphIndex());
while (e.hasNext()) {
um = (UnicodeMapping)e.next();
if (((lastMapping.getUnicodeIndex() + 1) != um.getUnicodeIndex())
|| ((lastMapping.getGlyphIndex() + 1) != um.getGlyphIndex())) {
tce.setUnicodeEnd(lastMapping.getUnicodeIndex());
cmaps.add(tce);
tce = new TTFCmapEntry();
tce.setUnicodeStart(um.getUnicodeIndex());
tce.setGlyphStartIndex(um.getGlyphIndex());
}
lastMapping = um;
}
tce.setUnicodeEnd(um.getUnicodeIndex());
cmaps.add(tce);
} } | public class class_name {
private void createCMaps() {
cmaps = new java.util.ArrayList();
TTFCmapEntry tce = new TTFCmapEntry();
Iterator e = unicodeMapping.listIterator();
UnicodeMapping um = (UnicodeMapping)e.next();
UnicodeMapping lastMapping = um;
tce.setUnicodeStart(um.getUnicodeIndex());
tce.setGlyphStartIndex(um.getGlyphIndex());
while (e.hasNext()) {
um = (UnicodeMapping)e.next(); // depends on control dependency: [while], data = [none]
if (((lastMapping.getUnicodeIndex() + 1) != um.getUnicodeIndex())
|| ((lastMapping.getGlyphIndex() + 1) != um.getGlyphIndex())) {
tce.setUnicodeEnd(lastMapping.getUnicodeIndex()); // depends on control dependency: [if], data = [none]
cmaps.add(tce); // depends on control dependency: [if], data = [none]
tce = new TTFCmapEntry(); // depends on control dependency: [if], data = [none]
tce.setUnicodeStart(um.getUnicodeIndex()); // depends on control dependency: [if], data = [um.getUnicodeIndex())]
tce.setGlyphStartIndex(um.getGlyphIndex()); // depends on control dependency: [if], data = [none]
}
lastMapping = um; // depends on control dependency: [while], data = [none]
}
tce.setUnicodeEnd(um.getUnicodeIndex());
cmaps.add(tce);
} } |
public class class_name {
public void synchronize(final Context context) {
SportsState previousState = this.previousState;
boolean firstMessage = false;
if (previousState == null) {
previousState = this.previousState = new SportsState();
firstMessage = true;
}
PebbleDictionary message = new PebbleDictionary();
if (getTimeInSec() != previousState.getTimeInSec() || firstMessage) {
previousState.setTimeInSec(getTimeInSec());
message.addString(Constants.SPORTS_TIME_KEY, convertSecondsToString(getTimeInSec()));
}
if (getDistance() != previousState.getDistance() || firstMessage) {
previousState.setDistance(getDistance());
message.addString(Constants.SPORTS_DISTANCE_KEY, convertDistanceToString(getDistance()));
}
if (this.paceInSec != null) {
message.addUint8(Constants.SPORTS_LABEL_KEY, (byte)Constants.SPORTS_DATA_PACE);
if (getPaceInSec() != previousState.getPaceInSec()) {
previousState.setPaceInSec(getPaceInSec());
message.addString(Constants.SPORTS_DATA_KEY, convertSecondsToString(getPaceInSec()));
}
}
if (this.speed != null) {
message.addUint8(Constants.SPORTS_LABEL_KEY, (byte)Constants.SPORTS_DATA_SPEED);
if (getSpeed() != previousState.getSpeed()) {
previousState.setSpeed(getSpeed());
message.addString(Constants.SPORTS_DATA_KEY, convertDistanceToString(getSpeed()));
}
}
if (this.heartBPM != null) {
if (getHeartBPM() != previousState.getHeartBPM()) {
previousState.setHeartBPM(getHeartBPM());
message.addUint8(Constants.SPORTS_HR_BPM_KEY, getHeartBPM());
}
}
if (getCustomLabel() != null && getCustomValue() != null) {
if (!getCustomLabel().equals(previousState.getCustomLabel())) {
previousState.setCustomLabel(getCustomLabel());
message.addString(Constants.SPORTS_CUSTOM_LABEL_KEY, getCustomLabel());
}
if (!getCustomValue().equals(previousState.getCustomValue())) {
previousState.setCustomValue(getCustomValue());
message.addString(Constants.SPORTS_CUSTOM_VALUE_KEY, getCustomValue());
}
}
PebbleKit.sendDataToPebble(context, Constants.SPORTS_UUID, message);
} } | public class class_name {
public void synchronize(final Context context) {
SportsState previousState = this.previousState;
boolean firstMessage = false;
if (previousState == null) {
previousState = this.previousState = new SportsState(); // depends on control dependency: [if], data = [none]
firstMessage = true; // depends on control dependency: [if], data = [none]
}
PebbleDictionary message = new PebbleDictionary();
if (getTimeInSec() != previousState.getTimeInSec() || firstMessage) {
previousState.setTimeInSec(getTimeInSec()); // depends on control dependency: [if], data = [(getTimeInSec()]
message.addString(Constants.SPORTS_TIME_KEY, convertSecondsToString(getTimeInSec())); // depends on control dependency: [if], data = [(getTimeInSec()]
}
if (getDistance() != previousState.getDistance() || firstMessage) {
previousState.setDistance(getDistance()); // depends on control dependency: [if], data = [(getDistance()]
message.addString(Constants.SPORTS_DISTANCE_KEY, convertDistanceToString(getDistance())); // depends on control dependency: [if], data = [(getDistance()]
}
if (this.paceInSec != null) {
message.addUint8(Constants.SPORTS_LABEL_KEY, (byte)Constants.SPORTS_DATA_PACE); // depends on control dependency: [if], data = [none]
if (getPaceInSec() != previousState.getPaceInSec()) {
previousState.setPaceInSec(getPaceInSec()); // depends on control dependency: [if], data = [(getPaceInSec()]
message.addString(Constants.SPORTS_DATA_KEY, convertSecondsToString(getPaceInSec())); // depends on control dependency: [if], data = [(getPaceInSec()]
}
}
if (this.speed != null) {
message.addUint8(Constants.SPORTS_LABEL_KEY, (byte)Constants.SPORTS_DATA_SPEED); // depends on control dependency: [if], data = [none]
if (getSpeed() != previousState.getSpeed()) {
previousState.setSpeed(getSpeed()); // depends on control dependency: [if], data = [(getSpeed()]
message.addString(Constants.SPORTS_DATA_KEY, convertDistanceToString(getSpeed())); // depends on control dependency: [if], data = [(getSpeed()]
}
}
if (this.heartBPM != null) {
if (getHeartBPM() != previousState.getHeartBPM()) {
previousState.setHeartBPM(getHeartBPM()); // depends on control dependency: [if], data = [(getHeartBPM()]
message.addUint8(Constants.SPORTS_HR_BPM_KEY, getHeartBPM()); // depends on control dependency: [if], data = [none]
}
}
if (getCustomLabel() != null && getCustomValue() != null) {
if (!getCustomLabel().equals(previousState.getCustomLabel())) {
previousState.setCustomLabel(getCustomLabel()); // depends on control dependency: [if], data = [none]
message.addString(Constants.SPORTS_CUSTOM_LABEL_KEY, getCustomLabel()); // depends on control dependency: [if], data = [none]
}
if (!getCustomValue().equals(previousState.getCustomValue())) {
previousState.setCustomValue(getCustomValue()); // depends on control dependency: [if], data = [none]
message.addString(Constants.SPORTS_CUSTOM_VALUE_KEY, getCustomValue()); // depends on control dependency: [if], data = [none]
}
}
PebbleKit.sendDataToPebble(context, Constants.SPORTS_UUID, message);
} } |
public class class_name {
public static String getWthYearDuration(Map wthData) {
String yearDur = "";
ArrayList<Map> wthRecords = (ArrayList) getObjectOr(wthData, "dailyWeather", new ArrayList());
if (!wthRecords.isEmpty()) {
// Get the year of starting date and end date
String startYear = getValueOr((wthRecords.get(0)), "w_date", " ").substring(2, 4).trim();
String endYear = getValueOr((wthRecords.get(wthRecords.size() - 1)), "w_date", " ").substring(2, 4).trim();
// If not available, do not show year and duration in the file name
if (!startYear.equals("") && !endYear.equals("")) {
yearDur += startYear;
try {
int iStartYear = Integer.parseInt(startYear);
int iEndYear = Integer.parseInt(endYear);
iStartYear += iStartYear <= 15 ? 2000 : 1900; // P.S. 2015 is the cross year for the current version
iEndYear += iEndYear <= 15 ? 2000 : 1900; // P.S. 2015 is the cross year for the current version
int duration = iEndYear - iStartYear + 1;
// P.S. Currently the system only support the maximum of 99 years for duration
duration = duration > 99 ? 99 : duration;
yearDur += String.format("%02d", duration);
} catch (Exception e) {
yearDur += "01"; // Default duration uses 01 (minimum value)
}
}
}
return yearDur;
} } | public class class_name {
public static String getWthYearDuration(Map wthData) {
String yearDur = "";
ArrayList<Map> wthRecords = (ArrayList) getObjectOr(wthData, "dailyWeather", new ArrayList());
if (!wthRecords.isEmpty()) {
// Get the year of starting date and end date
String startYear = getValueOr((wthRecords.get(0)), "w_date", " ").substring(2, 4).trim();
String endYear = getValueOr((wthRecords.get(wthRecords.size() - 1)), "w_date", " ").substring(2, 4).trim();
// If not available, do not show year and duration in the file name
if (!startYear.equals("") && !endYear.equals("")) {
yearDur += startYear; // depends on control dependency: [if], data = [none]
try {
int iStartYear = Integer.parseInt(startYear);
int iEndYear = Integer.parseInt(endYear);
iStartYear += iStartYear <= 15 ? 2000 : 1900; // P.S. 2015 is the cross year for the current version // depends on control dependency: [try], data = [none]
iEndYear += iEndYear <= 15 ? 2000 : 1900; // P.S. 2015 is the cross year for the current version // depends on control dependency: [try], data = [none]
int duration = iEndYear - iStartYear + 1;
// P.S. Currently the system only support the maximum of 99 years for duration
duration = duration > 99 ? 99 : duration; // depends on control dependency: [try], data = [none]
yearDur += String.format("%02d", duration); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
yearDur += "01"; // Default duration uses 01 (minimum value)
} // depends on control dependency: [catch], data = [none]
}
}
return yearDur;
} } |
public class class_name {
private String emitDataTypes() {
StringBuilder dtDefs = new StringBuilder();
while (emittedDatatypes.size() < datatypes.size()) {
for (String dt : new HashSet<String>(datatypes)) {
if (!emittedDatatypes.contains(dt)) {
StructureDefinition sd = context.fetchResource(StructureDefinition.class,
ProfileUtilities.sdNs(dt, null));
// TODO: Figure out why the line below doesn't work
// if (sd != null && !uniq_structures.contains(sd))
if(sd != null && !uniq_structure_urls.contains(sd.getUrl()))
dtDefs.append("\n").append(genShapeDefinition(sd, false));
emittedDatatypes.add(dt);
}
}
}
return dtDefs.toString();
} } | public class class_name {
private String emitDataTypes() {
StringBuilder dtDefs = new StringBuilder();
while (emittedDatatypes.size() < datatypes.size()) {
for (String dt : new HashSet<String>(datatypes)) {
if (!emittedDatatypes.contains(dt)) {
StructureDefinition sd = context.fetchResource(StructureDefinition.class,
ProfileUtilities.sdNs(dt, null));
// TODO: Figure out why the line below doesn't work
// if (sd != null && !uniq_structures.contains(sd))
if(sd != null && !uniq_structure_urls.contains(sd.getUrl()))
dtDefs.append("\n").append(genShapeDefinition(sd, false));
emittedDatatypes.add(dt);
// depends on control dependency: [if], data = [none]
}
}
}
return dtDefs.toString();
} } |
public class class_name {
public static double getDouble(Config config, String path, double def) {
if (config.hasPath(path)) {
return config.getDouble(path);
}
return def;
} } | public class class_name {
public static double getDouble(Config config, String path, double def) {
if (config.hasPath(path)) {
return config.getDouble(path); // depends on control dependency: [if], data = [none]
}
return def;
} } |
public class class_name {
public java.util.List<Command> getCommands() {
if (commands == null) {
commands = new com.amazonaws.internal.SdkInternalList<Command>();
}
return commands;
} } | public class class_name {
public java.util.List<Command> getCommands() {
if (commands == null) {
commands = new com.amazonaws.internal.SdkInternalList<Command>(); // depends on control dependency: [if], data = [none]
}
return commands;
} } |
public class class_name {
public CacheId[] getCacheIds(){
CacheId[] cacheIds = new CacheId[configEntry.cacheIds.length];
for ( int i=0; i<configEntry.cacheIds.length; i++ ){
cacheIds[i] = new CacheId(configEntry.cacheIds[i]);
}
return cacheIds;
} } | public class class_name {
public CacheId[] getCacheIds(){
CacheId[] cacheIds = new CacheId[configEntry.cacheIds.length];
for ( int i=0; i<configEntry.cacheIds.length; i++ ){
cacheIds[i] = new CacheId(configEntry.cacheIds[i]); // depends on control dependency: [for], data = [i]
}
return cacheIds;
} } |
public class class_name {
private Object handleNonTxWriteCommand(InvocationContext ctx, WriteCommand command) {
if (trace) log.tracef("handleNonTxWriteCommand for command %s, topology id %d", command, command.getTopologyId());
updateTopologyId(command);
// Only catch OutdatedTopologyExceptions on the originator
if (!ctx.isOriginLocal()) {
return invokeNext(ctx, command);
}
return invokeNextAndHandle(ctx, command, handleNonTxWriteReturn);
} } | public class class_name {
private Object handleNonTxWriteCommand(InvocationContext ctx, WriteCommand command) {
if (trace) log.tracef("handleNonTxWriteCommand for command %s, topology id %d", command, command.getTopologyId());
updateTopologyId(command);
// Only catch OutdatedTopologyExceptions on the originator
if (!ctx.isOriginLocal()) {
return invokeNext(ctx, command); // depends on control dependency: [if], data = [none]
}
return invokeNextAndHandle(ctx, command, handleNonTxWriteReturn);
} } |
public class class_name {
private void removeRec(O obj) {
graph.remove(obj);
for(int i = 0; i < numelems; ++i) {
if(obj == elems[i]) {
System.arraycopy(elems, i + 1, elems, i, --numelems - i);
elems[numelems] = null;
return;
}
}
} } | public class class_name {
private void removeRec(O obj) {
graph.remove(obj);
for(int i = 0; i < numelems; ++i) {
if(obj == elems[i]) {
System.arraycopy(elems, i + 1, elems, i, --numelems - i); // depends on control dependency: [if], data = [none]
elems[numelems] = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public Name xClassName(Type t) {
if (t.hasTag(CLASS)) {
return names.fromUtf(externalize(t.tsym.flatName()));
} else if (t.hasTag(ARRAY)) {
return typeSig(types.erasure(t));
} else {
throw new AssertionError("xClassName");
}
} } | public class class_name {
public Name xClassName(Type t) {
if (t.hasTag(CLASS)) {
return names.fromUtf(externalize(t.tsym.flatName())); // depends on control dependency: [if], data = [none]
} else if (t.hasTag(ARRAY)) {
return typeSig(types.erasure(t)); // depends on control dependency: [if], data = [none]
} else {
throw new AssertionError("xClassName");
}
} } |
public class class_name {
private boolean isSuppressedRow(String ruleId, Map<String, Object> row, String primaryColumn) {
Object primaryValue = row.get(primaryColumn);
if (primaryValue != null && Suppress.class.isAssignableFrom(primaryValue.getClass())) {
Suppress suppress = (Suppress) primaryValue;
for (String suppressId : suppress.getSuppressIds()) {
if (ruleId.equals(suppressId)) {
return true;
}
}
}
return false;
} } | public class class_name {
private boolean isSuppressedRow(String ruleId, Map<String, Object> row, String primaryColumn) {
Object primaryValue = row.get(primaryColumn);
if (primaryValue != null && Suppress.class.isAssignableFrom(primaryValue.getClass())) {
Suppress suppress = (Suppress) primaryValue;
for (String suppressId : suppress.getSuppressIds()) {
if (ruleId.equals(suppressId)) {
return true; // depends on control dependency: [if], data = [none]
}
}
}
return false;
} } |
public class class_name {
public String getInitParameter(String name) {
ServletConfig sc = getServletConfig();
if (sc == null) {
throw new IllegalStateException(
lStrings.getString("err.servlet_config_not_initialized"));
}
return sc.getInitParameter(name);
} } | public class class_name {
public String getInitParameter(String name) {
ServletConfig sc = getServletConfig();
if (sc == null) {
throw new IllegalStateException(
lStrings.getString("err.servlet_config_not_initialized"));
}
return sc.getInitParameter(name); // depends on control dependency: [if], data = [none]
} } |
public class class_name {
byte[] decodeLikeAnEngineer(final byte[] input) {
if (input == null) {
throw new NullPointerException("input");
}
if ((input.length & 0x01) == 0x01) {
throw new IllegalArgumentException(
"input.length(" + input.length + ") is not even");
}
final byte[] output = new byte[input.length >> 1];
int index = 0; // index in input
for (int i = 0; i < output.length; i++) {
output[i] = (byte) ((decodeHalf(input[index++]) << 4)
| decodeHalf(input[index++]));
}
return output;
} } | public class class_name {
byte[] decodeLikeAnEngineer(final byte[] input) {
if (input == null) {
throw new NullPointerException("input");
}
if ((input.length & 0x01) == 0x01) {
throw new IllegalArgumentException(
"input.length(" + input.length + ") is not even");
}
final byte[] output = new byte[input.length >> 1];
int index = 0; // index in input
for (int i = 0; i < output.length; i++) {
output[i] = (byte) ((decodeHalf(input[index++]) << 4)
| decodeHalf(input[index++])); // depends on control dependency: [for], data = [i]
}
return output;
} } |
public class class_name {
public static boolean loadPageLayoutData(DockingManager manager, String pageId, Perspective perspective){
manager.beginLoadLayoutData();
try{
if(isValidLayout(manager, pageId, perspective)){
String pageLayout = MessageFormat.format(PAGE_LAYOUT, pageId, perspective.getId());
manager.loadLayoutDataFrom(pageLayout);
return true;
}
else{
manager.loadLayoutData();
return false;
}
}
catch(Exception e){
manager.loadLayoutData();
return false;
}
} } | public class class_name {
public static boolean loadPageLayoutData(DockingManager manager, String pageId, Perspective perspective){
manager.beginLoadLayoutData();
try{
if(isValidLayout(manager, pageId, perspective)){
String pageLayout = MessageFormat.format(PAGE_LAYOUT, pageId, perspective.getId());
manager.loadLayoutDataFrom(pageLayout); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
else{
manager.loadLayoutData(); // depends on control dependency: [if], data = [none]
return false; // depends on control dependency: [if], data = [none]
}
}
catch(Exception e){
manager.loadLayoutData();
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public <RO> SingleRuleContext<DPO, TDPO, RO> check(Rule<TDPO, RO> rule) {
List<Rule<TDPO, RO>> addedRules = new ArrayList<Rule<TDPO, RO>>();
if (rule != null) {
addedRules.add(rule);
}
// Change context
return new SingleRuleContext<DPO, TDPO, RO>(addedTriggers, addedDataProviders, dataProviderToRuleMapping,
addedRuleInputTransformers, addedRules);
} } | public class class_name {
public <RO> SingleRuleContext<DPO, TDPO, RO> check(Rule<TDPO, RO> rule) {
List<Rule<TDPO, RO>> addedRules = new ArrayList<Rule<TDPO, RO>>();
if (rule != null) {
addedRules.add(rule); // depends on control dependency: [if], data = [(rule]
}
// Change context
return new SingleRuleContext<DPO, TDPO, RO>(addedTriggers, addedDataProviders, dataProviderToRuleMapping,
addedRuleInputTransformers, addedRules);
} } |
public class class_name {
public void marshall(ListFragmentsRequest listFragmentsRequest, ProtocolMarshaller protocolMarshaller) {
if (listFragmentsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listFragmentsRequest.getStreamName(), STREAMNAME_BINDING);
protocolMarshaller.marshall(listFragmentsRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(listFragmentsRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(listFragmentsRequest.getFragmentSelector(), FRAGMENTSELECTOR_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(ListFragmentsRequest listFragmentsRequest, ProtocolMarshaller protocolMarshaller) {
if (listFragmentsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listFragmentsRequest.getStreamName(), STREAMNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listFragmentsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listFragmentsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listFragmentsRequest.getFragmentSelector(), FRAGMENTSELECTOR_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@VisibleForTesting
synchronized Bucket<V> getBucket(int bucketedSize) {
// get an existing bucket
Bucket<V> bucket = mBuckets.get(bucketedSize);
if (bucket != null || !mAllowNewBuckets) {
return bucket;
}
// create a new bucket
if (FLog.isLoggable(FLog.VERBOSE)) {
FLog.v(TAG, "creating new bucket %s", bucketedSize);
}
Bucket<V> newBucket = newBucket(bucketedSize);
mBuckets.put(bucketedSize, newBucket);
return newBucket;
} } | public class class_name {
@VisibleForTesting
synchronized Bucket<V> getBucket(int bucketedSize) {
// get an existing bucket
Bucket<V> bucket = mBuckets.get(bucketedSize);
if (bucket != null || !mAllowNewBuckets) {
return bucket; // depends on control dependency: [if], data = [none]
}
// create a new bucket
if (FLog.isLoggable(FLog.VERBOSE)) {
FLog.v(TAG, "creating new bucket %s", bucketedSize); // depends on control dependency: [if], data = [none]
}
Bucket<V> newBucket = newBucket(bucketedSize);
mBuckets.put(bucketedSize, newBucket);
return newBucket;
} } |
public class class_name {
public void updateFilters(List<CmsDataViewFilter> newFilters) {
if (newFilters.isEmpty()) {
m_filterContainer.setVisible(false);
}
if (m_filters.equals(newFilters)) {
return;
}
m_filterContainer.removeAllComponents();
m_filters = newFilters;
m_filterMap.clear();
for (CmsDataViewFilter filter : newFilters) {
m_filterMap.put(filter.getId(), filter);
final CmsDataViewFilter finalFilter = filter;
ComboBox select = new ComboBox(filter.getNiceName());
select.setWidth("175px");
select.setNullSelectionAllowed(false);
select.setPageLength(0);
Map<String, String> options = filter.getOptions();
for (Map.Entry<String, String> entry : options.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
select.addItem(key);
select.setItemCaption(key, value);
}
select.setValue(filter.getValue());
if (filter.getHelpText() != null) {
select.setDescription(filter.getHelpText());
}
select.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = 1L;
public void valueChange(ValueChangeEvent event) {
String newValue = (String)(event.getProperty().getValue());
updateFilter(finalFilter.getId(), newValue);
}
});
m_filterContainer.addComponent(select);
}
} } | public class class_name {
public void updateFilters(List<CmsDataViewFilter> newFilters) {
if (newFilters.isEmpty()) {
m_filterContainer.setVisible(false); // depends on control dependency: [if], data = [none]
}
if (m_filters.equals(newFilters)) {
return; // depends on control dependency: [if], data = [none]
}
m_filterContainer.removeAllComponents();
m_filters = newFilters;
m_filterMap.clear();
for (CmsDataViewFilter filter : newFilters) {
m_filterMap.put(filter.getId(), filter); // depends on control dependency: [for], data = [filter]
final CmsDataViewFilter finalFilter = filter;
ComboBox select = new ComboBox(filter.getNiceName());
select.setWidth("175px"); // depends on control dependency: [for], data = [none]
select.setNullSelectionAllowed(false); // depends on control dependency: [for], data = [none]
select.setPageLength(0); // depends on control dependency: [for], data = [none]
Map<String, String> options = filter.getOptions();
for (Map.Entry<String, String> entry : options.entrySet()) {
String key = entry.getKey();
String value = entry.getValue();
select.addItem(key); // depends on control dependency: [for], data = [none]
select.setItemCaption(key, value); // depends on control dependency: [for], data = [none]
}
select.setValue(filter.getValue()); // depends on control dependency: [for], data = [filter]
if (filter.getHelpText() != null) {
select.setDescription(filter.getHelpText()); // depends on control dependency: [if], data = [(filter.getHelpText()]
}
select.addValueChangeListener(new ValueChangeListener() {
private static final long serialVersionUID = 1L;
public void valueChange(ValueChangeEvent event) {
String newValue = (String)(event.getProperty().getValue());
updateFilter(finalFilter.getId(), newValue);
}
}); // depends on control dependency: [for], data = [none]
m_filterContainer.addComponent(select); // depends on control dependency: [for], data = [filter]
}
} } |
public class class_name {
public Optional<CoreNLPParseNode> terminalHead() {
if (terminal()) {
return Optional.of(this);
}
if (immediateHead().isPresent()) {
return immediateHead().get().terminalHead();
}
return Optional.absent();
} } | public class class_name {
public Optional<CoreNLPParseNode> terminalHead() {
if (terminal()) {
return Optional.of(this); // depends on control dependency: [if], data = [none]
}
if (immediateHead().isPresent()) {
return immediateHead().get().terminalHead(); // depends on control dependency: [if], data = [none]
}
return Optional.absent();
} } |
public class class_name {
public final void parseVersion( String version )
{
unparsed = version;
int index = version.indexOf( "-" );
String part1;
String part2 = null;
if ( index < 0 )
{
part1 = version;
}
else
{
part1 = version.substring( 0, index );
part2 = version.substring( index + 1 );
}
if ( part2 != null )
{
try
{
if ( ( part2.length() == 1 ) || !part2.startsWith( "0" ) )
{
buildNumber = Integer.valueOf( part2 );
}
else
{
qualifier = part2;
}
}
catch ( NumberFormatException e )
{
qualifier = part2;
}
}
if ( !part1.contains(".") && !part1.startsWith( "0" ) )
{
try
{
majorVersion = Integer.valueOf( part1 );
}
catch ( NumberFormatException e )
{
// qualifier is the whole version, including "-"
qualifier = version;
buildNumber = null;
}
}
else
{
boolean fallback = false;
StringTokenizer tok = new StringTokenizer( part1, "." );
try
{
majorVersion = getNextIntegerToken( tok );
if ( tok.hasMoreTokens() )
{
minorVersion = getNextIntegerToken( tok );
}
if ( tok.hasMoreTokens() )
{
incrementalVersion = getNextIntegerToken( tok );
}
if ( tok.hasMoreTokens() )
{
fallback = true;
}
// string tokenzier won't detect these and ignores them
if (part1.contains("..") || part1.startsWith( "." ) || part1.endsWith( "." ) )
{
fallback = true;
}
}
catch ( NumberFormatException e )
{
fallback = true;
}
if ( fallback )
{
// qualifier is the whole version, including "-"
qualifier = version;
majorVersion = null;
minorVersion = null;
incrementalVersion = null;
buildNumber = null;
}
}
} } | public class class_name {
public final void parseVersion( String version )
{
unparsed = version;
int index = version.indexOf( "-" );
String part1;
String part2 = null;
if ( index < 0 )
{
part1 = version; // depends on control dependency: [if], data = [none]
}
else
{
part1 = version.substring( 0, index ); // depends on control dependency: [if], data = [none]
part2 = version.substring( index + 1 ); // depends on control dependency: [if], data = [( index]
}
if ( part2 != null )
{
try
{
if ( ( part2.length() == 1 ) || !part2.startsWith( "0" ) )
{
buildNumber = Integer.valueOf( part2 ); // depends on control dependency: [if], data = [none]
}
else
{
qualifier = part2; // depends on control dependency: [if], data = [none]
}
}
catch ( NumberFormatException e )
{
qualifier = part2;
} // depends on control dependency: [catch], data = [none]
}
if ( !part1.contains(".") && !part1.startsWith( "0" ) )
{
try
{
majorVersion = Integer.valueOf( part1 ); // depends on control dependency: [try], data = [none]
}
catch ( NumberFormatException e )
{
// qualifier is the whole version, including "-"
qualifier = version;
buildNumber = null;
} // depends on control dependency: [catch], data = [none]
}
else
{
boolean fallback = false;
StringTokenizer tok = new StringTokenizer( part1, "." );
try
{
majorVersion = getNextIntegerToken( tok ); // depends on control dependency: [try], data = [none]
if ( tok.hasMoreTokens() )
{
minorVersion = getNextIntegerToken( tok ); // depends on control dependency: [if], data = [none]
}
if ( tok.hasMoreTokens() )
{
incrementalVersion = getNextIntegerToken( tok ); // depends on control dependency: [if], data = [none]
}
if ( tok.hasMoreTokens() )
{
fallback = true; // depends on control dependency: [if], data = [none]
}
// string tokenzier won't detect these and ignores them
if (part1.contains("..") || part1.startsWith( "." ) || part1.endsWith( "." ) )
{
fallback = true; // depends on control dependency: [if], data = [none]
}
}
catch ( NumberFormatException e )
{
fallback = true;
} // depends on control dependency: [catch], data = [none]
if ( fallback )
{
// qualifier is the whole version, including "-"
qualifier = version; // depends on control dependency: [if], data = [none]
majorVersion = null; // depends on control dependency: [if], data = [none]
minorVersion = null; // depends on control dependency: [if], data = [none]
incrementalVersion = null; // depends on control dependency: [if], data = [none]
buildNumber = null; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public boolean checkConstraint( Point2D_F64 viewA , Point2D_F64 viewB , Se3_F64 fromAtoB ) {
triangulate.triangulate(viewA,viewB,fromAtoB,P);
if( P.z > 0 ) {
SePointOps_F64.transform(fromAtoB,P,P);
return P.z > 0;
}
return false;
} } | public class class_name {
public boolean checkConstraint( Point2D_F64 viewA , Point2D_F64 viewB , Se3_F64 fromAtoB ) {
triangulate.triangulate(viewA,viewB,fromAtoB,P);
if( P.z > 0 ) {
SePointOps_F64.transform(fromAtoB,P,P); // depends on control dependency: [if], data = [none]
return P.z > 0; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
private View findFocusableViewInMyBounds(final boolean topFocus, final int top, final boolean leftFocus, final int left, View preferredFocusable) {
/*
* The fading edge's transparent side should be considered for focus
* since it's mostly visible, so we divide the actual fading edge length
* by 2.
*/
final int verticalFadingEdgeLength = getVerticalFadingEdgeLength() / 2;
final int topWithoutFadingEdge = top + verticalFadingEdgeLength;
final int bottomWithoutFadingEdge = top + getHeight() - verticalFadingEdgeLength;
final int horizontalFadingEdgeLength = getHorizontalFadingEdgeLength() / 2;
final int leftWithoutFadingEdge = left + horizontalFadingEdgeLength;
final int rightWithoutFadingEdge = left + getWidth() - horizontalFadingEdgeLength;
if ((preferredFocusable != null)
&& (preferredFocusable.getTop() < bottomWithoutFadingEdge)
&& (preferredFocusable.getBottom() > topWithoutFadingEdge)
&& (preferredFocusable.getLeft() < rightWithoutFadingEdge)
&& (preferredFocusable.getRight() > leftWithoutFadingEdge)) {
return preferredFocusable;
}
return findFocusableViewInBounds(topFocus, topWithoutFadingEdge, bottomWithoutFadingEdge, leftFocus, leftWithoutFadingEdge, rightWithoutFadingEdge);
} } | public class class_name {
private View findFocusableViewInMyBounds(final boolean topFocus, final int top, final boolean leftFocus, final int left, View preferredFocusable) {
/*
* The fading edge's transparent side should be considered for focus
* since it's mostly visible, so we divide the actual fading edge length
* by 2.
*/
final int verticalFadingEdgeLength = getVerticalFadingEdgeLength() / 2;
final int topWithoutFadingEdge = top + verticalFadingEdgeLength;
final int bottomWithoutFadingEdge = top + getHeight() - verticalFadingEdgeLength;
final int horizontalFadingEdgeLength = getHorizontalFadingEdgeLength() / 2;
final int leftWithoutFadingEdge = left + horizontalFadingEdgeLength;
final int rightWithoutFadingEdge = left + getWidth() - horizontalFadingEdgeLength;
if ((preferredFocusable != null)
&& (preferredFocusable.getTop() < bottomWithoutFadingEdge)
&& (preferredFocusable.getBottom() > topWithoutFadingEdge)
&& (preferredFocusable.getLeft() < rightWithoutFadingEdge)
&& (preferredFocusable.getRight() > leftWithoutFadingEdge)) {
return preferredFocusable; // depends on control dependency: [if], data = [none]
}
return findFocusableViewInBounds(topFocus, topWithoutFadingEdge, bottomWithoutFadingEdge, leftFocus, leftWithoutFadingEdge, rightWithoutFadingEdge);
} } |
public class class_name {
protected void resumeTransaction() {
//create local variable - JIT performance improvement
final boolean isTraceOn = com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled();
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[RESUME_TRANSACTION]);
}
// LocalTransactionCurrent ltCurrent = TransactionManagerFactory.getLocalTransactionCurrent(); //d120870.2
LocalTransactionCurrent ltCurrent = this.getDatabaseStoreService().getLocalTransactionCurrent();
LocalTransactionCoordinator coord = ltCurrent.getLocalTranCoord();
if (coord != null) {
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[RESUME_TRANSACTION], "Complete the Local Transaction");
}
try {
// Clean-up the Tx
coord.cleanup();
} catch (InconsistentLocalTranException ex) {
// Absorb any exception from cleanup - it doesn't really
// matter if there are inconsistencies in cleanup.
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "InconsistentLocalTranException", ex);
} catch (RolledbackException rbe) {
// We need to inform the user that completion
// was affected by a call to setRollbackOnly
// so rethrow as a ServletException.
//
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "DatabaseHashMap.localRollBack", rbe);
}
}
Object[] suspendedTx = (Object[]) suspendedTransactions.remove(Thread.currentThread()); // PM56632
if (suspendedTx != null) {
for (int i = suspendedTx.length - 1; i >= 0; i--) { // LTC resume, then global transaction
Object susTrans = suspendedTx[i];
if (susTrans != null) {
if (susTrans instanceof Transaction) {
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[RESUME_TRANSACTION], "Resume the suspended Global Transaction");
}
Transaction tx = (Transaction) susTrans;
try { // remove null check findbugs 106329
// TransactionManagerFactory.getTransactionManager().resume(tx);
this.getDatabaseStoreService().getEmbeddableWebSphereTransactionManager().resume(tx);
} catch (Throwable ex) {
// Absorb all possible JTA resume exceptions
com.ibm.ws.ffdc.FFDCFilter.processException(ex, "com.ibm.ws.session.store.db.DatabaseHashMap.resumeGlobalTransaction", "1210", this);
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "CommonMessage.exception", ex);
}
} else if (susTrans instanceof LocalTransactionCoordinator) {
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[RESUME_TRANSACTION], "Resume the suspended Local Transaction");
}
try {
coord = (LocalTransactionCoordinator) susTrans;
ltCurrent.resume(coord);
} catch (IllegalStateException ex) {
// We must be running under a received global tran.
// We should never have needed to suspend an LTC in preInvoke under
// these circumstances but the up-chain webapp may have started
// a global tran by a back-door route. Take a relaxed
// approach and just end the LTC, which is what should
// have happened when the global tran was started.
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "IllegalStateException", ex);
try {
// Clean-up the Tx
coord.cleanup();
} catch (InconsistentLocalTranException iltex) {
// Absorb any exception from cleanup - it doesn't really
// matter if there are inconsistencies in cleanup.
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "InconsistentLocalTranException", iltex);
} catch (RolledbackException rbe) {
// We need to inform the user that completion
// was affected by a call to setRollbackOnly
// so rethrow as a ServletException.
//
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "DatabaseHashMap.localRollBack", rbe);
}
}
}
}
}
} // PM56632
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[RESUME_TRANSACTION]);
}
} } | public class class_name {
protected void resumeTransaction() {
//create local variable - JIT performance improvement
final boolean isTraceOn = com.ibm.websphere.ras.TraceComponent.isAnyTracingEnabled();
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.entering(methodClassName, methodNames[RESUME_TRANSACTION]); // depends on control dependency: [if], data = [none]
}
// LocalTransactionCurrent ltCurrent = TransactionManagerFactory.getLocalTransactionCurrent(); //d120870.2
LocalTransactionCurrent ltCurrent = this.getDatabaseStoreService().getLocalTransactionCurrent();
LocalTransactionCoordinator coord = ltCurrent.getLocalTranCoord();
if (coord != null) {
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[RESUME_TRANSACTION], "Complete the Local Transaction"); // depends on control dependency: [if], data = [none]
}
try {
// Clean-up the Tx
coord.cleanup(); // depends on control dependency: [try], data = [none]
} catch (InconsistentLocalTranException ex) {
// Absorb any exception from cleanup - it doesn't really
// matter if there are inconsistencies in cleanup.
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "InconsistentLocalTranException", ex);
} catch (RolledbackException rbe) { // depends on control dependency: [catch], data = [none]
// We need to inform the user that completion
// was affected by a call to setRollbackOnly
// so rethrow as a ServletException.
//
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "DatabaseHashMap.localRollBack", rbe);
} // depends on control dependency: [catch], data = [none]
}
Object[] suspendedTx = (Object[]) suspendedTransactions.remove(Thread.currentThread()); // PM56632
if (suspendedTx != null) {
for (int i = suspendedTx.length - 1; i >= 0; i--) { // LTC resume, then global transaction
Object susTrans = suspendedTx[i];
if (susTrans != null) {
if (susTrans instanceof Transaction) {
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[RESUME_TRANSACTION], "Resume the suspended Global Transaction"); // depends on control dependency: [if], data = [none]
}
Transaction tx = (Transaction) susTrans;
try { // remove null check findbugs 106329
// TransactionManagerFactory.getTransactionManager().resume(tx);
this.getDatabaseStoreService().getEmbeddableWebSphereTransactionManager().resume(tx); // depends on control dependency: [try], data = [none]
} catch (Throwable ex) {
// Absorb all possible JTA resume exceptions
com.ibm.ws.ffdc.FFDCFilter.processException(ex, "com.ibm.ws.session.store.db.DatabaseHashMap.resumeGlobalTransaction", "1210", this);
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "CommonMessage.exception", ex);
} // depends on control dependency: [catch], data = [none]
} else if (susTrans instanceof LocalTransactionCoordinator) {
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINE)) {
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.FINE, methodClassName, methodNames[RESUME_TRANSACTION], "Resume the suspended Local Transaction"); // depends on control dependency: [if], data = [none]
}
try {
coord = (LocalTransactionCoordinator) susTrans; // depends on control dependency: [try], data = [none]
ltCurrent.resume(coord); // depends on control dependency: [try], data = [none]
} catch (IllegalStateException ex) {
// We must be running under a received global tran.
// We should never have needed to suspend an LTC in preInvoke under
// these circumstances but the up-chain webapp may have started
// a global tran by a back-door route. Take a relaxed
// approach and just end the LTC, which is what should
// have happened when the global tran was started.
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "IllegalStateException", ex);
try {
// Clean-up the Tx
coord.cleanup(); // depends on control dependency: [try], data = [none]
} catch (InconsistentLocalTranException iltex) {
// Absorb any exception from cleanup - it doesn't really
// matter if there are inconsistencies in cleanup.
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "InconsistentLocalTranException", iltex);
} catch (RolledbackException rbe) { // depends on control dependency: [catch], data = [none]
// We need to inform the user that completion
// was affected by a call to setRollbackOnly
// so rethrow as a ServletException.
//
LoggingUtil.SESSION_LOGGER_WAS.logp(Level.SEVERE, methodClassName, methodNames[RESUME_TRANSACTION], "DatabaseHashMap.localRollBack", rbe);
} // depends on control dependency: [catch], data = [none]
} // depends on control dependency: [catch], data = [none]
}
}
}
} // PM56632
if (isTraceOn && LoggingUtil.SESSION_LOGGER_WAS.isLoggable(Level.FINER)) {
LoggingUtil.SESSION_LOGGER_WAS.exiting(methodClassName, methodNames[RESUME_TRANSACTION]); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String replaceAll(CharSequence str, Pattern pattern, Func1<Matcher, String> replaceFun){
if (StrUtil.isEmpty(str)) {
return StrUtil.str(str);
}
final Matcher matcher = pattern.matcher(str);
final StringBuffer buffer = new StringBuffer();
while (matcher.find()) {
try {
matcher.appendReplacement(buffer, replaceFun.call(matcher));
} catch (Exception e) {
throw new UtilException(e);
}
}
matcher.appendTail(buffer);
return buffer.toString();
} } | public class class_name {
public static String replaceAll(CharSequence str, Pattern pattern, Func1<Matcher, String> replaceFun){
if (StrUtil.isEmpty(str)) {
return StrUtil.str(str);
// depends on control dependency: [if], data = [none]
}
final Matcher matcher = pattern.matcher(str);
final StringBuffer buffer = new StringBuffer();
while (matcher.find()) {
try {
matcher.appendReplacement(buffer, replaceFun.call(matcher));
// depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new UtilException(e);
}
// depends on control dependency: [catch], data = [none]
}
matcher.appendTail(buffer);
return buffer.toString();
} } |
public class class_name {
private TemplateNode renderMainHelper(
Appendable outputBuf,
String templateName,
@Nullable SoyRecord data,
@Nullable SoyRecord ijData,
Predicate<String> activeDelPackageNames,
@Nullable SoyMsgBundle msgBundle,
@Nullable SoyIdRenamingMap idRenamingMap,
@Nullable SoyCssRenamingMap cssRenamingMap,
boolean debugSoyTemplateInfo,
ImmutableMap<String, Supplier<Object>> pluginInstances) {
// templateNode is always guaranteed to be non-null because for a tofu compile all templates are
// considered source files
TemplateNode template = basicTemplates.get(templateName);
if (template == null) {
throw new SoyTofuException("Attempting to render undefined template '" + templateName + "'.");
} else if (template.getVisibility() == Visibility.PRIVATE) {
throw new SoyTofuException("Attempting to render private template '" + templateName + "'.");
}
if (data == null) {
data = SoyValueConverter.EMPTY_DICT;
}
if (ijData == null) {
ijData = SoyValueConverter.EMPTY_DICT;
}
try {
RenderVisitor rv =
new RenderVisitor(
new EvalVisitorFactoryImpl(),
outputBuf,
basicTemplates,
delTemplates,
data,
ijData,
activeDelPackageNames,
msgBundle,
idRenamingMap,
cssRenamingMap,
debugSoyTemplateInfo,
pluginInstances);
rv.exec(template);
} catch (RenderException re) {
throw new SoyTofuException(re);
}
return template;
} } | public class class_name {
private TemplateNode renderMainHelper(
Appendable outputBuf,
String templateName,
@Nullable SoyRecord data,
@Nullable SoyRecord ijData,
Predicate<String> activeDelPackageNames,
@Nullable SoyMsgBundle msgBundle,
@Nullable SoyIdRenamingMap idRenamingMap,
@Nullable SoyCssRenamingMap cssRenamingMap,
boolean debugSoyTemplateInfo,
ImmutableMap<String, Supplier<Object>> pluginInstances) {
// templateNode is always guaranteed to be non-null because for a tofu compile all templates are
// considered source files
TemplateNode template = basicTemplates.get(templateName);
if (template == null) {
throw new SoyTofuException("Attempting to render undefined template '" + templateName + "'.");
} else if (template.getVisibility() == Visibility.PRIVATE) {
throw new SoyTofuException("Attempting to render private template '" + templateName + "'.");
}
if (data == null) {
data = SoyValueConverter.EMPTY_DICT; // depends on control dependency: [if], data = [none]
}
if (ijData == null) {
ijData = SoyValueConverter.EMPTY_DICT; // depends on control dependency: [if], data = [none]
}
try {
RenderVisitor rv =
new RenderVisitor(
new EvalVisitorFactoryImpl(),
outputBuf,
basicTemplates,
delTemplates,
data,
ijData,
activeDelPackageNames,
msgBundle,
idRenamingMap,
cssRenamingMap,
debugSoyTemplateInfo,
pluginInstances);
rv.exec(template); // depends on control dependency: [try], data = [none]
} catch (RenderException re) {
throw new SoyTofuException(re);
} // depends on control dependency: [catch], data = [none]
return template;
} } |
public class class_name {
public static boolean isType(File file, String extension)
{
Check.notNull(extension);
if (file != null && file.isFile())
{
final String current = getExtension(file);
return current.equals(extension.replace(Constant.DOT, Constant.EMPTY_STRING));
}
return false;
} } | public class class_name {
public static boolean isType(File file, String extension)
{
Check.notNull(extension);
if (file != null && file.isFile())
{
final String current = getExtension(file);
return current.equals(extension.replace(Constant.DOT, Constant.EMPTY_STRING)); // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
public static ByteBuf copiedBuffer(ByteBuf buffer) {
int readable = buffer.readableBytes();
if (readable > 0) {
ByteBuf copy = buffer(readable);
copy.writeBytes(buffer, buffer.readerIndex(), readable);
return copy;
} else {
return EMPTY_BUFFER;
}
} } | public class class_name {
public static ByteBuf copiedBuffer(ByteBuf buffer) {
int readable = buffer.readableBytes();
if (readable > 0) {
ByteBuf copy = buffer(readable);
copy.writeBytes(buffer, buffer.readerIndex(), readable); // depends on control dependency: [if], data = [none]
return copy; // depends on control dependency: [if], data = [none]
} else {
return EMPTY_BUFFER; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void addCookie(
HttpServletRequest request,
HttpServletResponse response,
String name,
String value,
String domain,
int maxAge, boolean httpOnly) {
String contextPath = request.getContextPath();
if (contextPath == null || contextPath.isEmpty()) {
contextPath = "/";
}
addCookie(request, response, name, value, domain, contextPath, maxAge,httpOnly);
} } | public class class_name {
public static void addCookie(
HttpServletRequest request,
HttpServletResponse response,
String name,
String value,
String domain,
int maxAge, boolean httpOnly) {
String contextPath = request.getContextPath();
if (contextPath == null || contextPath.isEmpty()) {
contextPath = "/"; // depends on control dependency: [if], data = [none]
}
addCookie(request, response, name, value, domain, contextPath, maxAge,httpOnly);
} } |
public class class_name {
private static List<ConfigPath> resolveUniqueTypePaths(final List<ConfigPath> items) {
final Map<Class, ConfigPath> index = new HashMap<>();
final List<Class> duplicates = new ArrayList<>();
for (ConfigPath item : items) {
final Class type = item.getDeclaredType();
if (!item.isCustomType() || duplicates.contains(type)) {
continue;
}
if (index.containsKey(type)) {
// type not unique
index.remove(type);
duplicates.add(type);
} else {
index.put(type, item);
}
}
return index.isEmpty() ? Collections.emptyList() : new ArrayList<>(index.values());
} } | public class class_name {
private static List<ConfigPath> resolveUniqueTypePaths(final List<ConfigPath> items) {
final Map<Class, ConfigPath> index = new HashMap<>();
final List<Class> duplicates = new ArrayList<>();
for (ConfigPath item : items) {
final Class type = item.getDeclaredType();
if (!item.isCustomType() || duplicates.contains(type)) {
continue;
}
if (index.containsKey(type)) {
// type not unique
index.remove(type); // depends on control dependency: [if], data = [none]
duplicates.add(type); // depends on control dependency: [if], data = [none]
} else {
index.put(type, item); // depends on control dependency: [if], data = [none]
}
}
return index.isEmpty() ? Collections.emptyList() : new ArrayList<>(index.values());
} } |
public class class_name {
@ForOverride
@Nullable
protected JSType findPropertyTypeWithoutConsideringTemplateTypes(String propertyName) {
ObjectType autoboxObjType = ObjectType.cast(autoboxesTo());
if (autoboxObjType != null) {
return autoboxObjType.findPropertyType(propertyName);
}
return null;
} } | public class class_name {
@ForOverride
@Nullable
protected JSType findPropertyTypeWithoutConsideringTemplateTypes(String propertyName) {
ObjectType autoboxObjType = ObjectType.cast(autoboxesTo());
if (autoboxObjType != null) {
return autoboxObjType.findPropertyType(propertyName); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public int getExpiresIn(String tokenGrantType, String scope) {
int expiresIn = Integer.MAX_VALUE;
List<Scope> scopes = loadScopes(scope);
boolean ccGrantType = TokenRequest.CLIENT_CREDENTIALS.equals(tokenGrantType);
if (TokenRequest.CLIENT_CREDENTIALS.equals(tokenGrantType)) {
for (Scope s : scopes) {
if (s.getCcExpiresIn() < expiresIn) {
expiresIn = s.getCcExpiresIn();
}
}
} else if (TokenRequest.PASSWORD.equals(tokenGrantType)) {
for (Scope s : scopes) {
if (s.getPassExpiresIn() < expiresIn) {
expiresIn = s.getPassExpiresIn();
}
}
} else {
// refresh_token
for (Scope s : scopes) {
if (s.getRefreshExpiresIn() < expiresIn) {
expiresIn = s.getRefreshExpiresIn();
}
}
}
if (scopes.size() == 0 || expiresIn == Integer.MAX_VALUE) {
expiresIn = (ccGrantType) ? OAuthConfig.DEFAULT_CC_EXPIRES_IN : OAuthConfig.DEFAULT_PASSWORD_EXPIRES_IN;
}
return expiresIn;
} } | public class class_name {
public int getExpiresIn(String tokenGrantType, String scope) {
int expiresIn = Integer.MAX_VALUE;
List<Scope> scopes = loadScopes(scope);
boolean ccGrantType = TokenRequest.CLIENT_CREDENTIALS.equals(tokenGrantType);
if (TokenRequest.CLIENT_CREDENTIALS.equals(tokenGrantType)) {
for (Scope s : scopes) {
if (s.getCcExpiresIn() < expiresIn) {
expiresIn = s.getCcExpiresIn(); // depends on control dependency: [if], data = [none]
}
}
} else if (TokenRequest.PASSWORD.equals(tokenGrantType)) {
for (Scope s : scopes) {
if (s.getPassExpiresIn() < expiresIn) {
expiresIn = s.getPassExpiresIn(); // depends on control dependency: [if], data = [none]
}
}
} else {
// refresh_token
for (Scope s : scopes) {
if (s.getRefreshExpiresIn() < expiresIn) {
expiresIn = s.getRefreshExpiresIn(); // depends on control dependency: [if], data = [none]
}
}
}
if (scopes.size() == 0 || expiresIn == Integer.MAX_VALUE) {
expiresIn = (ccGrantType) ? OAuthConfig.DEFAULT_CC_EXPIRES_IN : OAuthConfig.DEFAULT_PASSWORD_EXPIRES_IN; // depends on control dependency: [if], data = [none]
}
return expiresIn;
} } |
public class class_name {
public CmsAttributeValueView getParentView() {
Widget ancestor = getParent();
while ((ancestor != null) && !(ancestor instanceof CmsAttributeValueView)) {
ancestor = ancestor.getParent();
}
return (CmsAttributeValueView)ancestor;
} } | public class class_name {
public CmsAttributeValueView getParentView() {
Widget ancestor = getParent();
while ((ancestor != null) && !(ancestor instanceof CmsAttributeValueView)) {
ancestor = ancestor.getParent();
// depends on control dependency: [while], data = [none]
}
return (CmsAttributeValueView)ancestor;
} } |
public class class_name {
final protected SaltProject executeQuery(WebResource subgraphRes,
MatchGroup matches, int left, int right, String segmentation,
SubgraphFilter filter)
{
SaltProject p = null;
WebResource res = subgraphRes.queryParam("left", "" + left).queryParam(
"right", "" + right);
try
{
if (segmentation != null)
{
res = res.queryParam("segmentation", segmentation);
}
if (filter != null)
{
res = res.queryParam("filter", filter.name());
}
p = res.post(SaltProject.class, matches);
}
catch (UniformInterfaceException ex)
{
log.error(ex.getMessage(), ex);
}
return p;
} } | public class class_name {
final protected SaltProject executeQuery(WebResource subgraphRes,
MatchGroup matches, int left, int right, String segmentation,
SubgraphFilter filter)
{
SaltProject p = null;
WebResource res = subgraphRes.queryParam("left", "" + left).queryParam(
"right", "" + right);
try
{
if (segmentation != null)
{
res = res.queryParam("segmentation", segmentation); // depends on control dependency: [if], data = [none]
}
if (filter != null)
{
res = res.queryParam("filter", filter.name()); // depends on control dependency: [if], data = [none]
}
p = res.post(SaltProject.class, matches); // depends on control dependency: [try], data = [none]
}
catch (UniformInterfaceException ex)
{
log.error(ex.getMessage(), ex);
} // depends on control dependency: [catch], data = [none]
return p;
} } |
public class class_name {
public static Object convertValueToSafeJson(Converter converter, Object value) {
value = Proxies.unwrap(value);
if (isJsonObject(value)) {
return value;
}
if (converter != null) {
// TODO converters ususally go from String -> CustomType?
try {
Object converted = converter.convert(value);
if (converted != null) {
value = converted;
}
} catch (Exception e) {
// ignore - invalid converter
}
}
if (value != null) {
return toSafeJsonValue(value);
} else {
return null;
}
} } | public class class_name {
public static Object convertValueToSafeJson(Converter converter, Object value) {
value = Proxies.unwrap(value);
if (isJsonObject(value)) {
return value; // depends on control dependency: [if], data = [none]
}
if (converter != null) {
// TODO converters ususally go from String -> CustomType?
try {
Object converted = converter.convert(value);
if (converted != null) {
value = converted; // depends on control dependency: [if], data = [none]
}
} catch (Exception e) {
// ignore - invalid converter
} // depends on control dependency: [catch], data = [none]
}
if (value != null) {
return toSafeJsonValue(value); // depends on control dependency: [if], data = [(value]
} else {
return null; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
void updateDagStatus() {
// A dag may have nodes that are disabled. It's safer to scan all the nodes.
// Assume the overhead is minimal. If it is not the case, we can optimize later.
boolean failed = false;
for (final Node node : this.nodes) {
final Status nodeStatus = node.getStatus();
if (!nodeStatus.isTerminal()) {
return;
}
if (nodeStatus == Status.FAILURE) {
failed = true;
}
}
// Update the dag status only after all nodes have reached terminal states.
updateDagStatusInternal(failed);
} } | public class class_name {
void updateDagStatus() {
// A dag may have nodes that are disabled. It's safer to scan all the nodes.
// Assume the overhead is minimal. If it is not the case, we can optimize later.
boolean failed = false;
for (final Node node : this.nodes) {
final Status nodeStatus = node.getStatus();
if (!nodeStatus.isTerminal()) {
return; // depends on control dependency: [if], data = [none]
}
if (nodeStatus == Status.FAILURE) {
failed = true; // depends on control dependency: [if], data = [none]
}
}
// Update the dag status only after all nodes have reached terminal states.
updateDagStatusInternal(failed);
} } |
public class class_name {
public TermNatures getTermNatures(String word) {
String[] params = null;
// 获得词性 , 先从系统辞典。在从用户自定义辞典
AnsjItem ansjItem = DATDictionary.getItem(word);
TermNatures tn = null;
if (ansjItem != AnsjItem.NULL) {
tn = ansjItem.termNatures;
} else if ((params = getParams(word)) != null) {
tn = new TermNatures(new TermNature(params[0], 1));
} else if (WordAlert.isEnglish(word)) {
tn = TermNatures.EN;
} else if (WordAlert.isNumber(word)) {
tn = TermNatures.M;
} else {
tn = TermNatures.NULL;
}
return tn;
} } | public class class_name {
public TermNatures getTermNatures(String word) {
String[] params = null;
// 获得词性 , 先从系统辞典。在从用户自定义辞典
AnsjItem ansjItem = DATDictionary.getItem(word);
TermNatures tn = null;
if (ansjItem != AnsjItem.NULL) {
tn = ansjItem.termNatures; // depends on control dependency: [if], data = [none]
} else if ((params = getParams(word)) != null) {
tn = new TermNatures(new TermNature(params[0], 1)); // depends on control dependency: [if], data = [none]
} else if (WordAlert.isEnglish(word)) {
tn = TermNatures.EN; // depends on control dependency: [if], data = [none]
} else if (WordAlert.isNumber(word)) {
tn = TermNatures.M; // depends on control dependency: [if], data = [none]
} else {
tn = TermNatures.NULL; // depends on control dependency: [if], data = [none]
}
return tn;
} } |
public class class_name {
    /**
     * Copies every non-static field — including inherited and non-public
     * ones — from {@code src} into {@code dest}, walking the class hierarchy
     * up to (but excluding further superclasses of) {@code Object}.
     *
     * @param src  the object to copy field values from
     * @param dest the object to copy field values into; must be assignable to
     *             the source type
     * @throws IllegalArgumentException if a field value cannot be assigned
     * @throws IllegalStateException if a field unexpectedly remains inaccessible
     */
    public static <S, D extends S> void copyFields(final S src, D dest) throws IllegalArgumentException {
        Class<?> targetClass = src.getClass();
        do {
            for (Field field : targetClass.getDeclaredFields()) {
                // Skip static fields: they belong to the class, not the instance.
                if (Modifier.isStatic(field.getModifiers())) {
                    continue;
                }
                try {
                    // Open up non-public / final fields before copying.
                    if ((!Modifier.isPublic(field.getModifiers())
                            || !Modifier.isPublic(field.getDeclaringClass().getModifiers())
                            || Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {
                        field.setAccessible(true);
                    }
                    field.set(dest, field.get(src));
                }
                catch (IllegalAccessException ex) {
                    // Fix: chain the original exception as the cause instead of
                    // flattening it into the message and losing its stack trace.
                    throw new IllegalStateException(
                            "Shouldn't be illegal to access field '" + field.getName() + "'", ex);
                }
            }
            targetClass = targetClass.getSuperclass();
        }
        while (targetClass != null && targetClass != Object.class);
    } }
public static <S, D extends S> void copyFields(final S src, D dest) throws IllegalArgumentException {
Class<?> targetClass = src.getClass();
do {
Field[] fields = targetClass.getDeclaredFields();
for (Field field : fields) {
// Skip static fields:
if (Modifier.isStatic(field.getModifiers())) {
continue;
}
try {
if ((!Modifier.isPublic(field.getModifiers())
|| !Modifier.isPublic(field.getDeclaringClass().getModifiers())
|| Modifier.isFinal(field.getModifiers())) && !field.isAccessible()) {
field.setAccessible(true); // depends on control dependency: [if], data = [none]
}
Object srcValue = field.get(src);
field.set(dest, srcValue); // depends on control dependency: [try], data = [none]
}
catch (IllegalAccessException ex) {
throw new IllegalStateException(
"Shouldn't be illegal to access field '" + field.getName() + "': " + ex);
} // depends on control dependency: [catch], data = [none]
}
targetClass = targetClass.getSuperclass();
}
while (targetClass != null && targetClass != Object.class);
} } |
public class class_name {
public ObjectIterator iterator()
{
ObjectIterator iterator = this.iterator;
if (null == iterator)
{
iterator = new ObjectIterator();
if (shouldAvoidAllocation)
{
this.iterator = iterator;
}
}
return iterator.reset();
} } | public class class_name {
public ObjectIterator iterator()
{
ObjectIterator iterator = this.iterator;
if (null == iterator)
{
iterator = new ObjectIterator(); // depends on control dependency: [if], data = [none]
if (shouldAvoidAllocation)
{
this.iterator = iterator; // depends on control dependency: [if], data = [none]
}
}
return iterator.reset();
} } |
public class class_name {
public Object getParent(Object element) {
if (element instanceof DFSPath) {
return ((DFSPath) element).getParent();
} else if (element instanceof HadoopServer) {
return locationsRoot;
}
return null;
} } | public class class_name {
public Object getParent(Object element) {
if (element instanceof DFSPath) {
return ((DFSPath) element).getParent(); // depends on control dependency: [if], data = [none]
} else if (element instanceof HadoopServer) {
return locationsRoot; // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
private static void printEmphasized(String toPrint) {
// print the triggered header
pristineSysErr.println("+----------\\");
for (String line : toPrint.split("\n")) {
pristineSysErr.println("| " + line);
}
pristineSysErr.println("+----------/");
} } | public class class_name {
private static void printEmphasized(String toPrint) {
// print the triggered header
pristineSysErr.println("+----------\\");
for (String line : toPrint.split("\n")) {
pristineSysErr.println("| " + line); // depends on control dependency: [for], data = [line]
}
pristineSysErr.println("+----------/");
} } |
public class class_name {
public synchronized void decrementActivityCount()
{
_activityCount--;
if(_activityCount == 0)
{
this.notifyAll();
}
if (tc.isDebugEnabled()) Tr.debug(tc, "decrementActivityCount", new Object[]{this, new Integer(_activityCount)});
} } | public class class_name {
public synchronized void decrementActivityCount()
{
_activityCount--;
if(_activityCount == 0)
{
this.notifyAll(); // depends on control dependency: [if], data = [none]
}
if (tc.isDebugEnabled()) Tr.debug(tc, "decrementActivityCount", new Object[]{this, new Integer(_activityCount)});
} } |
public class class_name {
public final static Map<String, Cookie> readCookieMap(HttpServletRequest httpServletRequest) {
Map<String, Cookie> cookieMap = new HashMap<String, Cookie>();
Cookie[] cookies = httpServletRequest.getCookies();
if (null == cookies) {
return null;
}
for (Cookie cookie : cookies) {
cookieMap.put(cookie.getName().toLowerCase(), cookie);
}
return cookieMap;
} } | public class class_name {
public final static Map<String, Cookie> readCookieMap(HttpServletRequest httpServletRequest) {
Map<String, Cookie> cookieMap = new HashMap<String, Cookie>();
Cookie[] cookies = httpServletRequest.getCookies();
if (null == cookies) {
return null;
// depends on control dependency: [if], data = [none]
}
for (Cookie cookie : cookies) {
cookieMap.put(cookie.getName().toLowerCase(), cookie);
// depends on control dependency: [for], data = [cookie]
}
return cookieMap;
} } |
public class class_name {
private AutomaticZenRule copyAutomaticZenRule(AutomaticZenRule automaticZenRule) {
Parcel parcel = Parcel.obtain();
try {
automaticZenRule.writeToParcel(parcel, /* flags= */ 0);
parcel.setDataPosition(0);
return new AutomaticZenRule(parcel);
} finally {
parcel.recycle();
}
} } | public class class_name {
private AutomaticZenRule copyAutomaticZenRule(AutomaticZenRule automaticZenRule) {
Parcel parcel = Parcel.obtain();
try {
automaticZenRule.writeToParcel(parcel, /* flags= */ 0); // depends on control dependency: [try], data = [none]
parcel.setDataPosition(0); // depends on control dependency: [try], data = [none]
return new AutomaticZenRule(parcel); // depends on control dependency: [try], data = [none]
} finally {
parcel.recycle();
}
} } |
public class class_name {
private File[] getFilesMatchingPath(final String path) {
if (path.endsWith("*")) {
final File dir = new File(path.substring(0, path.lastIndexOf("/") + 1));
final FileFilter fileFilter = new WildcardFileFilter(path.substring(path.lastIndexOf("/")
+ 1));
final File[] files = dir.listFiles(fileFilter);
return files;
} else {
return new File[]{new File(path)};
}
} } | public class class_name {
private File[] getFilesMatchingPath(final String path) {
if (path.endsWith("*")) {
final File dir = new File(path.substring(0, path.lastIndexOf("/") + 1));
final FileFilter fileFilter = new WildcardFileFilter(path.substring(path.lastIndexOf("/")
+ 1));
final File[] files = dir.listFiles(fileFilter);
return files; // depends on control dependency: [if], data = [none]
} else {
return new File[]{new File(path)}; // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void addToBuffer(String index, String document) {
if (timerMDC == null) {
timerMDC = MDC.get("name");
}
// Remove old entries from the buffer
int removedSize = 0;
if (docBuffer.containsKey(index)) {
log.debug("Removing buffer duplicate: '{}'", index);
removedSize = docBuffer.get(index).length();
docBuffer.remove(index);
}
int length = document.length() - removedSize;
// If this is the first document in the buffer, record its age
bufferYoungest = new Date().getTime();
if (docBuffer.isEmpty()) {
bufferOldest = new Date().getTime();
log.debug("=== New buffer starting: {}", bufferOldest);
}
// Add to the buffer
docBuffer.put(index, document);
bufferSize += length;
// Check if submission is required
checkBuffer();
} } | public class class_name {
private void addToBuffer(String index, String document) {
if (timerMDC == null) {
timerMDC = MDC.get("name"); // depends on control dependency: [if], data = [none]
}
// Remove old entries from the buffer
int removedSize = 0;
if (docBuffer.containsKey(index)) {
log.debug("Removing buffer duplicate: '{}'", index); // depends on control dependency: [if], data = [none]
removedSize = docBuffer.get(index).length(); // depends on control dependency: [if], data = [none]
docBuffer.remove(index); // depends on control dependency: [if], data = [none]
}
int length = document.length() - removedSize;
// If this is the first document in the buffer, record its age
bufferYoungest = new Date().getTime();
if (docBuffer.isEmpty()) {
bufferOldest = new Date().getTime(); // depends on control dependency: [if], data = [none]
log.debug("=== New buffer starting: {}", bufferOldest); // depends on control dependency: [if], data = [none]
}
// Add to the buffer
docBuffer.put(index, document);
bufferSize += length;
// Check if submission is required
checkBuffer();
} } |
public class class_name {
    /**
     * Converts the map-based ({@link Document}) representation of an
     * association back into a collection of row documents, re-adding the row
     * key index column to each restored row.
     *
     * @param associationKey the association whose metadata describes the row
     *                       key index column and the associated key columns
     * @param value          the stored map: row key -> row payload (either a
     *                       Document of columns or a single scalar value)
     * @return the restored rows as a list of Documents
     */
    private static Collection<?> getRowsFromMapAssociation(AssociationKey associationKey, Document value) {
        String rowKeyIndexColumn = associationKey.getMetadata().getRowKeyIndexColumnNames()[0];
        List<Document> rows = new ArrayList<Document>();
        String[] associationKeyColumns = associationKey.getMetadata()
                .getAssociatedEntityKeyMetadata()
                .getAssociationKeyColumns();
        // Omit shared prefix of compound ids, will be handled in the row accessor
        String prefix = DocumentHelpers.getColumnSharedPrefix( associationKeyColumns );
        prefix = prefix == null ? "" : prefix + ".";
        // Columns starting with this prefix belong to an embedded element collection.
        String embeddedValueColumnPrefix = associationKey.getMetadata().getCollectionRole() + EMBEDDABLE_COLUMN_PREFIX;
        // restore the list representation
        for ( String rowKey : value.keySet() ) {
            Object mapRow = value.get( rowKey );
            // include the row key index column
            Document row = new Document();
            row.put( rowKeyIndexColumn, rowKey );
            // several value columns, copy them all
            if ( mapRow instanceof Document ) {
                for ( String column : associationKey.getMetadata().getAssociatedEntityKeyMetadata().getAssociationKeyColumns() ) {
                    // The column is part of an element collection; Restore the "value" node in the hierarchy
                    if ( column.startsWith( embeddedValueColumnPrefix ) ) {
                        MongoHelpers.setValue(
                                row,
                                column.substring( associationKey.getMetadata().getCollectionRole().length() + 1 ),
                                ( (Document) mapRow ).get( column.substring( embeddedValueColumnPrefix.length() ) )
                        );
                    }
                    else {
                        // Plain key column: strip the shared prefix from both source and target.
                        row.put(
                                column.substring( prefix.length() ),
                                ( (Document) mapRow ).get( column.substring( prefix.length() ) )
                        );
                    }
                }
            }
            // single value column
            else {
                row.put( associationKey.getMetadata().getAssociatedEntityKeyMetadata().getAssociationKeyColumns()[0], mapRow );
            }
            rows.add( row );
        }
        return rows;
    } }
private static Collection<?> getRowsFromMapAssociation(AssociationKey associationKey, Document value) {
String rowKeyIndexColumn = associationKey.getMetadata().getRowKeyIndexColumnNames()[0];
List<Document> rows = new ArrayList<Document>();
String[] associationKeyColumns = associationKey.getMetadata()
.getAssociatedEntityKeyMetadata()
.getAssociationKeyColumns();
// Omit shared prefix of compound ids, will be handled in the row accessor
String prefix = DocumentHelpers.getColumnSharedPrefix( associationKeyColumns );
prefix = prefix == null ? "" : prefix + ".";
String embeddedValueColumnPrefix = associationKey.getMetadata().getCollectionRole() + EMBEDDABLE_COLUMN_PREFIX;
// restore the list representation
for ( String rowKey : value.keySet() ) {
Object mapRow = value.get( rowKey );
// include the row key index column
Document row = new Document();
row.put( rowKeyIndexColumn, rowKey ); // depends on control dependency: [for], data = [rowKey]
// several value columns, copy them all
if ( mapRow instanceof Document ) {
for ( String column : associationKey.getMetadata().getAssociatedEntityKeyMetadata().getAssociationKeyColumns() ) {
// The column is part of an element collection; Restore the "value" node in the hierarchy
if ( column.startsWith( embeddedValueColumnPrefix ) ) {
MongoHelpers.setValue(
row,
column.substring( associationKey.getMetadata().getCollectionRole().length() + 1 ),
( (Document) mapRow ).get( column.substring( embeddedValueColumnPrefix.length() ) )
); // depends on control dependency: [if], data = [none]
}
else {
row.put(
column.substring( prefix.length() ),
( (Document) mapRow ).get( column.substring( prefix.length() ) )
); // depends on control dependency: [if], data = [none]
}
}
}
// single value column
else {
row.put( associationKey.getMetadata().getAssociatedEntityKeyMetadata().getAssociationKeyColumns()[0], mapRow ); // depends on control dependency: [if], data = [none]
}
rows.add( row ); // depends on control dependency: [for], data = [none]
}
return rows;
} } |
public class class_name {
protected void recycleChildren(RecyclerView.Recycler recycler, int startIndex, int endIndex) {
if (startIndex == endIndex) {
return;
}
if (DEBUG) {
Log.d(TAG, "Recycling " + Math.abs(startIndex - endIndex) + " items");
}
if (endIndex > startIndex) {
for (int i = endIndex - 1; i >= startIndex; i--) {
removeAndRecycleViewAt(i, recycler);
}
} else {
for (int i = startIndex; i > endIndex; i--) {
removeAndRecycleViewAt(i, recycler);
}
}
} } | public class class_name {
protected void recycleChildren(RecyclerView.Recycler recycler, int startIndex, int endIndex) {
if (startIndex == endIndex) {
return; // depends on control dependency: [if], data = [none]
}
if (DEBUG) {
Log.d(TAG, "Recycling " + Math.abs(startIndex - endIndex) + " items"); // depends on control dependency: [if], data = [none]
}
if (endIndex > startIndex) {
for (int i = endIndex - 1; i >= startIndex; i--) {
removeAndRecycleViewAt(i, recycler); // depends on control dependency: [for], data = [i]
}
} else {
for (int i = startIndex; i > endIndex; i--) {
removeAndRecycleViewAt(i, recycler); // depends on control dependency: [for], data = [i]
}
}
} } |
public class class_name {
    /**
     * Collects the self types declared on a trait (via its SELFTYPE annotation)
     * into the given accumulator set, optionally recursing into implemented
     * interfaces and the superclass chain.
     *
     * @param receiver        the class to inspect; only traits contribute annotations
     * @param selfTypes       accumulator set, modified in place
     * @param checkInterfaces whether to also scan implemented interfaces
     * @param checkSuper      whether to also scan the superclass hierarchy
     * @return the same {@code selfTypes} set, for convenient chaining
     */
    public static LinkedHashSet<ClassNode> collectSelfTypes(
            ClassNode receiver,
            LinkedHashSet<ClassNode> selfTypes,
            boolean checkInterfaces,
            boolean checkSuper) {
        if (Traits.isTrait(receiver)) {
            List<AnnotationNode> annotations = receiver.getAnnotations(SELFTYPE_CLASSNODE);
            for (AnnotationNode annotation : annotations) {
                // The "value" member may be a single class or a list of classes.
                Expression value = annotation.getMember("value");
                if (value instanceof ClassExpression) {
                    selfTypes.add(value.getType());
                } else if (value instanceof ListExpression) {
                    List<Expression> expressions = ((ListExpression) value).getExpressions();
                    for (Expression expression : expressions) {
                        if (expression instanceof ClassExpression) {
                            selfTypes.add(expression.getType());
                        }
                    }
                }
            }
        }
        if (checkInterfaces) {
            ClassNode[] interfaces = receiver.getInterfaces();
            for (ClassNode anInterface : interfaces) {
                // Interfaces keep recursing into their own interfaces.
                collectSelfTypes(anInterface, selfTypes, true, checkSuper);
            }
        }
        if (checkSuper) {
            ClassNode superClass = receiver.getSuperClass();
            if (superClass != null) {
                collectSelfTypes(superClass, selfTypes, checkInterfaces, true);
            }
        }
        return selfTypes;
    } }
public static LinkedHashSet<ClassNode> collectSelfTypes(
ClassNode receiver,
LinkedHashSet<ClassNode> selfTypes,
boolean checkInterfaces,
boolean checkSuper) {
if (Traits.isTrait(receiver)) {
List<AnnotationNode> annotations = receiver.getAnnotations(SELFTYPE_CLASSNODE);
for (AnnotationNode annotation : annotations) {
Expression value = annotation.getMember("value");
if (value instanceof ClassExpression) {
selfTypes.add(value.getType()); // depends on control dependency: [if], data = [none]
} else if (value instanceof ListExpression) {
List<Expression> expressions = ((ListExpression) value).getExpressions();
for (Expression expression : expressions) {
if (expression instanceof ClassExpression) {
selfTypes.add(expression.getType()); // depends on control dependency: [if], data = [none]
}
}
}
}
}
if (checkInterfaces) {
ClassNode[] interfaces = receiver.getInterfaces();
for (ClassNode anInterface : interfaces) {
collectSelfTypes(anInterface, selfTypes, true, checkSuper); // depends on control dependency: [for], data = [anInterface]
}
}
if (checkSuper) {
ClassNode superClass = receiver.getSuperClass();
if (superClass != null) {
collectSelfTypes(superClass, selfTypes, checkInterfaces, true); // depends on control dependency: [if], data = [(superClass]
}
}
return selfTypes;
} } |
public class class_name {
protected void addPackagesList(PackageDoc[] packages, Content tbody) {
for (int i = 0; i < packages.length; i++) {
if (packages[i] != null && packages[i].name().length() > 0) {
if (configuration.nodeprecated && Util.isDeprecated(packages[i]))
continue;
Content packageLinkContent = getPackageLink(packages[i],
getPackageName(packages[i]));
Content tdPackage = HtmlTree.TD(HtmlStyle.colFirst, packageLinkContent);
HtmlTree tdSummary = new HtmlTree(HtmlTag.TD);
tdSummary.addStyle(HtmlStyle.colLast);
addSummaryComment(packages[i], tdSummary);
HtmlTree tr = HtmlTree.TR(tdPackage);
tr.addContent(tdSummary);
if (i%2 == 0)
tr.addStyle(HtmlStyle.altColor);
else
tr.addStyle(HtmlStyle.rowColor);
tbody.addContent(tr);
}
}
} } | public class class_name {
protected void addPackagesList(PackageDoc[] packages, Content tbody) {
for (int i = 0; i < packages.length; i++) {
if (packages[i] != null && packages[i].name().length() > 0) {
if (configuration.nodeprecated && Util.isDeprecated(packages[i]))
continue;
Content packageLinkContent = getPackageLink(packages[i],
getPackageName(packages[i]));
Content tdPackage = HtmlTree.TD(HtmlStyle.colFirst, packageLinkContent);
HtmlTree tdSummary = new HtmlTree(HtmlTag.TD);
tdSummary.addStyle(HtmlStyle.colLast); // depends on control dependency: [if], data = [none]
addSummaryComment(packages[i], tdSummary); // depends on control dependency: [if], data = [(packages[i]]
HtmlTree tr = HtmlTree.TR(tdPackage);
tr.addContent(tdSummary); // depends on control dependency: [if], data = [none]
if (i%2 == 0)
tr.addStyle(HtmlStyle.altColor);
else
tr.addStyle(HtmlStyle.rowColor);
tbody.addContent(tr); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
  /**
   * Runs one-dimensional clustering on the single dimension {@code dim} using
   * kernel density estimation.
   *
   * <p>Phase 1 estimates a density value per object from its k nearest
   * neighbors along the sorted dimension (BALLOON or SAMPLE estimator, chosen
   * by {@code mode}). Phase 2 scans the sorted densities with a sliding
   * ring buffer of size {@code 2 * minwindow + 1} and cuts a new cluster at
   * each local density minimum.</p>
   *
   * @param relation the vector relation to cluster
   * @return a clustering whose top-level clusters partition the sorted ids
   */
  public Clustering<ClusterModel> run(Relation<V> relation) {
    ArrayModifiableDBIDs ids = DBIDUtil.newArray(relation.getDBIDs());
    final int size = ids.size();
    // Sort by the sole dimension
    ids.sort(new VectorUtil.SortDBIDsBySingleDimension(relation, dim));
    // Density storage.
    WritableDoubleDataStore density = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 0.);
    DBIDArrayIter iter = ids.iter(), iter2 = ids.iter();
    StepProgress sprog = LOG.isVerbose() ? new StepProgress("Clustering steps", 2) : null;
    LOG.beginStep(sprog, 1, "Kernel density estimation.");
    {
      // scratch holds the absolute 1-d distances to up to k neighbors on each side.
      double[] scratch = new double[2 * k];
      iter.seek(0);
      for(int i = 0; i < size; i++, iter.advance()) {
        // Current value.
        final double curv = relation.get(iter).doubleValue(dim);
        // pre/pos clamp the neighbor window to the array bounds; prek/posk are
        // the actual neighbor counts before and after position i.
        final int pre = Math.max(i - k, 0), prek = i - pre;
        final int pos = Math.min(i + k, size - 1), posk = pos - i;
        iter2.seek(pre);
        for(int j = 0; j < prek; j++, iter2.advance()) {
          scratch[j] = curv - relation.get(iter2).doubleValue(dim);
        }
        assert (iter2.getOffset() == i);
        iter2.advance();
        for(int j = 0; j < posk; j++, iter2.advance()) {
          scratch[prek + j] = relation.get(iter2).doubleValue(dim) - curv;
        }
        assert (prek + posk >= k);
        // kdist is the k-nearest-neighbor distance used as the kernel bandwidth.
        double kdist = QuickSelect.quickSelect(scratch, 0, prek + posk, k);
        switch(mode){
        case BALLOON: {
          // Balloon estimator: density at i from its own kNN bandwidth.
          double dens = 0.;
          if(kdist > 0.) {
            for(int j = 0; j < prek + posk; j++) {
              dens += kernel.density(scratch[j] / kdist);
            }
          }
          else {
            // Zero bandwidth (duplicate values): treat as infinite density.
            dens = Double.POSITIVE_INFINITY;
          }
          assert (iter.getOffset() == i);
          density.putDouble(iter, dens);
          break;
        }
        case SAMPLE: {
          // Sample-point estimator: i's kernel contributes to its neighbors' densities.
          if(kdist > 0.) {
            iter2.seek(pre);
            for(int j = 0; j < prek; j++, iter2.advance()) {
              double delta = curv - relation.get(iter2).doubleValue(dim);
              density.putDouble(iter2, density.doubleValue(iter2) + kernel.density(delta / kdist));
            }
            assert (iter2.getOffset() == i);
            iter2.advance();
            for(int j = 0; j < posk; j++, iter2.advance()) {
              double delta = relation.get(iter2).doubleValue(dim) - curv;
              density.putDouble(iter2, density.doubleValue(iter2) + kernel.density(delta / kdist));
            }
          }
          else {
            // Zero bandwidth: mark coincident neighbors (delta <= 0) as infinitely dense.
            iter2.seek(pre);
            for(int j = 0; j < prek; j++, iter2.advance()) {
              double delta = curv - relation.get(iter2).doubleValue(dim);
              if(!(delta > 0.)) {
                density.putDouble(iter2, Double.POSITIVE_INFINITY);
              }
            }
            assert (iter2.getOffset() == i);
            iter2.advance();
            for(int j = 0; j < posk; j++, iter2.advance()) {
              double delta = relation.get(iter2).doubleValue(dim) - curv;
              if(!(delta > 0.)) {
                density.putDouble(iter2, Double.POSITIVE_INFINITY);
              }
            }
          }
          break;
        }
        default:
          throw new UnsupportedOperationException("Unknown mode specified.");
        }
      }
    }
    LOG.beginStep(sprog, 2, "Local minima detection.");
    Clustering<ClusterModel> clustering = new Clustering<>("onedimensional-kde-clustering", "One-Dimensional clustering using kernel density estimation.");
    {
      // Ring buffer over the densities; t is the candidate position tested as a
      // local minimum against all other buffered values.
      double[] scratch = new double[2 * minwindow + 1];
      int begin = 0;
      int halfw = (minwindow + 1) >> 1;
      iter.seek(0);
      // Fill initial buffer.
      for(int i = 0; i < size; i++, iter.advance()) {
        final int m = i % scratch.length, t = (i - minwindow - 1) % scratch.length;
        scratch[m] = density.doubleValue(iter);
        // NOTE(review): the guard uses '>' rather than '>='; whether the very
        // first full buffer (i == scratch.length) is intentionally skipped
        // should be confirmed against the original algorithm.
        if(i > scratch.length) {
          double min = Double.POSITIVE_INFINITY;
          for(int j = 0; j < scratch.length; j++) {
            if(j != t && scratch[j] < min) {
              min = scratch[j];
            }
          }
          // Local minimum:
          if(scratch[t] < min) {
            int end = i - minwindow + 1;
            { // Test on which side the kNN is
              iter2.seek(end);
              double curv = relation.get(iter2).doubleValue(dim);
              iter2.seek(end - halfw);
              double left = relation.get(iter2).doubleValue(dim) - curv;
              iter2.seek(end + halfw);
              double right = curv - relation.get(iter2).doubleValue(dim);
              if(left < right) {
                end++;
              }
            }
            // Emit the cluster [begin, end) and continue from the cut point.
            iter2.seek(begin);
            ArrayModifiableDBIDs cids = DBIDUtil.newArray(end - begin);
            for(int j = 0; j < end - begin; j++, iter2.advance()) {
              cids.add(iter2);
            }
            clustering.addToplevelCluster(new Cluster<>(cids, ClusterModel.CLUSTER));
            begin = end;
          }
        }
      }
      // Extract last cluster
      int end = size;
      iter2.seek(begin);
      ArrayModifiableDBIDs cids = DBIDUtil.newArray(end - begin);
      for(int j = 0; j < end - begin; j++, iter2.advance()) {
        cids.add(iter2);
      }
      clustering.addToplevelCluster(new Cluster<>(cids, ClusterModel.CLUSTER));
    }
    LOG.ensureCompleted(sprog);
    return clustering;
  } }
public Clustering<ClusterModel> run(Relation<V> relation) {
ArrayModifiableDBIDs ids = DBIDUtil.newArray(relation.getDBIDs());
final int size = ids.size();
// Sort by the sole dimension
ids.sort(new VectorUtil.SortDBIDsBySingleDimension(relation, dim));
// Density storage.
WritableDoubleDataStore density = DataStoreUtil.makeDoubleStorage(ids, DataStoreFactory.HINT_HOT | DataStoreFactory.HINT_TEMP, 0.);
DBIDArrayIter iter = ids.iter(), iter2 = ids.iter();
StepProgress sprog = LOG.isVerbose() ? new StepProgress("Clustering steps", 2) : null;
LOG.beginStep(sprog, 1, "Kernel density estimation.");
{
double[] scratch = new double[2 * k];
iter.seek(0);
for(int i = 0; i < size; i++, iter.advance()) {
// Current value.
final double curv = relation.get(iter).doubleValue(dim);
final int pre = Math.max(i - k, 0), prek = i - pre;
final int pos = Math.min(i + k, size - 1), posk = pos - i;
iter2.seek(pre); // depends on control dependency: [for], data = [none]
for(int j = 0; j < prek; j++, iter2.advance()) {
scratch[j] = curv - relation.get(iter2).doubleValue(dim); // depends on control dependency: [for], data = [j]
}
assert (iter2.getOffset() == i); // depends on control dependency: [for], data = [i]
iter2.advance(); // depends on control dependency: [for], data = [none]
for(int j = 0; j < posk; j++, iter2.advance()) {
scratch[prek + j] = relation.get(iter2).doubleValue(dim) - curv; // depends on control dependency: [for], data = [j]
}
assert (prek + posk >= k); // depends on control dependency: [for], data = [none]
double kdist = QuickSelect.quickSelect(scratch, 0, prek + posk, k);
switch(mode){
case BALLOON: {
double dens = 0.;
if(kdist > 0.) {
for(int j = 0; j < prek + posk; j++) {
dens += kernel.density(scratch[j] / kdist); // depends on control dependency: [for], data = [j]
}
}
else {
dens = Double.POSITIVE_INFINITY; // depends on control dependency: [if], data = [none]
}
assert (iter.getOffset() == i);
density.putDouble(iter, dens);
break;
}
case SAMPLE: {
if(kdist > 0.) {
iter2.seek(pre); // depends on control dependency: [if], data = [none]
for(int j = 0; j < prek; j++, iter2.advance()) {
double delta = curv - relation.get(iter2).doubleValue(dim);
density.putDouble(iter2, density.doubleValue(iter2) + kernel.density(delta / kdist)); // depends on control dependency: [for], data = [none]
}
assert (iter2.getOffset() == i); // depends on control dependency: [if], data = [none]
iter2.advance(); // depends on control dependency: [if], data = [none]
for(int j = 0; j < posk; j++, iter2.advance()) {
double delta = relation.get(iter2).doubleValue(dim) - curv;
density.putDouble(iter2, density.doubleValue(iter2) + kernel.density(delta / kdist)); // depends on control dependency: [for], data = [none]
}
}
else {
iter2.seek(pre); // depends on control dependency: [if], data = [none]
for(int j = 0; j < prek; j++, iter2.advance()) {
double delta = curv - relation.get(iter2).doubleValue(dim);
if(!(delta > 0.)) {
density.putDouble(iter2, Double.POSITIVE_INFINITY); // depends on control dependency: [if], data = [none]
}
}
assert (iter2.getOffset() == i); // depends on control dependency: [if], data = [none]
iter2.advance(); // depends on control dependency: [if], data = [none]
for(int j = 0; j < posk; j++, iter2.advance()) {
double delta = relation.get(iter2).doubleValue(dim) - curv;
if(!(delta > 0.)) {
density.putDouble(iter2, Double.POSITIVE_INFINITY); // depends on control dependency: [if], data = [none]
}
}
}
break;
}
default:
throw new UnsupportedOperationException("Unknown mode specified.");
}
}
}
LOG.beginStep(sprog, 2, "Local minima detection.");
Clustering<ClusterModel> clustering = new Clustering<>("onedimensional-kde-clustering", "One-Dimensional clustering using kernel density estimation.");
{
double[] scratch = new double[2 * minwindow + 1];
int begin = 0;
int halfw = (minwindow + 1) >> 1;
iter.seek(0);
// Fill initial buffer.
for(int i = 0; i < size; i++, iter.advance()) {
final int m = i % scratch.length, t = (i - minwindow - 1) % scratch.length;
scratch[m] = density.doubleValue(iter);
if(i > scratch.length) {
double min = Double.POSITIVE_INFINITY;
for(int j = 0; j < scratch.length; j++) {
if(j != t && scratch[j] < min) {
min = scratch[j];
}
}
// Local minimum:
if(scratch[t] < min) {
int end = i - minwindow + 1;
{ // Test on which side the kNN is
iter2.seek(end);
double curv = relation.get(iter2).doubleValue(dim);
iter2.seek(end - halfw);
double left = relation.get(iter2).doubleValue(dim) - curv;
iter2.seek(end + halfw);
double right = curv - relation.get(iter2).doubleValue(dim);
if(left < right) {
end++;
}
}
iter2.seek(begin);
ArrayModifiableDBIDs cids = DBIDUtil.newArray(end - begin);
for(int j = 0; j < end - begin; j++, iter2.advance()) {
cids.add(iter2);
}
clustering.addToplevelCluster(new Cluster<>(cids, ClusterModel.CLUSTER));
begin = end;
}
}
}
// Extract last cluster
int end = size;
iter2.seek(begin);
ArrayModifiableDBIDs cids = DBIDUtil.newArray(end - begin);
for(int j = 0; j < end - begin; j++, iter2.advance()) {
cids.add(iter2);
}
clustering.addToplevelCluster(new Cluster<>(cids, ClusterModel.CLUSTER));
}
LOG.ensureCompleted(sprog);
return clustering;
} } |
public class class_name {
protected void setSpeed(boolean reverse, IntsRef edgeFlags, double speed) {
if (speed < 0 || Double.isNaN(speed))
throw new IllegalArgumentException("Speed cannot be negative or NaN: " + speed + ", flags:" + BitUtil.LITTLE.toBitString(edgeFlags));
if (speed < speedFactor / 2) {
speedEncoder.setDecimal(reverse, edgeFlags, 0);
accessEnc.setBool(reverse, edgeFlags, false);
return;
}
if (speed > getMaxSpeed())
speed = getMaxSpeed();
speedEncoder.setDecimal(reverse, edgeFlags, speed);
} } | public class class_name {
protected void setSpeed(boolean reverse, IntsRef edgeFlags, double speed) {
if (speed < 0 || Double.isNaN(speed))
throw new IllegalArgumentException("Speed cannot be negative or NaN: " + speed + ", flags:" + BitUtil.LITTLE.toBitString(edgeFlags));
if (speed < speedFactor / 2) {
speedEncoder.setDecimal(reverse, edgeFlags, 0); // depends on control dependency: [if], data = [none]
accessEnc.setBool(reverse, edgeFlags, false); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
if (speed > getMaxSpeed())
speed = getMaxSpeed();
speedEncoder.setDecimal(reverse, edgeFlags, speed);
} } |
public class class_name {
@Override
public Map<String, ZkDisconfData> getDisconfData(String app, String env, String version) {
String baseUrl = ZooPathMgr.getZooBaseUrl(zooConfig.getZookeeperUrlPrefix(), app, env, version);
Map<String, ZkDisconfData> fileMap = new HashMap<String, ZkDisconfData>();
try {
fileMap = getDisconfData(ZooPathMgr.getFileZooPath(baseUrl));
Map<String, ZkDisconfData> itemMap = getDisconfData(ZooPathMgr.getItemZooPath(baseUrl));
fileMap.putAll(itemMap);
} catch (KeeperException e) {
LOG.error(e.getMessage(), e);
} catch (InterruptedException e) {
LOG.error(e.getMessage(), e);
}
return fileMap;
} } | public class class_name {
@Override
public Map<String, ZkDisconfData> getDisconfData(String app, String env, String version) {
String baseUrl = ZooPathMgr.getZooBaseUrl(zooConfig.getZookeeperUrlPrefix(), app, env, version);
Map<String, ZkDisconfData> fileMap = new HashMap<String, ZkDisconfData>();
try {
fileMap = getDisconfData(ZooPathMgr.getFileZooPath(baseUrl)); // depends on control dependency: [try], data = [none]
Map<String, ZkDisconfData> itemMap = getDisconfData(ZooPathMgr.getItemZooPath(baseUrl));
fileMap.putAll(itemMap); // depends on control dependency: [try], data = [none]
} catch (KeeperException e) {
LOG.error(e.getMessage(), e);
} catch (InterruptedException e) { // depends on control dependency: [catch], data = [none]
LOG.error(e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
return fileMap;
} } |
public class class_name {
public static void setOpaque (JComponent comp, final boolean opaque)
{
applyToHierarchy(comp, new ComponentOp() {
public void apply (Component comp) {
if (comp instanceof JComponent) {
((JComponent) comp).setOpaque(opaque);
}
}
});
} } | public class class_name {
public static void setOpaque (JComponent comp, final boolean opaque)
{
applyToHierarchy(comp, new ComponentOp() {
public void apply (Component comp) {
if (comp instanceof JComponent) {
((JComponent) comp).setOpaque(opaque); // depends on control dependency: [if], data = [none]
}
}
});
} } |
public class class_name {
private void dialogChanged() {
// TODO: Needs to be cleaned up, this design is tied to a two page design and can't handle a third page, e.g. used for some other component type...
if (componentType.value == UTILITY_COMPONENT.ordinal()) {
updateStatus("Utility components are not yet supported");
return;
}
String artifactId = getArtifactId();
if (artifactId == null) {
updateStatus("Missing artifact id");
return;
}
if (artifactId.toLowerCase().startsWith("test")) {
updateStatus("Artifact id should not start with \"test\" since it will confuse JUnit to think that non unit-test classes are unit-test classes");
return;
}
// If creating an integration component then force viewing page #2 otherwise mark it as completed
ComponentEnum compEnum = ComponentEnum.get(componentType.value);
CreateIntegrationComponentPage p = ((CreateComponentWizard)getWizard()).getCreateIntegrationComponentPage();
p.setMustBeDisplayed(compEnum == INTEGRATION_COMPONENT);
getContainer().updateButtons();
// If creating an schema description integration component then force viewing page #2 otherwise mark it as completed
CreateServiceDescriptionComponentPage p2 = ((CreateComponentWizard)getWizard()).getCreateServiceDescriptionComponentPage();
p2.setMustBeDisplayed(compEnum == SD_SCHEMA_COMPONENT);
getContainer().updateButtons();
String rootFolderName = getRootFolder();
if (rootFolderName.length() == 0) {
updateStatus("The root folder must be specified");
return;
}
File rootFolder = new File(rootFolderName);
if (!rootFolder.isDirectory()) {
updateStatus("The root folder must be an existing folder");
return;
}
String projectFolderName = getComponentProjectName(componentType.value, getGroupId(), getArtifactId());
System.err.println("comp-type: " + componentType + ", proj-namn: " + projectFolderName);
if (projectFolderName != null) {
File projectFolder = new File(rootFolderName + "/" + projectFolderName);
if (projectFolder.exists()) {
updateStatus("Project folder [" + projectFolderName + "] already exists in root folder [" + rootFolderName + "], select a name of a non-existing folder");
return;
}
}
// TODO: Also assert that a project with the selected name doesn't already exist in the workspace
File mvnHome = new File(getMavenHome());
if (!mvnHome.isDirectory()) {
updateStatus("The maven home folder must be an existing folder, update in the soi-toolkit preferences page");
return;
}
File mvn = new File(getMavenHome() + "/bin/mvn" + (SwtUtil.isWindows() ? ".bat" : ""));
if (!mvn.isFile()) {
updateStatus("The maven executable can't be found at: " + mvn.getAbsolutePath() + ", update in the soi-toolkit preferences page");
return;
}
// Validate custom model by setting it on the model-factory-class
String groovyClass = getCustomGroovyModelImpl();
if (groovyClass == null || groovyClass.trim().length() == 0) {
System.err.println("### Empty groovy-classname, reset model");
ModelFactory.resetModelClass();
} else {
try {
System.err.println("### Setting groovy-classname: " + groovyClass);
ModelFactory.setModelGroovyClass(new URL(groovyClass));
} catch (Throwable ex) {
ModelFactory.resetModelClass();
updateStatus("Invalid Groovy class for a custom model (update in the soi-toolkit preferences page), error: " + ex);
return;
}
}
updateStatus(null);
} } | public class class_name {
private void dialogChanged() {
// TODO: Needs to be cleaned up, this design is tied to a two page design and can't handle a third page, e.g. used for some other component type...
if (componentType.value == UTILITY_COMPONENT.ordinal()) {
updateStatus("Utility components are not yet supported"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
String artifactId = getArtifactId();
if (artifactId == null) {
updateStatus("Missing artifact id"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
if (artifactId.toLowerCase().startsWith("test")) {
updateStatus("Artifact id should not start with \"test\" since it will confuse JUnit to think that non unit-test classes are unit-test classes"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// If creating an integration component then force viewing page #2 otherwise mark it as completed
ComponentEnum compEnum = ComponentEnum.get(componentType.value);
CreateIntegrationComponentPage p = ((CreateComponentWizard)getWizard()).getCreateIntegrationComponentPage();
p.setMustBeDisplayed(compEnum == INTEGRATION_COMPONENT);
getContainer().updateButtons();
// If creating an schema description integration component then force viewing page #2 otherwise mark it as completed
CreateServiceDescriptionComponentPage p2 = ((CreateComponentWizard)getWizard()).getCreateServiceDescriptionComponentPage();
p2.setMustBeDisplayed(compEnum == SD_SCHEMA_COMPONENT);
getContainer().updateButtons();
String rootFolderName = getRootFolder();
if (rootFolderName.length() == 0) {
updateStatus("The root folder must be specified"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
File rootFolder = new File(rootFolderName);
if (!rootFolder.isDirectory()) {
updateStatus("The root folder must be an existing folder"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
String projectFolderName = getComponentProjectName(componentType.value, getGroupId(), getArtifactId());
System.err.println("comp-type: " + componentType + ", proj-namn: " + projectFolderName);
if (projectFolderName != null) {
File projectFolder = new File(rootFolderName + "/" + projectFolderName);
if (projectFolder.exists()) {
updateStatus("Project folder [" + projectFolderName + "] already exists in root folder [" + rootFolderName + "], select a name of a non-existing folder"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
// TODO: Also assert that a project with the selected name doesn't already exist in the workspace
File mvnHome = new File(getMavenHome());
if (!mvnHome.isDirectory()) {
updateStatus("The maven home folder must be an existing folder, update in the soi-toolkit preferences page"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
File mvn = new File(getMavenHome() + "/bin/mvn" + (SwtUtil.isWindows() ? ".bat" : ""));
if (!mvn.isFile()) {
updateStatus("The maven executable can't be found at: " + mvn.getAbsolutePath() + ", update in the soi-toolkit preferences page"); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
// Validate custom model by setting it on the model-factory-class
String groovyClass = getCustomGroovyModelImpl();
if (groovyClass == null || groovyClass.trim().length() == 0) {
System.err.println("### Empty groovy-classname, reset model"); // depends on control dependency: [if], data = [none]
ModelFactory.resetModelClass(); // depends on control dependency: [if], data = [none]
} else {
try {
System.err.println("### Setting groovy-classname: " + groovyClass); // depends on control dependency: [try], data = [none]
ModelFactory.setModelGroovyClass(new URL(groovyClass)); // depends on control dependency: [try], data = [none]
} catch (Throwable ex) {
ModelFactory.resetModelClass();
updateStatus("Invalid Groovy class for a custom model (update in the soi-toolkit preferences page), error: " + ex);
return;
} // depends on control dependency: [catch], data = [none]
}
updateStatus(null);
} } |
public class class_name {
public void setParams(final Map<String, String> params) {
// check "format y" and "format y2"
String[] y_format_keys = {"format y", "format y2"};
for(String k : y_format_keys){
if(params.containsKey(k)){
params.put(k, URLDecoder.decode(params.get(k)));
}
}
this.params = params;
} } | public class class_name {
public void setParams(final Map<String, String> params) {
// check "format y" and "format y2"
String[] y_format_keys = {"format y", "format y2"};
for(String k : y_format_keys){
if(params.containsKey(k)){
params.put(k, URLDecoder.decode(params.get(k))); // depends on control dependency: [if], data = [none]
}
}
this.params = params;
} } |
public class class_name {
public Object getConnection(final Subject containerSubject,
final ConnectionRequestInfo requestInfo) throws ResourceException {
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
SibTr
.entry(this, TRACE, "getConnection", new Object[] {
SibRaUtils.subjectToString(containerSubject),
requestInfo });
}
SibRaConnection connection = null;
if (requestInfo instanceof SibRaConnectionRequestInfo) {
final SibRaConnectionRequestInfo sibRaRequestInfo = (SibRaConnectionRequestInfo) requestInfo;
SICoreConnection coreConnection = null;
try {
_connectionException = null;
coreConnection = _coreConnection.cloneConnection();
} catch (final SIConnectionUnavailableException exception) {
// No FFDC Code Needed
// We will catch SIConnectionUnavailableException and SIConnectionDroppedException here
connectionErrorOccurred(exception, false);
_connectionException = exception;
_validConnection = false; //PK60857
} catch (SIException exception) {
FFDCFilter
.processException(
exception,
"com.ibm.ws.sib.ra.impl.SibRaManagedConnection.getConnection",
FFDC_PROBE_1, this);
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
SibTr.exception(this, TRACE, exception);
}
connectionErrorOccurred(exception, false);
_connectionException = exception;
_validConnection = false;
} catch (SIErrorException exception) {
FFDCFilter
.processException(
exception,
"com.ibm.ws.sib.ra.impl.SibRaManagedConnection.getConnection",
FFDC_PROBE_7, this);
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
SibTr.exception(this, TRACE, exception);
}
connectionErrorOccurred(exception, false);
_connectionException = exception;
_validConnection = false;
}
if (coreConnection != null)
{
sibRaRequestInfo.setCoreConnection(coreConnection);
connection = new SibRaConnection(this, sibRaRequestInfo,
coreConnection);
_connections.add(connection);
}
else
{
connection = new SibRaConnection(this, sibRaRequestInfo, coreConnection);
}
} else {
ResourceAdapterInternalException exception = new ResourceAdapterInternalException(NLS.getFormattedMessage(
"UNRECOGNISED_REQUEST_INFO_CWSIV0401", new Object[] {
requestInfo, SibRaConnectionRequestInfo.class },
null));
if (TRACE.isEventEnabled()) {
SibTr.exception(this, TRACE, exception);
}
throw exception;
}
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
SibTr.exit(this, TRACE, "getConnection", connection);
}
return connection;
} } | public class class_name {
public Object getConnection(final Subject containerSubject,
final ConnectionRequestInfo requestInfo) throws ResourceException {
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
SibTr
.entry(this, TRACE, "getConnection", new Object[] {
SibRaUtils.subjectToString(containerSubject),
requestInfo });
}
SibRaConnection connection = null;
if (requestInfo instanceof SibRaConnectionRequestInfo) {
final SibRaConnectionRequestInfo sibRaRequestInfo = (SibRaConnectionRequestInfo) requestInfo;
SICoreConnection coreConnection = null;
try {
_connectionException = null; // depends on control dependency: [try], data = [none]
coreConnection = _coreConnection.cloneConnection(); // depends on control dependency: [try], data = [none]
} catch (final SIConnectionUnavailableException exception) {
// No FFDC Code Needed
// We will catch SIConnectionUnavailableException and SIConnectionDroppedException here
connectionErrorOccurred(exception, false);
_connectionException = exception;
_validConnection = false; //PK60857
} catch (SIException exception) { // depends on control dependency: [catch], data = [none]
FFDCFilter
.processException(
exception,
"com.ibm.ws.sib.ra.impl.SibRaManagedConnection.getConnection",
FFDC_PROBE_1, this);
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
SibTr.exception(this, TRACE, exception); // depends on control dependency: [if], data = [none]
}
connectionErrorOccurred(exception, false);
_connectionException = exception;
_validConnection = false;
} catch (SIErrorException exception) { // depends on control dependency: [catch], data = [none]
FFDCFilter
.processException(
exception,
"com.ibm.ws.sib.ra.impl.SibRaManagedConnection.getConnection",
FFDC_PROBE_7, this);
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEventEnabled()) {
SibTr.exception(this, TRACE, exception); // depends on control dependency: [if], data = [none]
}
connectionErrorOccurred(exception, false);
_connectionException = exception;
_validConnection = false;
} // depends on control dependency: [catch], data = [none]
if (coreConnection != null)
{
sibRaRequestInfo.setCoreConnection(coreConnection); // depends on control dependency: [if], data = [(coreConnection]
connection = new SibRaConnection(this, sibRaRequestInfo,
coreConnection); // depends on control dependency: [if], data = [none]
_connections.add(connection); // depends on control dependency: [if], data = [none]
}
else
{
connection = new SibRaConnection(this, sibRaRequestInfo, coreConnection); // depends on control dependency: [if], data = [none]
}
} else {
ResourceAdapterInternalException exception = new ResourceAdapterInternalException(NLS.getFormattedMessage(
"UNRECOGNISED_REQUEST_INFO_CWSIV0401", new Object[] {
requestInfo, SibRaConnectionRequestInfo.class },
null));
if (TRACE.isEventEnabled()) {
SibTr.exception(this, TRACE, exception); // depends on control dependency: [if], data = [none]
}
throw exception;
}
if (TraceComponent.isAnyTracingEnabled() && TRACE.isEntryEnabled()) {
SibTr.exit(this, TRACE, "getConnection", connection);
}
return connection;
} } |
public class class_name {
public HandlerRegistration setSelectLocationHandler(SelectLocationHandler handler) {
if (handlerManager.getHandlerCount(SelectLocationHandler.TYPE) > 0) {
SelectLocationHandler previous = handlerManager.getHandler(SelectLocationHandler.TYPE, 0);
handlerManager.removeHandler(SelectLocationHandler.TYPE, previous);
}
return handlerManager.addHandler(SelectLocationHandler.TYPE, handler);
} } | public class class_name {
public HandlerRegistration setSelectLocationHandler(SelectLocationHandler handler) {
if (handlerManager.getHandlerCount(SelectLocationHandler.TYPE) > 0) {
SelectLocationHandler previous = handlerManager.getHandler(SelectLocationHandler.TYPE, 0);
handlerManager.removeHandler(SelectLocationHandler.TYPE, previous); // depends on control dependency: [if], data = [none]
}
return handlerManager.addHandler(SelectLocationHandler.TYPE, handler);
} } |
public class class_name {
public static Map<String,String> bundleToStringMap( final ResourceBundle bundle,
final String suffix )
{
if ( bundle == null )
{
return Collections.<String,String>emptyMap();
}
String theSuffix;
if ( StringUtils.isEmpty( suffix ) )
{
theSuffix = "";
}
else
{
theSuffix = suffix + ".";
}
Map<String,String> map = new LinkedHashMap<String,String>();
Enumeration<String> keys = bundle.getKeys();
while( keys.hasMoreElements() )
{
String key = keys.nextElement();
Object value = bundle.getObject( key );
String strValue = ( value != null ) ? value.toString() : null;
map.put( theSuffix + key, strValue );
}
return map;
} } | public class class_name {
public static Map<String,String> bundleToStringMap( final ResourceBundle bundle,
final String suffix )
{
if ( bundle == null )
{
return Collections.<String,String>emptyMap(); // depends on control dependency: [if], data = [none]
}
String theSuffix;
if ( StringUtils.isEmpty( suffix ) )
{
theSuffix = ""; // depends on control dependency: [if], data = [none]
}
else
{
theSuffix = suffix + "."; // depends on control dependency: [if], data = [none]
}
Map<String,String> map = new LinkedHashMap<String,String>();
Enumeration<String> keys = bundle.getKeys();
while( keys.hasMoreElements() )
{
String key = keys.nextElement();
Object value = bundle.getObject( key );
String strValue = ( value != null ) ? value.toString() : null;
map.put( theSuffix + key, strValue ); // depends on control dependency: [while], data = [none]
}
return map;
} } |
public class class_name {
private void writeLegacyFormatting(List<Object> list, Object paramExtractor) {
if (paramExtractor != null) {
list.add("{\"params\":");
list.add(paramExtractor);
list.add(",");
}
else {
list.add("{");
}
if (HAS_SCRIPT) {
/*
* {
* "params": ...,
* "lang": "...",
* "script": "...",
* "upsert": {...}
* }
*/
if (HAS_LANG) {
list.add(SCRIPT_LANG_1X);
}
list.add(SCRIPT_1X);
if (UPSERT) {
list.add(",\"upsert\":");
}
}
else {
/*
* {
* "doc_as_upsert": true,
* "doc": {...}
* }
*/
if (UPSERT) {
list.add("\"doc_as_upsert\":true,");
}
list.add("\"doc\":");
}
} } | public class class_name {
private void writeLegacyFormatting(List<Object> list, Object paramExtractor) {
if (paramExtractor != null) {
list.add("{\"params\":"); // depends on control dependency: [if], data = [none]
list.add(paramExtractor); // depends on control dependency: [if], data = [(paramExtractor]
list.add(","); // depends on control dependency: [if], data = [none]
}
else {
list.add("{"); // depends on control dependency: [if], data = [none]
}
if (HAS_SCRIPT) {
/*
* {
* "params": ...,
* "lang": "...",
* "script": "...",
* "upsert": {...}
* }
*/
if (HAS_LANG) {
list.add(SCRIPT_LANG_1X); // depends on control dependency: [if], data = [none]
}
list.add(SCRIPT_1X); // depends on control dependency: [if], data = [none]
if (UPSERT) {
list.add(",\"upsert\":"); // depends on control dependency: [if], data = [none]
}
}
else {
/*
* {
* "doc_as_upsert": true,
* "doc": {...}
* }
*/
if (UPSERT) {
list.add("\"doc_as_upsert\":true,"); // depends on control dependency: [if], data = [none]
}
list.add("\"doc\":"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private static void recursiveFindBadNodesInTable(Node n, Node cellroot, Vector<Node> nodes)
{
Node cell = cellroot;
if (n.getNodeType() == Node.ELEMENT_NODE)
{
String tag = n.getNodeName();
if (tag.equalsIgnoreCase("table"))
{
if (cell != null) //do not enter nested tables
return;
}
else if (tag.equalsIgnoreCase("tbody") ||
tag.equalsIgnoreCase("thead") ||
tag.equalsIgnoreCase("tfoot") ||
tag.equalsIgnoreCase("tr") ||
tag.equalsIgnoreCase("col") ||
tag.equalsIgnoreCase("colgroup"))
{
}
else if (tag.equalsIgnoreCase("td") || tag.equalsIgnoreCase("th") || tag.equalsIgnoreCase("caption"))
{
cell = n;
}
else //other elements
{
if (cell == null)
{
nodes.add(n);
return;
}
}
} //other nodes
else if (n.getNodeType() == Node.TEXT_NODE)
{
if (cell == null && n.getNodeValue().trim().length() > 0)
{
nodes.add(n);
return;
}
}
NodeList child = n.getChildNodes();
for (int i = 0; i < child.getLength(); i++)
recursiveFindBadNodesInTable(child.item(i), cell, nodes);
} } | public class class_name {
private static void recursiveFindBadNodesInTable(Node n, Node cellroot, Vector<Node> nodes)
{
Node cell = cellroot;
if (n.getNodeType() == Node.ELEMENT_NODE)
{
String tag = n.getNodeName();
if (tag.equalsIgnoreCase("table"))
{
if (cell != null) //do not enter nested tables
return;
}
else if (tag.equalsIgnoreCase("tbody") ||
tag.equalsIgnoreCase("thead") ||
tag.equalsIgnoreCase("tfoot") ||
tag.equalsIgnoreCase("tr") ||
tag.equalsIgnoreCase("col") ||
tag.equalsIgnoreCase("colgroup"))
{
}
else if (tag.equalsIgnoreCase("td") || tag.equalsIgnoreCase("th") || tag.equalsIgnoreCase("caption"))
{
cell = n; // depends on control dependency: [if], data = [none]
}
else //other elements
{
if (cell == null)
{
nodes.add(n); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
} //other nodes
else if (n.getNodeType() == Node.TEXT_NODE)
{
if (cell == null && n.getNodeValue().trim().length() > 0)
{
nodes.add(n); // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
}
NodeList child = n.getChildNodes();
for (int i = 0; i < child.getLength(); i++)
recursiveFindBadNodesInTable(child.item(i), cell, nodes);
} } |
public class class_name {
private Conversation startNewConversation(ConnectionData connectionDataToUse, ConversationReceiveListener conversationReceiveListener, boolean isNewConnectionData,
boolean handshake) throws SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "startNewConversation",
new Object[] { connectionDataToUse, conversationReceiveListener, Boolean.valueOf(isNewConnectionData), Boolean.valueOf(handshake) });
Conversation retConversation;
synchronized (this)
{
if (isNewConnectionData)
{
synchronized (connectionData)
{
connectionData.add(connectionDataToUse);
}
}
// Start a new conversation over the designated connection.
// If this is a connection on behalf of the WMQRA then we need to disable the normal handshaking behaviour
// in order to allow other non WMQRA conversations to handshake as normal.
retConversation = connectionDataToUse.getConnection().startNewConversation(conversationReceiveListener, handshake);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "startNewConversation", retConversation);
return retConversation;
} } | public class class_name {
private Conversation startNewConversation(ConnectionData connectionDataToUse, ConversationReceiveListener conversationReceiveListener, boolean isNewConnectionData,
boolean handshake) throws SIResourceException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "startNewConversation",
new Object[] { connectionDataToUse, conversationReceiveListener, Boolean.valueOf(isNewConnectionData), Boolean.valueOf(handshake) });
Conversation retConversation;
synchronized (this)
{
if (isNewConnectionData)
{
synchronized (connectionData) // depends on control dependency: [if], data = [none]
{
connectionData.add(connectionDataToUse);
}
}
// Start a new conversation over the designated connection.
// If this is a connection on behalf of the WMQRA then we need to disable the normal handshaking behaviour
// in order to allow other non WMQRA conversations to handshake as normal.
retConversation = connectionDataToUse.getConnection().startNewConversation(conversationReceiveListener, handshake);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "startNewConversation", retConversation);
return retConversation;
} } |
public class class_name {
public void marshall(DeleteSkillGroupRequest deleteSkillGroupRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteSkillGroupRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteSkillGroupRequest.getSkillGroupArn(), SKILLGROUPARN_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DeleteSkillGroupRequest deleteSkillGroupRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteSkillGroupRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteSkillGroupRequest.getSkillGroupArn(), SKILLGROUPARN_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void addServerReadyConsumer(Consumer<Server> consumer) {
synchronized (readyConsumers) {
if (ready) {
consumer.accept(this);
} else {
readyConsumers.add(consumer);
}
}
} } | public class class_name {
public void addServerReadyConsumer(Consumer<Server> consumer) {
synchronized (readyConsumers) {
if (ready) {
consumer.accept(this); // depends on control dependency: [if], data = [none]
} else {
readyConsumers.add(consumer); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void validate(final ValidationContext context) {
if (!isValid()) {
val messages = context.getMessageContext();
messages.addMessage(new MessageBuilder()
.error()
.source("token")
.defaultText("Unable to accept credential with an empty or unspecified token")
.build());
}
} } | public class class_name {
public void validate(final ValidationContext context) {
if (!isValid()) {
val messages = context.getMessageContext();
messages.addMessage(new MessageBuilder()
.error()
.source("token")
.defaultText("Unable to accept credential with an empty or unspecified token")
.build()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
public Collection<KamNode> getNodes(NodeFilter filter) {
Set<KamNode> kamNodes = new LinkedHashSet<KamNode>();
for (KamNode kamNode : idNodeMap.values()) {
// Check for a node filter
if (null != filter) {
if (!filter.accept(kamNode)) {
continue;
}
}
kamNodes.add(kamNode);
}
return kamNodes;
} } | public class class_name {
@Override
public Collection<KamNode> getNodes(NodeFilter filter) {
Set<KamNode> kamNodes = new LinkedHashSet<KamNode>();
for (KamNode kamNode : idNodeMap.values()) {
// Check for a node filter
if (null != filter) {
if (!filter.accept(kamNode)) {
continue;
}
}
kamNodes.add(kamNode); // depends on control dependency: [for], data = [kamNode]
}
return kamNodes;
} } |
public class class_name {
protected void rebuild() {
// Checks if the number of nodes in the tree exceeds the maximum number
while (this.rootCount > this.maxNumClusterFeatures) {
// Doubles the global threshold
this.T *= 2.0;
this.root.setThreshold(calcRSquared(1));
// Adds all nodes to the ClusteringFeature tree again
Queue<ClusteringTreeNode> Q = new LinkedList<ClusteringTreeNode>();
Q.addAll(this.root.getChildren());
this.root.clearChildren();
this.rootCount = 0;
while (!Q.isEmpty()) {
ClusteringTreeNode x = Q.element();
Q.addAll(x.getChildren());
x.clearChildren();
bicoCFUpdate(x);
Q.remove();
}
}
} } | public class class_name {
protected void rebuild() {
// Checks if the number of nodes in the tree exceeds the maximum number
while (this.rootCount > this.maxNumClusterFeatures) {
// Doubles the global threshold
this.T *= 2.0; // depends on control dependency: [while], data = [none]
this.root.setThreshold(calcRSquared(1)); // depends on control dependency: [while], data = [none]
// Adds all nodes to the ClusteringFeature tree again
Queue<ClusteringTreeNode> Q = new LinkedList<ClusteringTreeNode>();
Q.addAll(this.root.getChildren()); // depends on control dependency: [while], data = [none]
this.root.clearChildren(); // depends on control dependency: [while], data = [none]
this.rootCount = 0; // depends on control dependency: [while], data = [none]
while (!Q.isEmpty()) {
ClusteringTreeNode x = Q.element();
Q.addAll(x.getChildren()); // depends on control dependency: [while], data = [none]
x.clearChildren(); // depends on control dependency: [while], data = [none]
bicoCFUpdate(x); // depends on control dependency: [while], data = [none]
Q.remove(); // depends on control dependency: [while], data = [none]
}
}
} } |
public class class_name {
protected void setupValidators() {
if (m_loginname.getValidators().size() == 0) {
m_loginname.addValidator(new LoginNameValidator());
m_pw.getPassword1Field().addValidator(new PasswordValidator());
m_site.addValidator(new StartSiteValidator());
m_startview.addValidator(new StartViewValidator());
m_startfolder.addValidator(new StartPathValidator());
}
} } | public class class_name {
protected void setupValidators() {
if (m_loginname.getValidators().size() == 0) {
m_loginname.addValidator(new LoginNameValidator()); // depends on control dependency: [if], data = [none]
m_pw.getPassword1Field().addValidator(new PasswordValidator()); // depends on control dependency: [if], data = [none]
m_site.addValidator(new StartSiteValidator()); // depends on control dependency: [if], data = [none]
m_startview.addValidator(new StartViewValidator()); // depends on control dependency: [if], data = [none]
m_startfolder.addValidator(new StartPathValidator()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected boolean identifyRaggeds(NetcdfDataset ds, EncodingInfo info, Dimension instanceDim, Dimension sampleDim, Formatter errlog) {
// check for contiguous
Evaluator.VarAtt varatt = Evaluator.findVariableWithAttribute(ds, CF.SAMPLE_DIMENSION); // CF 1.6
if (varatt == null) varatt = Evaluator.findVariableWithAttribute(ds, CF.RAGGED_ROWSIZE); // backwards compatibility
if (varatt != null) {
Variable ragged_rowSize = varatt.var;
String sampleDimName = varatt.att.getStringValue();
if (sampleDim != null && !sampleDimName.equals(sampleDim.getShortName())) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable has sample dimension %s must be %s%n", sampleDimName, sampleDim.getShortName());
return false;
}
if (sampleDim == null) {
sampleDim = ds.findDimension(sampleDimName);
if (sampleDim == null) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable has invalid sample dimension %s%n", sampleDimName);
return false;
}
}
Dimension rrDim;
if (ragged_rowSize.getRank() > 0)
rrDim = ragged_rowSize.getDimension(0); // nobs(station)
else if (ragged_rowSize.getParentStructure() != null) {
Structure parent = ragged_rowSize.getParentStructure(); // if ragged_rowSize is a structure member, use dimension of parent structure
rrDim = parent.getDimension(0);
} else {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable (%s) must have rank 1%n", ragged_rowSize);
return false;
}
if (instanceDim != null && instanceDim != rrDim) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable has invalid instance dimension %s must be %s%n", rrDim, instanceDim);
return false;
}
instanceDim = rrDim;
if (ragged_rowSize.getDataType() != DataType.INT) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable must be of type integer%n");
return false;
}
info.set(Encoding.raggedContiguous, instanceDim, sampleDim);
info.ragged_rowSize = ragged_rowSize;
info.parentStruct = ragged_rowSize.getParentStructure();
return true;
} // rowsize was found
varatt = Evaluator.findVariableWithAttribute(ds, CF.INSTANCE_DIMENSION); // CF 1.6
if (varatt == null) varatt = Evaluator.findVariableWithAttribute(ds, CF.RAGGED_PARENTINDEX); // backwards compatibility
if (varatt != null) {
Variable ragged_parentIndex = varatt.var;
String instanceDimName = varatt.att.getStringValue();
if (instanceDim != null && !instanceDimName.equals(instanceDim.getShortName())) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable has instance dimension %s must be %s%n", instanceDimName, instanceDim.getShortName());
return false;
}
if (instanceDim == null) {
instanceDim = ds.findDimension(instanceDimName);
if (instanceDim == null) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable has invalid instance dimension %s%n", instanceDimName);
return false;
}
}
if (ragged_parentIndex.getDataType() != DataType.INT) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable must be of type integer%n");
return false;
}
// allow netcdf-4 structures, eg kunicki
if (ragged_parentIndex.isMemberOfStructure()) {
Structure s = ragged_parentIndex.getParentStructure();
if (s.getRank() == 0 || !s.getDimension(0).equals(sampleDim)) {
errlog.format("CFpointObs: Indexed ragged array representation (structure): parent_index variable must be of form Struct { %s }(%s) %n", ragged_parentIndex.getFullName(), sampleDim.getShortName());
return false;
}
} else {
if (ragged_parentIndex.getRank() != 1 || !ragged_parentIndex.getDimension(0).equals(sampleDim)) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable must be of form %s(%s) %n", ragged_parentIndex.getFullName(), sampleDim.getShortName());
return false;
}
}
info.set(Encoding.raggedIndex, instanceDim, sampleDim);
info.ragged_parentIndex = ragged_parentIndex;
info.childStruct = ragged_parentIndex.getParentStructure();
return true;
} // parent index was found
/* kunicki 10/21/2011
Variable ragged_parentIndex = Evaluator.getVariableWithAttributeValue(ds, CF.RAGGED_PARENTINDEX, parentDim.getShortName());
if ((ragged_parentIndex == null) ||
(!ragged_parentIndex.isMemberOfStructure() && (ragged_parentIndex.getRank() == 0 || ragged_parentIndex.getDimension(0).getShortName() != childDim.getShortName()) ||
(ragged_parentIndex.isMemberOfStructure() && (ragged_parentIndex.getParentStructure().getRank() == 0 || ragged_parentIndex.getParentStructure().getDimension(0).getShortName() != childDim.getShortName())))
) {
// if ((null == ragged_parentIndex) || (ragged_parentIndex.getRank() == 0) || (ragged_parentIndex.getDimension(0).getShortName() != childDim.getShortName())) {
errlog.format("there must be a ragged_parent_index variable with outer dimension that matches obs dimension %s%n", childDim.getShortName());
return null;
} */
return false;
} } | public class class_name {
protected boolean identifyRaggeds(NetcdfDataset ds, EncodingInfo info, Dimension instanceDim, Dimension sampleDim, Formatter errlog) {
// check for contiguous
Evaluator.VarAtt varatt = Evaluator.findVariableWithAttribute(ds, CF.SAMPLE_DIMENSION); // CF 1.6
if (varatt == null) varatt = Evaluator.findVariableWithAttribute(ds, CF.RAGGED_ROWSIZE); // backwards compatibility
if (varatt != null) {
Variable ragged_rowSize = varatt.var;
String sampleDimName = varatt.att.getStringValue();
if (sampleDim != null && !sampleDimName.equals(sampleDim.getShortName())) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable has sample dimension %s must be %s%n", sampleDimName, sampleDim.getShortName());
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
if (sampleDim == null) {
sampleDim = ds.findDimension(sampleDimName);
// depends on control dependency: [if], data = [(sampleDim]
if (sampleDim == null) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable has invalid sample dimension %s%n", sampleDimName);
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
}
Dimension rrDim;
if (ragged_rowSize.getRank() > 0)
rrDim = ragged_rowSize.getDimension(0); // nobs(station)
else if (ragged_rowSize.getParentStructure() != null) {
Structure parent = ragged_rowSize.getParentStructure(); // if ragged_rowSize is a structure member, use dimension of parent structure
rrDim = parent.getDimension(0);
// depends on control dependency: [if], data = [none]
} else {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable (%s) must have rank 1%n", ragged_rowSize);
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
if (instanceDim != null && instanceDim != rrDim) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable has invalid instance dimension %s must be %s%n", rrDim, instanceDim);
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
instanceDim = rrDim;
// depends on control dependency: [if], data = [none]
if (ragged_rowSize.getDataType() != DataType.INT) {
errlog.format("CFpointObs: Contiguous ragged array representation: row_size variable must be of type integer%n");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
info.set(Encoding.raggedContiguous, instanceDim, sampleDim);
// depends on control dependency: [if], data = [none]
info.ragged_rowSize = ragged_rowSize;
// depends on control dependency: [if], data = [none]
info.parentStruct = ragged_rowSize.getParentStructure();
// depends on control dependency: [if], data = [none]
return true;
// depends on control dependency: [if], data = [none]
} // rowsize was found
varatt = Evaluator.findVariableWithAttribute(ds, CF.INSTANCE_DIMENSION); // CF 1.6
if (varatt == null) varatt = Evaluator.findVariableWithAttribute(ds, CF.RAGGED_PARENTINDEX); // backwards compatibility
if (varatt != null) {
Variable ragged_parentIndex = varatt.var;
String instanceDimName = varatt.att.getStringValue();
if (instanceDim != null && !instanceDimName.equals(instanceDim.getShortName())) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable has instance dimension %s must be %s%n", instanceDimName, instanceDim.getShortName());
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
if (instanceDim == null) {
instanceDim = ds.findDimension(instanceDimName);
// depends on control dependency: [if], data = [(instanceDim]
if (instanceDim == null) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable has invalid instance dimension %s%n", instanceDimName);
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
}
if (ragged_parentIndex.getDataType() != DataType.INT) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable must be of type integer%n");
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
// allow netcdf-4 structures, eg kunicki
if (ragged_parentIndex.isMemberOfStructure()) {
Structure s = ragged_parentIndex.getParentStructure();
if (s.getRank() == 0 || !s.getDimension(0).equals(sampleDim)) {
errlog.format("CFpointObs: Indexed ragged array representation (structure): parent_index variable must be of form Struct { %s }(%s) %n", ragged_parentIndex.getFullName(), sampleDim.getShortName());
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
} else {
if (ragged_parentIndex.getRank() != 1 || !ragged_parentIndex.getDimension(0).equals(sampleDim)) {
errlog.format("CFpointObs: Indexed ragged array representation: parent_index variable must be of form %s(%s) %n", ragged_parentIndex.getFullName(), sampleDim.getShortName());
// depends on control dependency: [if], data = [none]
return false;
// depends on control dependency: [if], data = [none]
}
}
info.set(Encoding.raggedIndex, instanceDim, sampleDim);
// depends on control dependency: [if], data = [none]
info.ragged_parentIndex = ragged_parentIndex;
// depends on control dependency: [if], data = [none]
info.childStruct = ragged_parentIndex.getParentStructure();
// depends on control dependency: [if], data = [none]
return true;
// depends on control dependency: [if], data = [none]
} // parent index was found
/* kunicki 10/21/2011
Variable ragged_parentIndex = Evaluator.getVariableWithAttributeValue(ds, CF.RAGGED_PARENTINDEX, parentDim.getShortName());
if ((ragged_parentIndex == null) ||
(!ragged_parentIndex.isMemberOfStructure() && (ragged_parentIndex.getRank() == 0 || ragged_parentIndex.getDimension(0).getShortName() != childDim.getShortName()) ||
(ragged_parentIndex.isMemberOfStructure() && (ragged_parentIndex.getParentStructure().getRank() == 0 || ragged_parentIndex.getParentStructure().getDimension(0).getShortName() != childDim.getShortName())))
) {
// if ((null == ragged_parentIndex) || (ragged_parentIndex.getRank() == 0) || (ragged_parentIndex.getDimension(0).getShortName() != childDim.getShortName())) {
errlog.format("there must be a ragged_parent_index variable with outer dimension that matches obs dimension %s%n", childDim.getShortName());
return null;
} */
return false;
} } |
public class class_name {
public static void addEntityChangeListener(I_CmsEntityChangeListener changeListener, String changeScope) {
CmsDebugLog.getInstance().printLine("trying to ad change listener for scope: " + changeScope);
if ((INSTANCE == null) || (INSTANCE.m_entityObserver == null)) {
CmsDebugLog.getInstance().printLine("handling external registration");
if (isObserverExported()) {
CmsDebugLog.getInstance().printLine("registration is available");
try {
addNativeListener(changeListener, changeScope);
} catch (Exception e) {
CmsDebugLog.getInstance().printLine(
"Exception occured during listener registration" + e.getMessage());
}
} else {
throw new RuntimeException("Editor is not initialized yet.");
}
} else {
INSTANCE.m_entityObserver.addEntityChangeListener(changeListener, changeScope);
}
} } | public class class_name {
public static void addEntityChangeListener(I_CmsEntityChangeListener changeListener, String changeScope) {
CmsDebugLog.getInstance().printLine("trying to ad change listener for scope: " + changeScope);
if ((INSTANCE == null) || (INSTANCE.m_entityObserver == null)) {
CmsDebugLog.getInstance().printLine("handling external registration");
// depends on control dependency: [if], data = [none]
if (isObserverExported()) {
CmsDebugLog.getInstance().printLine("registration is available");
// depends on control dependency: [if], data = [none]
try {
addNativeListener(changeListener, changeScope);
// depends on control dependency: [try], data = [none]
} catch (Exception e) {
CmsDebugLog.getInstance().printLine(
"Exception occured during listener registration" + e.getMessage());
}
// depends on control dependency: [catch], data = [none]
} else {
throw new RuntimeException("Editor is not initialized yet.");
}
} else {
INSTANCE.m_entityObserver.addEntityChangeListener(changeListener, changeScope);
// depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private static double approxBinomialCdf(int k, double p, int n) {
//use an approximation as described at http://www.math.ucla.edu/~tom/distributions/binomial.html
double Z = p;
double A=k+1;
double B=n-k;
double S=A+B;
double BT=Math.exp(ContinuousDistributions.logGamma(S)-ContinuousDistributions.logGamma(B)-ContinuousDistributions.logGamma(A)+A*Math.log(Z)+B*Math.log(1-Z));
double probabilitySum;
if (Z<(A+1)/(S+2)) {
probabilitySum=BT*ContinuousDistributions.betinc(Z,A,B);
}
else {
probabilitySum=1.0-BT*ContinuousDistributions.betinc(1.0-Z,B,A);
}
probabilitySum=1.0-probabilitySum;
return probabilitySum;
} } | public class class_name {
private static double approxBinomialCdf(int k, double p, int n) {
//use an approximation as described at http://www.math.ucla.edu/~tom/distributions/binomial.html
double Z = p;
double A=k+1;
double B=n-k;
double S=A+B;
double BT=Math.exp(ContinuousDistributions.logGamma(S)-ContinuousDistributions.logGamma(B)-ContinuousDistributions.logGamma(A)+A*Math.log(Z)+B*Math.log(1-Z));
double probabilitySum;
if (Z<(A+1)/(S+2)) {
probabilitySum=BT*ContinuousDistributions.betinc(Z,A,B); // depends on control dependency: [if], data = [(Z]
}
else {
probabilitySum=1.0-BT*ContinuousDistributions.betinc(1.0-Z,B,A); // depends on control dependency: [if], data = [none]
}
probabilitySum=1.0-probabilitySum;
return probabilitySum;
} } |
public class class_name {
public void addResource(String resource) {
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_ADD_MOD_RESOURCE_1, resource));
}
m_resources.add(resource);
} } | public class class_name {
public void addResource(String resource) {
if (LOG.isDebugEnabled()) {
LOG.debug(Messages.get().getBundle().key(Messages.LOG_ADD_MOD_RESOURCE_1, resource)); // depends on control dependency: [if], data = [none]
}
m_resources.add(resource);
} } |
public class class_name {
protected boolean loadMore()
throws XMLStreamException
{
WstxInputSource input = mInput;
do {
/* Need to make sure offsets are properly updated for error
* reporting purposes, and do this now while previous amounts
* are still known.
*/
mCurrInputProcessed += mInputEnd;
verifyLimit("Maximum document characters", mConfig.getMaxCharacters(), mCurrInputProcessed);
mCurrInputRowStart -= mInputEnd;
int count;
try {
count = input.readInto(this);
if (count > 0) {
return true;
}
input.close();
} catch (IOException ioe) {
throw constructFromIOE(ioe);
}
if (input == mRootInput) {
/* Note: no need to check entity/input nesting in this
* particular case, since it will be handled by higher level
* parsing code (results in an unexpected EOF)
*/
return false;
}
WstxInputSource parent = input.getParent();
if (parent == null) { // sanity check!
throwNullParent(input);
}
/* 13-Feb-2006, TSa: Ok, do we violate a proper nesting constraints
* with this input block closure?
*/
if (mCurrDepth != input.getScopeId()) {
handleIncompleteEntityProblem(input);
}
mInput = input = parent;
input.restoreContext(this);
mInputTopDepth = input.getScopeId();
/* 21-Feb-2006, TSa: Since linefeed normalization needs to be
* suppressed for internal entity expansion, we may need to
* change the state...
*/
if (!mNormalizeLFs) {
mNormalizeLFs = !input.fromInternalEntity();
}
// Maybe there are leftovers from that input in buffer now?
} while (mInputPtr >= mInputEnd);
return true;
} } | public class class_name {
protected boolean loadMore()
throws XMLStreamException
{
WstxInputSource input = mInput;
do {
/* Need to make sure offsets are properly updated for error
* reporting purposes, and do this now while previous amounts
* are still known.
*/
mCurrInputProcessed += mInputEnd;
verifyLimit("Maximum document characters", mConfig.getMaxCharacters(), mCurrInputProcessed);
mCurrInputRowStart -= mInputEnd;
int count;
try {
count = input.readInto(this); // depends on control dependency: [try], data = [none]
if (count > 0) {
return true; // depends on control dependency: [if], data = [none]
}
input.close(); // depends on control dependency: [try], data = [none]
} catch (IOException ioe) {
throw constructFromIOE(ioe);
} // depends on control dependency: [catch], data = [none]
if (input == mRootInput) {
/* Note: no need to check entity/input nesting in this
* particular case, since it will be handled by higher level
* parsing code (results in an unexpected EOF)
*/
return false; // depends on control dependency: [if], data = [none]
}
WstxInputSource parent = input.getParent();
if (parent == null) { // sanity check!
throwNullParent(input); // depends on control dependency: [if], data = [none]
}
/* 13-Feb-2006, TSa: Ok, do we violate a proper nesting constraints
* with this input block closure?
*/
if (mCurrDepth != input.getScopeId()) {
handleIncompleteEntityProblem(input); // depends on control dependency: [if], data = [none]
}
mInput = input = parent;
input.restoreContext(this);
mInputTopDepth = input.getScopeId();
/* 21-Feb-2006, TSa: Since linefeed normalization needs to be
* suppressed for internal entity expansion, we may need to
* change the state...
*/
if (!mNormalizeLFs) {
mNormalizeLFs = !input.fromInternalEntity(); // depends on control dependency: [if], data = [none]
}
// Maybe there are leftovers from that input in buffer now?
} while (mInputPtr >= mInputEnd);
return true;
} } |
public class class_name {
public void train(String corpus)
{
CorpusLoader.walk(corpus, new CorpusLoader.Handler()
{
@Override
public void handle(Document document)
{
List<List<Word>> simpleSentenceList = document.getSimpleSentenceList();
List<List<IWord>> compatibleList = new LinkedList<List<IWord>>();
for (List<Word> wordList : simpleSentenceList)
{
compatibleList.add(new LinkedList<IWord>(wordList));
}
CommonDictionaryMaker.this.compute(compatibleList);
}
});
} } | public class class_name {
public void train(String corpus)
{
CorpusLoader.walk(corpus, new CorpusLoader.Handler()
{
@Override
public void handle(Document document)
{
List<List<Word>> simpleSentenceList = document.getSimpleSentenceList();
List<List<IWord>> compatibleList = new LinkedList<List<IWord>>();
for (List<Word> wordList : simpleSentenceList)
{
compatibleList.add(new LinkedList<IWord>(wordList)); // depends on control dependency: [for], data = [wordList]
}
CommonDictionaryMaker.this.compute(compatibleList);
}
});
} } |
public class class_name {
protected State getRuntimePropsEnrichedTblProps() {
State tableProps = new State(this.props.getTablePartitionProps());
if (this.props.getRuntimeTableProps().isPresent()){
tableProps.setProp(HiveMetaStoreUtils.RUNTIME_PROPS, this.props.getRuntimeTableProps().get());
}
return tableProps;
} } | public class class_name {
protected State getRuntimePropsEnrichedTblProps() {
State tableProps = new State(this.props.getTablePartitionProps());
if (this.props.getRuntimeTableProps().isPresent()){
tableProps.setProp(HiveMetaStoreUtils.RUNTIME_PROPS, this.props.getRuntimeTableProps().get()); // depends on control dependency: [if], data = [none]
}
return tableProps;
} } |
public class class_name {
public final void dispose() throws IllegalStateException {
synchronized (this) {
if (pageServiceRegistration == null) {
throw new IllegalStateException(String.format("%s [%s] has not been registered.", getClass()
.getSimpleName(), this));
}
pageServiceRegistration.unregister();
pageServiceRegistration = null;
if (mountPointRegistration != null) {
mountPointRegistration.dispose();
mountPointRegistration = null;
}
}
} } | public class class_name {
public final void dispose() throws IllegalStateException {
synchronized (this) {
if (pageServiceRegistration == null) {
throw new IllegalStateException(String.format("%s [%s] has not been registered.", getClass()
.getSimpleName(), this));
}
pageServiceRegistration.unregister();
pageServiceRegistration = null;
if (mountPointRegistration != null) {
mountPointRegistration.dispose(); // depends on control dependency: [if], data = [none]
mountPointRegistration = null; // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void indexDocument(final ODocument iDocument) {
modificationLock.requestModificationLock();
try {
Object fieldValue;
for (final String fieldName : iDocument.fieldNames()) {
fieldValue = iDocument.field(fieldName);
put(fieldValue, iDocument);
}
acquireExclusiveLock();
try {
map.save();
} finally {
releaseExclusiveLock();
}
} finally {
modificationLock.releaseModificationLock();
}
} } | public class class_name {
public void indexDocument(final ODocument iDocument) {
modificationLock.requestModificationLock();
try {
Object fieldValue;
for (final String fieldName : iDocument.fieldNames()) {
fieldValue = iDocument.field(fieldName);
// depends on control dependency: [for], data = [fieldName]
put(fieldValue, iDocument);
// depends on control dependency: [for], data = [none]
}
acquireExclusiveLock();
// depends on control dependency: [try], data = [none]
try {
map.save();
// depends on control dependency: [try], data = [none]
} finally {
releaseExclusiveLock();
}
} finally {
modificationLock.releaseModificationLock();
}
} } |
public class class_name {
@Override
public EClass getIfcFontWeight() {
if (ifcFontWeightEClass == null) {
ifcFontWeightEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(808);
}
return ifcFontWeightEClass;
} } | public class class_name {
@Override
public EClass getIfcFontWeight() {
if (ifcFontWeightEClass == null) {
ifcFontWeightEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI).getEClassifiers()
.get(808);
// depends on control dependency: [if], data = [none]
}
return ifcFontWeightEClass;
} } |
public class class_name {
public URL getResource(String name) {
for (ClassLoader parent : parents) {
URL url = parent.getResource(name);
if (url != null) {
return url;
}
}
return super.getResource(name);
} } | public class class_name {
public URL getResource(String name) {
for (ClassLoader parent : parents) {
URL url = parent.getResource(name);
if (url != null) {
return url; // depends on control dependency: [if], data = [none]
}
}
return super.getResource(name);
} } |
public class class_name {
@Nonnull
public static List<ViewDescriptor> allInstantiable() {
List<ViewDescriptor> r = new ArrayList<>();
StaplerRequest request = Stapler.getCurrentRequest();
if (request == null) {
throw new IllegalStateException("This method can only be invoked from a stapler request");
}
ViewGroup owner = request.findAncestorObject(ViewGroup.class);
if (owner == null) {
throw new IllegalStateException("This method can only be invoked from a request with a ViewGroup ancestor");
}
for (ViewDescriptor d : DescriptorVisibilityFilter.apply(owner, all())) {
if (d.isApplicableIn(owner) && d.isInstantiable()
&& owner.getACL().hasCreatePermission(Jenkins.getAuthentication(), owner, d)) {
r.add(d);
}
}
return r;
} } | public class class_name {
@Nonnull
public static List<ViewDescriptor> allInstantiable() {
List<ViewDescriptor> r = new ArrayList<>();
StaplerRequest request = Stapler.getCurrentRequest();
if (request == null) {
throw new IllegalStateException("This method can only be invoked from a stapler request");
}
ViewGroup owner = request.findAncestorObject(ViewGroup.class);
if (owner == null) {
throw new IllegalStateException("This method can only be invoked from a request with a ViewGroup ancestor");
}
for (ViewDescriptor d : DescriptorVisibilityFilter.apply(owner, all())) {
if (d.isApplicableIn(owner) && d.isInstantiable()
&& owner.getACL().hasCreatePermission(Jenkins.getAuthentication(), owner, d)) {
r.add(d); // depends on control dependency: [if], data = [none]
}
}
return r;
} } |
public class class_name {
@Override
public boolean moveTo(long pKey) {
try {
this.mCurrentData = (BlockDataElement)mPageReadTrx.getData(pKey);
return true;
} catch (TTException e) {
return false;
}
} } | public class class_name {
@Override
public boolean moveTo(long pKey) {
try {
this.mCurrentData = (BlockDataElement)mPageReadTrx.getData(pKey);
// depends on control dependency: [try], data = [none]
return true;
// depends on control dependency: [try], data = [none]
} catch (TTException e) {
return false;
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public CreateDeploymentGroupRequest withOnPremisesInstanceTagFilters(TagFilter... onPremisesInstanceTagFilters) {
if (this.onPremisesInstanceTagFilters == null) {
setOnPremisesInstanceTagFilters(new com.amazonaws.internal.SdkInternalList<TagFilter>(onPremisesInstanceTagFilters.length));
}
for (TagFilter ele : onPremisesInstanceTagFilters) {
this.onPremisesInstanceTagFilters.add(ele);
}
return this;
} } | public class class_name {
public CreateDeploymentGroupRequest withOnPremisesInstanceTagFilters(TagFilter... onPremisesInstanceTagFilters) {
if (this.onPremisesInstanceTagFilters == null) {
setOnPremisesInstanceTagFilters(new com.amazonaws.internal.SdkInternalList<TagFilter>(onPremisesInstanceTagFilters.length)); // depends on control dependency: [if], data = [none]
}
for (TagFilter ele : onPremisesInstanceTagFilters) {
this.onPremisesInstanceTagFilters.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public void marshall(KinesisStreamsOutputUpdate kinesisStreamsOutputUpdate, ProtocolMarshaller protocolMarshaller) {
if (kinesisStreamsOutputUpdate == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(kinesisStreamsOutputUpdate.getResourceARNUpdate(), RESOURCEARNUPDATE_BINDING);
protocolMarshaller.marshall(kinesisStreamsOutputUpdate.getRoleARNUpdate(), ROLEARNUPDATE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(KinesisStreamsOutputUpdate kinesisStreamsOutputUpdate, ProtocolMarshaller protocolMarshaller) {
if (kinesisStreamsOutputUpdate == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(kinesisStreamsOutputUpdate.getResourceARNUpdate(), RESOURCEARNUPDATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(kinesisStreamsOutputUpdate.getRoleARNUpdate(), ROLEARNUPDATE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public boolean validateChannel(final Channel channel)
{
boolean ret = false;
final SetVarAction var = new SetVarAction(channel, "testState", "1");
ManagerResponse response = null;
try
{
AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX();
response = pbx.sendAction(var, 500);
}
catch (final Exception e)
{
ActivityHelper.logger.debug(e, e);
ActivityHelper.logger.error("getVariable: " + e);
}
if ((response != null) && (response.getAttribute("Response").compareToIgnoreCase("success") == 0))
{
ret = true;
}
return ret;
} } | public class class_name {
public boolean validateChannel(final Channel channel)
{
boolean ret = false;
final SetVarAction var = new SetVarAction(channel, "testState", "1");
ManagerResponse response = null;
try
{
AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX();
response = pbx.sendAction(var, 500);
// depends on control dependency: [try], data = [none]
}
catch (final Exception e)
{
ActivityHelper.logger.debug(e, e);
ActivityHelper.logger.error("getVariable: " + e);
}
// depends on control dependency: [catch], data = [none]
if ((response != null) && (response.getAttribute("Response").compareToIgnoreCase("success") == 0))
{
ret = true;
// depends on control dependency: [if], data = [none]
}
return ret;
} } |
public class class_name {
private float getBorderWidthInside(int side) {
float width = 0f;
if (useBorderPadding) {
switch (side) {
case Rectangle.LEFT:
width = getBorderWidthLeft();
break;
case Rectangle.RIGHT:
width = getBorderWidthRight();
break;
case Rectangle.TOP:
width = getBorderWidthTop();
break;
default: // default and BOTTOM
width = getBorderWidthBottom();
break;
}
// non-variable (original style) borders overlap the rectangle (only 1/2 counts)
if (!isUseVariableBorders()) {
width = width / 2f;
}
}
return width;
} } | public class class_name {
private float getBorderWidthInside(int side) {
float width = 0f;
if (useBorderPadding) {
switch (side) {
case Rectangle.LEFT:
width = getBorderWidthLeft();
break;
case Rectangle.RIGHT:
width = getBorderWidthRight();
break;
case Rectangle.TOP:
width = getBorderWidthTop();
break;
default: // default and BOTTOM
width = getBorderWidthBottom();
break;
}
// non-variable (original style) borders overlap the rectangle (only 1/2 counts)
if (!isUseVariableBorders()) {
width = width / 2f; // depends on control dependency: [if], data = [none]
}
}
return width;
} } |
public class class_name {
void recover(ReadableLogRecord logRecord) throws LogCorruptedException,InternalLogException
{
if (tc.isEntryEnabled()) Tr.entry(tc, "recover", new Object[] {logRecord, this});
// If the parent recovery log instance has experienced a serious internal error then prevent
// this operation from executing.
if (_recLog.failed())
{
if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "InternalLogException");
throw new InternalLogException(null);
}
try
{
// Determine the number of data items to be recovered this time.
int numDataItems = logRecord.getInt();
if (tc.isDebugEnabled()) Tr.debug(tc, "Recovering '" + numDataItems + "' data items");
// Reconstruct each data item in memory.
for (int d = 0; d < numDataItems; d++)
{
if (tc.isDebugEnabled()) Tr.debug(tc, "Recovering data item '" + d + "'");
DataItem dataItem = null;
if (_singleData)
{
// Check to see if we are replacing something, or if there is no
// data in this section.
if (_writtenData.size() > 0) /* @MD19840A*/
{ /* @MD19840A*/
dataItem = (DataItem)_writtenData.get(0); /* @MD19840A*/
} /* @MD19840A*/
if (dataItem == null) /* @MD19840A*/
{ /* @MD19840A*/
// This recoverable unit section may hold only piece of data. This
// must be held by the special DataItem subclass, SingleDataItem.
// Create the wrapper to hold this information.
dataItem = new SingleDataItem(_storageMode, logRecord, this);
_writtenData.add(dataItem); /* @MD19994M*/
} /* @MD19840A*/
else /* @MD19840A*/
{ /* @MD19840A*/
// Replace the existing data in the SingleDataItem /* @MD19840A*/
((SingleDataItem)dataItem).setData(logRecord); /* @MD19840A*/
_writtenData.set(0, dataItem); /* @MD19994A*/
} /* @MD19840A*/
}
else
{
// This recoverable unit section may hold an arbitrary number of pieces
// of data. These are all cached within the standard DataItem class.
// Create the wrapper to hold this information.
dataItem = new DataItem(_storageMode, logRecord, this);
// As this recoverable unit seciton may hold an arbirary number of
// data items, add the wrapper to the end of the list.
_writtenData.add(dataItem);
}
// However this information is stored inside the recoverable unit, 'dataItem' is the
// last data block to be added to the recoverable unit section. Preserve this reference
// in '_lastData' in order to have quick access to it from the lastData() method.
_lastDataItem = dataItem;
}
}
catch (InternalLogException exc)
{
FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitSectionImpl.recover", "876", this);
if (tc.isDebugEnabled()) Tr.debug(tc, "An InternalLogException occured reconstructng a RecoverableUnitSectionImpl");
_recLog.markFailed(exc); /* @MD19484C*/
if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "LogCorruptedException");
throw new LogCorruptedException(exc);
}
catch (Throwable exc)
{
FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitSectionImpl.recover", "884", this);
if (tc.isDebugEnabled()) Tr.debug(tc, "An exception occured reconstructng a RecoverableUnitSectionImpl");
_recLog.markFailed(exc); /* @MD19484C*/
if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "InternalLogException");
throw new InternalLogException(exc);
}
if (tc.isEntryEnabled()) Tr.exit(tc, "recover");
} } | public class class_name {
void recover(ReadableLogRecord logRecord) throws LogCorruptedException,InternalLogException
{
if (tc.isEntryEnabled()) Tr.entry(tc, "recover", new Object[] {logRecord, this});
// If the parent recovery log instance has experienced a serious internal error then prevent
// this operation from executing.
if (_recLog.failed())
{
if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "InternalLogException");
throw new InternalLogException(null);
}
try
{
// Determine the number of data items to be recovered this time.
int numDataItems = logRecord.getInt();
if (tc.isDebugEnabled()) Tr.debug(tc, "Recovering '" + numDataItems + "' data items");
// Reconstruct each data item in memory.
for (int d = 0; d < numDataItems; d++)
{
if (tc.isDebugEnabled()) Tr.debug(tc, "Recovering data item '" + d + "'");
DataItem dataItem = null;
if (_singleData)
{
// Check to see if we are replacing something, or if there is no
// data in this section.
if (_writtenData.size() > 0) /* @MD19840A*/
{ /* @MD19840A*/
dataItem = (DataItem)_writtenData.get(0); /* @MD19840A*/ // depends on control dependency: [if], data = [0)]
} /* @MD19840A*/
if (dataItem == null) /* @MD19840A*/
{ /* @MD19840A*/
// This recoverable unit section may hold only piece of data. This
// must be held by the special DataItem subclass, SingleDataItem.
// Create the wrapper to hold this information.
dataItem = new SingleDataItem(_storageMode, logRecord, this); // depends on control dependency: [if], data = [none]
_writtenData.add(dataItem); /* @MD19994M*/ // depends on control dependency: [if], data = [(dataItem]
} /* @MD19840A*/
else /* @MD19840A*/
{ /* @MD19840A*/
// Replace the existing data in the SingleDataItem /* @MD19840A*/
((SingleDataItem)dataItem).setData(logRecord); /* @MD19840A*/ // depends on control dependency: [if], data = [none]
_writtenData.set(0, dataItem); /* @MD19994A*/ // depends on control dependency: [if], data = [none]
} /* @MD19840A*/
}
else
{
// This recoverable unit section may hold an arbitrary number of pieces
// of data. These are all cached within the standard DataItem class.
// Create the wrapper to hold this information.
dataItem = new DataItem(_storageMode, logRecord, this); // depends on control dependency: [if], data = [none]
// As this recoverable unit seciton may hold an arbirary number of
// data items, add the wrapper to the end of the list.
_writtenData.add(dataItem); // depends on control dependency: [if], data = [none]
}
// However this information is stored inside the recoverable unit, 'dataItem' is the
// last data block to be added to the recoverable unit section. Preserve this reference
// in '_lastData' in order to have quick access to it from the lastData() method.
_lastDataItem = dataItem; // depends on control dependency: [for], data = [none]
}
}
catch (InternalLogException exc)
{
FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitSectionImpl.recover", "876", this);
if (tc.isDebugEnabled()) Tr.debug(tc, "An InternalLogException occured reconstructng a RecoverableUnitSectionImpl");
_recLog.markFailed(exc); /* @MD19484C*/
if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "LogCorruptedException");
throw new LogCorruptedException(exc);
} // depends on control dependency: [catch], data = [none]
catch (Throwable exc)
{
FFDCFilter.processException(exc, "com.ibm.ws.recoverylog.spi.RecoverableUnitSectionImpl.recover", "884", this);
if (tc.isDebugEnabled()) Tr.debug(tc, "An exception occured reconstructng a RecoverableUnitSectionImpl");
_recLog.markFailed(exc); /* @MD19484C*/
if (tc.isEntryEnabled()) Tr.exit(tc, "recover", "InternalLogException");
throw new InternalLogException(exc);
} // depends on control dependency: [catch], data = [none]
if (tc.isEntryEnabled()) Tr.exit(tc, "recover");
} } |
public class class_name {
@Pure
protected static double paramFloat(String paramName, Map<String, String> parameters)
throws LawParameterNotFoundException {
final String svalue = parameters.get(paramName);
if (svalue != null && !"".equals(svalue)) { //$NON-NLS-1$
try {
return Float.parseFloat(svalue);
} catch (AssertionError e) {
throw e;
} catch (Throwable e) {
//
}
}
throw new LawParameterNotFoundException(paramName);
} } | public class class_name {
@Pure
protected static double paramFloat(String paramName, Map<String, String> parameters)
throws LawParameterNotFoundException {
final String svalue = parameters.get(paramName);
if (svalue != null && !"".equals(svalue)) { //$NON-NLS-1$
try {
return Float.parseFloat(svalue); // depends on control dependency: [try], data = [none]
} catch (AssertionError e) {
throw e;
} catch (Throwable e) { // depends on control dependency: [catch], data = [none]
//
} // depends on control dependency: [catch], data = [none]
}
throw new LawParameterNotFoundException(paramName);
} } |
public class class_name {
private CALC unbindAll(AbstractCalculator<CALC> undbindFrom) {
// find root and first child
AbstractCalculator root = undbindFrom.parentCalculator != null ? undbindFrom.parentCalculator : undbindFrom;
AbstractCalculator child = root.childCalculator;
while (root != null) {
AbstractCalculator tmpParent = root.parentCalculator;
if (tmpParent == null)
break;
else
root = tmpParent;
child = root.childCalculator;
}
// undbind all from root to last child
while (child != null) {
if (child.isUnbind == false)
root.expression(child, false);
child.isUnbind = true;
child = child.childCalculator; // new unbind child
}
return (CALC) undbindFrom;
} } | public class class_name {
private CALC unbindAll(AbstractCalculator<CALC> undbindFrom) {
// find root and first child
AbstractCalculator root = undbindFrom.parentCalculator != null ? undbindFrom.parentCalculator : undbindFrom;
AbstractCalculator child = root.childCalculator;
while (root != null) {
AbstractCalculator tmpParent = root.parentCalculator;
if (tmpParent == null)
break;
else
root = tmpParent;
child = root.childCalculator;
// depends on control dependency: [while], data = [none]
}
// undbind all from root to last child
while (child != null) {
if (child.isUnbind == false)
root.expression(child, false);
child.isUnbind = true;
// depends on control dependency: [while], data = [none]
child = child.childCalculator; // new unbind child
// depends on control dependency: [while], data = [none]
}
return (CALC) undbindFrom;
} } |
public class class_name {
public static BritishCutoverDate from(TemporalAccessor temporal) {
if (temporal instanceof BritishCutoverDate) {
return (BritishCutoverDate) temporal;
}
return new BritishCutoverDate(LocalDate.from(temporal));
} } | public class class_name {
public static BritishCutoverDate from(TemporalAccessor temporal) {
if (temporal instanceof BritishCutoverDate) {
return (BritishCutoverDate) temporal; // depends on control dependency: [if], data = [none]
}
return new BritishCutoverDate(LocalDate.from(temporal));
} } |
public class class_name {
public void fromJson(JSONObject json) {
super.fromJson(json);
if (json.containsKey(CHANGE)) {
change = new Change(json.getJSONObject(CHANGE));
}
if (json.containsKey(PATCH_SET)) {
patchSet = new PatchSet(json.getJSONObject(PATCH_SET));
} else if (json.containsKey(PATCHSET)) {
patchSet = new PatchSet(json.getJSONObject(PATCHSET));
}
} } | public class class_name {
public void fromJson(JSONObject json) {
super.fromJson(json);
if (json.containsKey(CHANGE)) {
change = new Change(json.getJSONObject(CHANGE)); // depends on control dependency: [if], data = [none]
}
if (json.containsKey(PATCH_SET)) {
patchSet = new PatchSet(json.getJSONObject(PATCH_SET)); // depends on control dependency: [if], data = [none]
} else if (json.containsKey(PATCHSET)) {
patchSet = new PatchSet(json.getJSONObject(PATCHSET)); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
void grow(int neededSize) {
if (neededSize < 0) {
throw new IllegalArgumentException(
"Cannot grow BufferHolder by size " + neededSize + " because the size is negative");
}
if (neededSize > ARRAY_MAX - totalSize()) {
throw new IllegalArgumentException(
"Cannot grow BufferHolder by size " + neededSize + " because the size after growing " +
"exceeds size limitation " + ARRAY_MAX);
}
final int length = totalSize() + neededSize;
if (buffer.length < length) {
// This will not happen frequently, because the buffer is re-used.
int newLength = length < ARRAY_MAX / 2 ? length * 2 : ARRAY_MAX;
int roundedSize = ByteArrayMethods.roundNumberOfBytesToNearestWord(newLength);
final byte[] tmp = new byte[roundedSize];
Platform.copyMemory(
buffer,
Platform.BYTE_ARRAY_OFFSET,
tmp,
Platform.BYTE_ARRAY_OFFSET,
totalSize());
buffer = tmp;
row.pointTo(buffer, buffer.length);
}
} } | public class class_name {
void grow(int neededSize) {
if (neededSize < 0) {
throw new IllegalArgumentException(
"Cannot grow BufferHolder by size " + neededSize + " because the size is negative");
}
if (neededSize > ARRAY_MAX - totalSize()) {
throw new IllegalArgumentException(
"Cannot grow BufferHolder by size " + neededSize + " because the size after growing " +
"exceeds size limitation " + ARRAY_MAX);
}
final int length = totalSize() + neededSize;
if (buffer.length < length) {
// This will not happen frequently, because the buffer is re-used.
int newLength = length < ARRAY_MAX / 2 ? length * 2 : ARRAY_MAX;
int roundedSize = ByteArrayMethods.roundNumberOfBytesToNearestWord(newLength);
final byte[] tmp = new byte[roundedSize];
Platform.copyMemory(
buffer,
Platform.BYTE_ARRAY_OFFSET,
tmp,
Platform.BYTE_ARRAY_OFFSET,
totalSize()); // depends on control dependency: [if], data = [none]
buffer = tmp; // depends on control dependency: [if], data = [none]
row.pointTo(buffer, buffer.length); // depends on control dependency: [if], data = [length)]
}
} } |
public class class_name {
public void resetProperties() {
for ( Entry<ParameterType, Map<String, Object>> e : stores.entrySet()) {
e.getValue().clear();
}
} } | public class class_name {
public void resetProperties() {
for ( Entry<ParameterType, Map<String, Object>> e : stores.entrySet()) {
e.getValue().clear(); // depends on control dependency: [for], data = [e]
}
} } |
public class class_name {
private void onMembershipChange(ClusterMembershipEvent event) {
if (event.type() == ClusterMembershipEvent.Type.MEMBER_REMOVED) {
recoverTransactions(transactions.entrySet().iterator(), event.subject().id());
}
} } | public class class_name {
private void onMembershipChange(ClusterMembershipEvent event) {
if (event.type() == ClusterMembershipEvent.Type.MEMBER_REMOVED) {
recoverTransactions(transactions.entrySet().iterator(), event.subject().id()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private String getExtension(String fileName) {
if (fileName == null) {
return null;
}
String ext = null;
int i = fileName.lastIndexOf('.');
if (i > 0 && i < fileName.length() - 1) {
ext = fileName.substring(i + 1).toLowerCase();
}
return ext;
} } | public class class_name {
private String getExtension(String fileName) {
if (fileName == null) {
return null; // depends on control dependency: [if], data = [none]
}
String ext = null;
int i = fileName.lastIndexOf('.');
if (i > 0 && i < fileName.length() - 1) {
ext = fileName.substring(i + 1).toLowerCase(); // depends on control dependency: [if], data = [(i]
}
return ext;
} } |
public class class_name {
public boolean lastChild() {
if (mNode == null || mNode.getChildCount() < 1) {
return false;
}
for (int i = mNode.getChildCount() - 1; i >= 0; --i) {
AccessibilityNodeInfoCompat newNode = mNode.getChild(i);
if (newNode == null) {
return false;
}
if (AccessibilityNodeInfoUtils.isVisibleOrLegacy(newNode)) {
reset(newNode);
return true;
}
newNode.recycle();
}
return false;
} } | public class class_name {
public boolean lastChild() {
if (mNode == null || mNode.getChildCount() < 1) {
return false; // depends on control dependency: [if], data = [none]
}
for (int i = mNode.getChildCount() - 1; i >= 0; --i) {
AccessibilityNodeInfoCompat newNode = mNode.getChild(i);
if (newNode == null) {
return false; // depends on control dependency: [if], data = [none]
}
if (AccessibilityNodeInfoUtils.isVisibleOrLegacy(newNode)) {
reset(newNode); // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
newNode.recycle(); // depends on control dependency: [for], data = [none]
}
return false;
} } |
public class class_name {
private Result allSubTypes(List<PType> sub, List<PType> sup,
boolean invignore)
{
if (sub.size() != sup.size())
{
return Result.No;
} else
{
for (int i = 0; i < sub.size(); i++)
{
if (searchSubType(sub.get(i), sup.get(i), invignore) == Result.No)
{
return Result.No;
}
}
}
return Result.Yes;
} } | public class class_name {
private Result allSubTypes(List<PType> sub, List<PType> sup,
boolean invignore)
{
if (sub.size() != sup.size())
{
return Result.No; // depends on control dependency: [if], data = [none]
} else
{
for (int i = 0; i < sub.size(); i++)
{
if (searchSubType(sub.get(i), sup.get(i), invignore) == Result.No)
{
return Result.No; // depends on control dependency: [if], data = [none]
}
}
}
return Result.Yes;
} } |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.