code | code_dependency
---|---|
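Each row below pairs a Java snippet (the code column) with the same snippet re-emitted with inline annotations (the code_dependency column). The trailing comments in the second column appear to mark statements whose execution is governed by an enclosing construct, naming that construct ([if], [for], [while], [try], [catch]) and any data the statement depends on (or [none]). As a minimal illustrative sketch of that convention, using a placeholder class and method name rather than an actual dataset row:
public class class_name {
public int clampToZero(int value) {
if (value < 0) {
return 0; // depends on control dependency: [if], data = [none]
}
return value;
} }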
public class class_name {
@Override
public final FunctionType getSuperClassConstructor() {
checkArgument(isConstructor() || isInterface());
ObjectType maybeSuperInstanceType = getPrototype().getImplicitPrototype();
if (maybeSuperInstanceType == null) {
return null;
}
return maybeSuperInstanceType.getConstructor();
} } | public class class_name {
@Override
public final FunctionType getSuperClassConstructor() {
checkArgument(isConstructor() || isInterface());
ObjectType maybeSuperInstanceType = getPrototype().getImplicitPrototype();
if (maybeSuperInstanceType == null) {
return null; // depends on control dependency: [if], data = [none]
}
return maybeSuperInstanceType.getConstructor();
} } |
public class class_name {
public JsonModelCoder<T> doResolve(T obj) {
JsonModelCoder<T> coder = resolve(obj);
if (coder == null) {
throw new NullPointerException("resolve method must return coder.");
}
return coder;
} } | public class class_name {
public JsonModelCoder<T> doResolve(T obj) {
JsonModelCoder<T> coder = resolve(obj);
if (coder == null) {
throw new NullPointerException("resolve method must return coder."); // depends on control dependency: [if], data = [none]
}
return coder;
} } |
public class class_name {
private LinkedHashSet<Formula> gatherAppliedOperands(final NAryOperator operator) {
final LinkedHashSet<Formula> applied = new LinkedHashSet<>();
for (final Formula operand : operator) {
applied.add(apply(operand, false));
}
return applied;
} } | public class class_name {
private LinkedHashSet<Formula> gatherAppliedOperands(final NAryOperator operator) {
final LinkedHashSet<Formula> applied = new LinkedHashSet<>();
for (final Formula operand : operator) {
applied.add(apply(operand, false)); // depends on control dependency: [for], data = [operand]
}
return applied;
} } |
public class class_name {
private static boolean merge(final ClassWriter cw, int t,
final int[] types, final int index) {
int u = types[index];
if (u == t) {
// if the types are equal, merge(u,t)=u, so there is no change
return false;
}
if ((t & ~DIM) == NULL) {
if (u == NULL) {
return false;
}
t = NULL;
}
if (u == 0) {
// if types[index] has never been assigned, merge(u,t)=t
types[index] = t;
return true;
}
int v;
if ((u & BASE_KIND) == OBJECT || (u & DIM) != 0) {
// if u is a reference type of any dimension
if (t == NULL) {
// if t is the NULL type, merge(u,t)=u, so there is no change
return false;
} else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) {
// if t and u have the same dimension and same base kind
if ((u & BASE_KIND) == OBJECT) {
// if t is also a reference type, and if u and t have the
// same dimension merge(u,t) = dim(t) | common parent of the
// element types of u and t
v = (t & DIM) | OBJECT
| cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE);
} else {
// if u and t are array types, but not with the same element
// type, merge(u,t) = dim(u) - 1 | java/lang/Object
int vdim = ELEMENT_OF + (u & DIM);
v = vdim | OBJECT | cw.addType("java/lang/Object");
}
} else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) {
// if t is any other reference or array type, the merged type
// is min(udim, tdim) | java/lang/Object, where udim is the
// array dimension of u, minus 1 if u is an array type with a
// primitive element type (and similarly for tdim).
int tdim = (((t & DIM) == 0 || (t & BASE_KIND) == OBJECT) ? 0
: ELEMENT_OF) + (t & DIM);
int udim = (((u & DIM) == 0 || (u & BASE_KIND) == OBJECT) ? 0
: ELEMENT_OF) + (u & DIM);
v = Math.min(tdim, udim) | OBJECT
| cw.addType("java/lang/Object");
} else {
// if t is any other type, merge(u,t)=TOP
v = TOP;
}
} else if (u == NULL) {
// if u is the NULL type, merge(u,t)=t,
// or TOP if t is not a reference type
v = (t & BASE_KIND) == OBJECT || (t & DIM) != 0 ? t : TOP;
} else {
// if u is any other type, merge(u,t)=TOP whatever t
v = TOP;
}
if (u != v) {
types[index] = v;
return true;
}
return false;
} } | public class class_name {
private static boolean merge(final ClassWriter cw, int t,
final int[] types, final int index) {
int u = types[index];
if (u == t) {
// if the types are equal, merge(u,t)=u, so there is no change
return false; // depends on control dependency: [if], data = [none]
}
if ((t & ~DIM) == NULL) {
if (u == NULL) {
return false; // depends on control dependency: [if], data = [none]
}
t = NULL; // depends on control dependency: [if], data = [none]
}
if (u == 0) {
// if types[index] has never been assigned, merge(u,t)=t
types[index] = t; // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
int v;
if ((u & BASE_KIND) == OBJECT || (u & DIM) != 0) {
// if u is a reference type of any dimension
if (t == NULL) {
// if t is the NULL type, merge(u,t)=u, so there is no change
return false; // depends on control dependency: [if], data = [none]
} else if ((t & (DIM | BASE_KIND)) == (u & (DIM | BASE_KIND))) {
// if t and u have the same dimension and same base kind
if ((u & BASE_KIND) == OBJECT) {
// if t is also a reference type, and if u and t have the
// same dimension merge(u,t) = dim(t) | common parent of the
// element types of u and t
v = (t & DIM) | OBJECT
| cw.getMergedType(t & BASE_VALUE, u & BASE_VALUE); // depends on control dependency: [if], data = [none]
} else {
// if u and t are array types, but not with the same element
// type, merge(u,t) = dim(u) - 1 | java/lang/Object
int vdim = ELEMENT_OF + (u & DIM);
v = vdim | OBJECT | cw.addType("java/lang/Object"); // depends on control dependency: [if], data = [none]
}
} else if ((t & BASE_KIND) == OBJECT || (t & DIM) != 0) {
// if t is any other reference or array type, the merged type
// is min(udim, tdim) | java/lang/Object, where udim is the
// array dimension of u, minus 1 if u is an array type with a
// primitive element type (and similarly for tdim).
int tdim = (((t & DIM) == 0 || (t & BASE_KIND) == OBJECT) ? 0
: ELEMENT_OF) + (t & DIM);
int udim = (((u & DIM) == 0 || (u & BASE_KIND) == OBJECT) ? 0
: ELEMENT_OF) + (u & DIM);
v = Math.min(tdim, udim) | OBJECT
| cw.addType("java/lang/Object"); // depends on control dependency: [if], data = [none]
} else {
// if t is any other type, merge(u,t)=TOP
v = TOP; // depends on control dependency: [if], data = [none]
}
} else if (u == NULL) {
// if u is the NULL type, merge(u,t)=t,
// or TOP if t is not a reference type
v = (t & BASE_KIND) == OBJECT || (t & DIM) != 0 ? t : TOP; // depends on control dependency: [if], data = [none]
} else {
// if u is any other type, merge(u,t)=TOP whatever t
v = TOP; // depends on control dependency: [if], data = [none]
}
if (u != v) {
types[index] = v; // depends on control dependency: [if], data = [none]
return true; // depends on control dependency: [if], data = [none]
}
return false;
} } |
public class class_name {
@Override
public CommerceNotificationTemplateUserSegmentRel fetchByCommerceNotificationTemplateId_Last(
long commerceNotificationTemplateId,
OrderByComparator<CommerceNotificationTemplateUserSegmentRel> orderByComparator) {
int count = countByCommerceNotificationTemplateId(commerceNotificationTemplateId);
if (count == 0) {
return null;
}
List<CommerceNotificationTemplateUserSegmentRel> list = findByCommerceNotificationTemplateId(commerceNotificationTemplateId,
count - 1, count, orderByComparator);
if (!list.isEmpty()) {
return list.get(0);
}
return null;
} } | public class class_name {
@Override
public CommerceNotificationTemplateUserSegmentRel fetchByCommerceNotificationTemplateId_Last(
long commerceNotificationTemplateId,
OrderByComparator<CommerceNotificationTemplateUserSegmentRel> orderByComparator) {
int count = countByCommerceNotificationTemplateId(commerceNotificationTemplateId);
if (count == 0) {
return null; // depends on control dependency: [if], data = [none]
}
List<CommerceNotificationTemplateUserSegmentRel> list = findByCommerceNotificationTemplateId(commerceNotificationTemplateId,
count - 1, count, orderByComparator);
if (!list.isEmpty()) {
return list.get(0); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public void activatedServiceWhichDefineEventAsInitial(ServiceComponent serviceComponent) {
// create new ordered set
SortedSet<ServiceComponent> activeServicesWhichDefineEventAsInitial = new TreeSet<ServiceComponent>(new ActiveServicesWhichDefineEventAsInitialComparator());
// add all existent active services, except old version, this allows smooth service upgrade
ServiceID oldVersion = serviceComponent.getOldVersion();
if (oldVersion == null) {
activeServicesWhichDefineEventAsInitial.addAll(this.activeServicesWhichDefineEventAsInitial);
}
else {
for (ServiceComponent existentServiceComponent : this.activeServicesWhichDefineEventAsInitial) {
if(!existentServiceComponent.getServiceID().equals(oldVersion)) {
activeServicesWhichDefineEventAsInitial.add(existentServiceComponent);
}
}
}
// add new service
activeServicesWhichDefineEventAsInitial.add(serviceComponent);
// replace old set
this.activeServicesWhichDefineEventAsInitial = activeServicesWhichDefineEventAsInitial;
} } | public class class_name {
public void activatedServiceWhichDefineEventAsInitial(ServiceComponent serviceComponent) {
// create new ordered set
SortedSet<ServiceComponent> activeServicesWhichDefineEventAsInitial = new TreeSet<ServiceComponent>(new ActiveServicesWhichDefineEventAsInitialComparator());
// add all existent active services, except old version, this allows smooth service upgrade
ServiceID oldVersion = serviceComponent.getOldVersion();
if (oldVersion == null) {
activeServicesWhichDefineEventAsInitial.addAll(this.activeServicesWhichDefineEventAsInitial);
// depends on control dependency: [if], data = [none]
}
else {
for (ServiceComponent existentServiceComponent : this.activeServicesWhichDefineEventAsInitial) {
if(!existentServiceComponent.getServiceID().equals(oldVersion)) {
activeServicesWhichDefineEventAsInitial.add(existentServiceComponent);
// depends on control dependency: [if], data = [none]
}
}
}
// add new service
activeServicesWhichDefineEventAsInitial.add(serviceComponent);
// replace old set
this.activeServicesWhichDefineEventAsInitial = activeServicesWhichDefineEventAsInitial;
} } |
public class class_name {
public int fullBlockSamplesAvailableToEncode() {
int available = 0;
int channels = streamConfig.getChannelCount();
for(BlockEncodeRequest ber : preparedRequests) {
int[] block = ber.samples;
available += block.length/channels;
}
return available;
} } | public class class_name {
public int fullBlockSamplesAvailableToEncode() {
int available = 0;
int channels = streamConfig.getChannelCount();
for(BlockEncodeRequest ber : preparedRequests) {
int[] block = ber.samples;
available += block.length/channels; // depends on control dependency: [for], data = [none]
}
return available;
} } |
public class class_name {
public void copyFrom(final Record source, final int[] sourcePositions, final int[] targetPositions) {
final int[] sourceOffsets = source.offsets;
final int[] sourceLengths = source.lengths;
final byte[] sourceBuffer = source.binaryData;
final Value[] sourceFields = source.writeFields;
boolean anyFieldIsBinary = false;
int maxFieldNum = 0;
for (int i = 0; i < sourcePositions.length; i++) {
final int sourceFieldNum = sourcePositions[i];
final int sourceOffset = sourceOffsets[sourceFieldNum];
final int targetFieldNum = targetPositions[i];
maxFieldNum = Math.max(targetFieldNum, maxFieldNum);
if (sourceOffset == NULL_INDICATOR_OFFSET) {
// set null on existing field (new fields are null by default)
if (targetFieldNum < numFields) {
internallySetField(targetFieldNum, null);
}
} else if (sourceOffset != MODIFIED_INDICATOR_OFFSET) {
anyFieldIsBinary = true;
}
}
if (numFields < maxFieldNum + 1) {
setNumFields(maxFieldNum + 1);
}
final int[] targetLengths = this.lengths;
final int[] targetOffsets = this.offsets;
// reserve space in binaryData for the binary source fields
if (anyFieldIsBinary) {
for (int i = 0; i < sourcePositions.length; i++) {
final int sourceFieldNum = sourcePositions[i];
final int sourceOffset = sourceOffsets[sourceFieldNum];
if (sourceOffset != MODIFIED_INDICATOR_OFFSET && sourceOffset != NULL_INDICATOR_OFFSET) {
final int targetFieldNum = targetPositions[i];
targetLengths[targetFieldNum] = sourceLengths[sourceFieldNum];
internallySetField(targetFieldNum, RESERVE_SPACE);
}
}
updateBinaryRepresenation();
}
final byte[] targetBuffer = this.binaryData;
for (int i = 0; i < sourcePositions.length; i++) {
final int sourceFieldNum = sourcePositions[i];
final int sourceOffset = sourceOffsets[sourceFieldNum];
final int targetFieldNum = targetPositions[i];
if (sourceOffset == MODIFIED_INDICATOR_OFFSET) {
internallySetField(targetFieldNum, sourceFields[sourceFieldNum]);
} else if (sourceOffset != NULL_INDICATOR_OFFSET) {
// bin-copy
final int targetOffset = targetOffsets[targetFieldNum];
final int length = targetLengths[targetFieldNum];
System.arraycopy(sourceBuffer, sourceOffset, targetBuffer, targetOffset, length);
}
}
} } | public class class_name {
public void copyFrom(final Record source, final int[] sourcePositions, final int[] targetPositions) {
final int[] sourceOffsets = source.offsets;
final int[] sourceLengths = source.lengths;
final byte[] sourceBuffer = source.binaryData;
final Value[] sourceFields = source.writeFields;
boolean anyFieldIsBinary = false;
int maxFieldNum = 0;
for (int i = 0; i < sourcePositions.length; i++) {
final int sourceFieldNum = sourcePositions[i];
final int sourceOffset = sourceOffsets[sourceFieldNum];
final int targetFieldNum = targetPositions[i];
maxFieldNum = Math.max(targetFieldNum, maxFieldNum); // depends on control dependency: [for], data = [none]
if (sourceOffset == NULL_INDICATOR_OFFSET) {
// set null on existing field (new fields are null by default)
if (targetFieldNum < numFields) {
internallySetField(targetFieldNum, null); // depends on control dependency: [if], data = [(targetFieldNum]
}
} else if (sourceOffset != MODIFIED_INDICATOR_OFFSET) {
anyFieldIsBinary = true; // depends on control dependency: [if], data = [none]
}
}
if (numFields < maxFieldNum + 1) {
setNumFields(maxFieldNum + 1); // depends on control dependency: [if], data = [maxFieldNum + 1)]
}
final int[] targetLengths = this.lengths;
final int[] targetOffsets = this.offsets;
// reserve space in binaryData for the binary source fields
if (anyFieldIsBinary) {
for (int i = 0; i < sourcePositions.length; i++) {
final int sourceFieldNum = sourcePositions[i];
final int sourceOffset = sourceOffsets[sourceFieldNum];
if (sourceOffset != MODIFIED_INDICATOR_OFFSET && sourceOffset != NULL_INDICATOR_OFFSET) {
final int targetFieldNum = targetPositions[i];
targetLengths[targetFieldNum] = sourceLengths[sourceFieldNum]; // depends on control dependency: [if], data = [none]
internallySetField(targetFieldNum, RESERVE_SPACE); // depends on control dependency: [if], data = [none]
}
}
updateBinaryRepresenation(); // depends on control dependency: [if], data = [none]
}
final byte[] targetBuffer = this.binaryData;
for (int i = 0; i < sourcePositions.length; i++) {
final int sourceFieldNum = sourcePositions[i];
final int sourceOffset = sourceOffsets[sourceFieldNum];
final int targetFieldNum = targetPositions[i];
if (sourceOffset == MODIFIED_INDICATOR_OFFSET) {
internallySetField(targetFieldNum, sourceFields[sourceFieldNum]); // depends on control dependency: [if], data = [none]
} else if (sourceOffset != NULL_INDICATOR_OFFSET) {
// bin-copy
final int targetOffset = targetOffsets[targetFieldNum];
final int length = targetLengths[targetFieldNum];
System.arraycopy(sourceBuffer, sourceOffset, targetBuffer, targetOffset, length); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
public void setRequiredServices(long requiredServices) {
lock.lock();
try {
this.requiredServices = requiredServices;
peerDiscoverers.clear();
addPeerDiscovery(MultiplexingDiscovery.forServices(params, requiredServices));
} finally {
lock.unlock();
}
} } | public class class_name {
public void setRequiredServices(long requiredServices) {
lock.lock();
try {
this.requiredServices = requiredServices; // depends on control dependency: [try], data = [none]
peerDiscoverers.clear(); // depends on control dependency: [try], data = [none]
addPeerDiscovery(MultiplexingDiscovery.forServices(params, requiredServices)); // depends on control dependency: [try], data = [none]
} finally {
lock.unlock();
}
} } |
public class class_name {
public final void makeAccentryMatchingReversed(
final Map<String, Object> pReqVars, final String pEntryId,
final BankStatementLine pBsl, final DateFormat pDateFormat,
final String pLangDef) throws Exception {
EBankEntryResultType resultRecordType = EBankEntryResultType.ACC_ENTRY;
EBankEntryResultAction resultAction = null;
String resultDescription = null;
Long resultRecordId = null;
AccountingEntry accent = getSrvOrm().retrieveEntityById(pReqVars,
AccountingEntry.class, Long.parseLong(pEntryId));
if (accent == null) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"cant_found_accentry");
}
if (accent.getSourceType() == 1010) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"AlreadyDone");
}
if (pBsl.getItsAmount().compareTo(BigDecimal.ZERO) > 0 && pBsl
.getItsAmount().compareTo(accent.getDebit()) != 0
&& accent.getSubaccDebitType() != 2002 && !pBsl.getItsOwner()
.getBankAccount().getItsId().equals(accent.getSubaccDebitId())) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"record_is_not_matching");
} else if (pBsl.getItsAmount().compareTo(BigDecimal.ZERO) < 0 && pBsl
.getItsAmount().abs().compareTo(accent.getCredit()) != 0
&& accent.getSubaccCreditType() != 2002 && !pBsl.getItsOwner()
.getBankAccount().getItsId().equals(accent.getSubaccCreditId())) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"record_is_not_matching");
}
if (EBankEntryStatus.VOIDED.equals(pBsl.getItsStatus())) {
if (accent.getIdDatabaseBirth() != getSrvOrm().getIdDatabase()) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"can_not_change_foreign_src");
}
resultAction = EBankEntryResultAction.CREATE;
AccountingEntry reversed = accent;
accent = new AccountingEntry();
accent.setSourceType(pBsl.constTypeCode());
accent.setSourceId(pBsl.getItsId());
accent.setSourceDatabaseBirth(pBsl.getIdDatabaseBirth());
accent.setIdDatabaseBirth(reversed.getIdDatabaseBirth());
accent.setReversedId(reversed.getItsId());
accent.setReversedIdDatabaseBirth(reversed.getIdDatabaseBirth());
accent.setItsDate(new Date(reversed.getItsDate().getTime() + 1));
accent.setAccDebit(reversed.getAccDebit());
accent.setSubaccDebit(reversed.getSubaccDebit());
accent.setSubaccDebitId(reversed.getSubaccDebitId());
accent.setSubaccDebitType(reversed.getSubaccDebitType());
accent.setDebit(reversed.getDebit().negate());
accent.setAccCredit(reversed.getAccCredit());
accent.setSubaccCredit(reversed.getSubaccCredit());
accent.setSubaccCreditId(reversed.getSubaccCreditId());
accent.setSubaccCreditType(reversed.getSubaccCreditType());
accent.setCredit(reversed.getCredit().negate());
accent.setDescription(makeDescrForCreated(pBsl, pDateFormat, pLangDef)
+ " " + getSrvI18n().getMsg("reversed_n", pLangDef)
+ accent.getReversedIdDatabaseBirth() + "-" + accent.getReversedId());
getSrvOrm().insertEntity(pReqVars, accent);
accent.setIsNew(false);
String oldDesr = "";
if (reversed.getDescription() != null) {
oldDesr = reversed.getDescription();
}
reversed.setDescription(oldDesr + " " + getSrvI18n()
.getMsg("reversing_n", pLangDef) + accent.getIdDatabaseBirth() + "-"
+ accent.getItsId());
reversed.setReversedId(accent.getItsId());
reversed.setReversedIdDatabaseBirth(accent.getIdDatabaseBirth());
getSrvOrm().updateEntity(pReqVars, reversed);
} else {
resultAction = EBankEntryResultAction.MATCH;
}
resultRecordId = accent.getItsId();
resultDescription = makeBslResDescr(resultAction, pDateFormat, accent,
accent.getItsDate(), pLangDef);
pBsl.setResultAction(resultAction);
pBsl.setResultRecordType(resultRecordType);
pBsl.setResultRecordId(resultRecordId);
pBsl.setResultDescription(resultDescription);
getSrvOrm().updateEntity(pReqVars, pBsl);
} } | public class class_name {
public final void makeAccentryMatchingReversed(
final Map<String, Object> pReqVars, final String pEntryId,
final BankStatementLine pBsl, final DateFormat pDateFormat,
final String pLangDef) throws Exception {
EBankEntryResultType resultRecordType = EBankEntryResultType.ACC_ENTRY;
EBankEntryResultAction resultAction = null;
String resultDescription = null;
Long resultRecordId = null;
AccountingEntry accent = getSrvOrm().retrieveEntityById(pReqVars,
AccountingEntry.class, Long.parseLong(pEntryId));
if (accent == null) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"cant_found_accentry");
}
if (accent.getSourceType() == 1010) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"AlreadyDone");
}
if (pBsl.getItsAmount().compareTo(BigDecimal.ZERO) > 0 && pBsl
.getItsAmount().compareTo(accent.getDebit()) != 0
&& accent.getSubaccDebitType() != 2002 && !pBsl.getItsOwner()
.getBankAccount().getItsId().equals(accent.getSubaccDebitId())) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"record_is_not_matching");
} else if (pBsl.getItsAmount().compareTo(BigDecimal.ZERO) < 0 && pBsl
.getItsAmount().abs().compareTo(accent.getCredit()) != 0
&& accent.getSubaccCreditType() != 2002 && !pBsl.getItsOwner()
.getBankAccount().getItsId().equals(accent.getSubaccCreditId())) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"record_is_not_matching");
}
if (EBankEntryStatus.VOIDED.equals(pBsl.getItsStatus())) {
if (accent.getIdDatabaseBirth() != getSrvOrm().getIdDatabase()) {
throw new ExceptionWithCode(ExceptionWithCode.WRONG_PARAMETER,
"can_not_change_foreign_src");
}
resultAction = EBankEntryResultAction.CREATE;
AccountingEntry reversed = accent;
accent = new AccountingEntry();
accent.setSourceType(pBsl.constTypeCode());
accent.setSourceId(pBsl.getItsId());
accent.setSourceDatabaseBirth(pBsl.getIdDatabaseBirth());
accent.setIdDatabaseBirth(reversed.getIdDatabaseBirth());
accent.setReversedId(reversed.getItsId());
accent.setReversedIdDatabaseBirth(reversed.getIdDatabaseBirth());
accent.setItsDate(new Date(reversed.getItsDate().getTime() + 1));
accent.setAccDebit(reversed.getAccDebit());
accent.setSubaccDebit(reversed.getSubaccDebit());
accent.setSubaccDebitId(reversed.getSubaccDebitId());
accent.setSubaccDebitType(reversed.getSubaccDebitType());
accent.setDebit(reversed.getDebit().negate());
accent.setAccCredit(reversed.getAccCredit());
accent.setSubaccCredit(reversed.getSubaccCredit());
accent.setSubaccCreditId(reversed.getSubaccCreditId());
accent.setSubaccCreditType(reversed.getSubaccCreditType());
accent.setCredit(reversed.getCredit().negate());
accent.setDescription(makeDescrForCreated(pBsl, pDateFormat, pLangDef)
+ " " + getSrvI18n().getMsg("reversed_n", pLangDef)
+ accent.getReversedIdDatabaseBirth() + "-" + accent.getReversedId());
getSrvOrm().insertEntity(pReqVars, accent);
accent.setIsNew(false);
String oldDesr = "";
if (reversed.getDescription() != null) {
oldDesr = reversed.getDescription(); // depends on control dependency: [if], data = [none]
}
reversed.setDescription(oldDesr + " " + getSrvI18n()
.getMsg("reversing_n", pLangDef) + accent.getIdDatabaseBirth() + "-"
+ accent.getItsId());
reversed.setReversedId(accent.getItsId());
reversed.setReversedIdDatabaseBirth(accent.getIdDatabaseBirth());
getSrvOrm().updateEntity(pReqVars, reversed);
} else {
resultAction = EBankEntryResultAction.MATCH;
}
resultRecordId = accent.getItsId();
resultDescription = makeBslResDescr(resultAction, pDateFormat, accent,
accent.getItsDate(), pLangDef);
pBsl.setResultAction(resultAction);
pBsl.setResultRecordType(resultRecordType);
pBsl.setResultRecordId(resultRecordId);
pBsl.setResultDescription(resultDescription);
getSrvOrm().updateEntity(pReqVars, pBsl);
} } |
public class class_name {
public WebEndpointResponse<Health> mapDetails(Supplier<Health> health,
SecurityContext securityContext) {
if (canSeeDetails(securityContext, this.showDetails)) {
Health healthDetails = health.get();
if (healthDetails != null) {
return createWebEndpointResponse(healthDetails);
}
}
return new WebEndpointResponse<>(WebEndpointResponse.STATUS_NOT_FOUND);
} } | public class class_name {
public WebEndpointResponse<Health> mapDetails(Supplier<Health> health,
SecurityContext securityContext) {
if (canSeeDetails(securityContext, this.showDetails)) {
Health healthDetails = health.get();
if (healthDetails != null) {
return createWebEndpointResponse(healthDetails); // depends on control dependency: [if], data = [(healthDetails]
}
}
return new WebEndpointResponse<>(WebEndpointResponse.STATUS_NOT_FOUND);
} } |
public class class_name {
public static int intersectConvex(FastQueue<Point3D_F64> polygon,
LineParametric3D_F64 line , Point3D_F64 output,
Vector3D_F64 n, Vector3D_F64 u, Vector3D_F64 v, Vector3D_F64 w0) {
if( polygon.size < 3 )
throw new IllegalArgumentException("There must be 3 or more points");
double r, a, b; // params to calc ray-plane intersect
Point3D_F64 v0 = polygon.get(0);
Point3D_F64 v1 = polygon.get(1);
Point3D_F64 v2 = polygon.get(2);
// get triangle edge vectors and plane normal
u.minus(v1,v0); // NOTE: these could be precomputed
v.minus(v2,v0);
n.cross(u,v);
if ( n.normSq() == 0 ) // triangle is degenerate
return -1; // do not deal with this case
Vector3D_F64 dir = line.slope;
w0.minus(line.p,v0);
a = -n.dot(w0);
b = n.dot(dir);
if (Math.abs(b) < GrlConstants.EPS) { // ray is parallel to triangle plane
if (a == 0) // ray lies in triangle plane
return 2;
else return 0; // ray disjoint from plane
}
// get intersect point of ray with triangle plane
r = a / b;
// intersect point of ray and plane
output.x = line.p.x + r*dir.x;
output.y = line.p.y + r*dir.y;
output.z = line.p.z + r*dir.z;
// See if it's inside any of the triangles
for (int i = 2; i < polygon.size; i++) {
// is I inside T?
if (containedPlane(v0, output, u, v, w0)) {
if (r >= 0)
return 1;
else
return 3;
}
if (i < polygon.size - 1) {
u.minus(polygon.get(i), v0);
v.minus(polygon.get(i+1), v0);
}
}
return 0;
} } | public class class_name {
public static int intersectConvex(FastQueue<Point3D_F64> polygon,
LineParametric3D_F64 line , Point3D_F64 output,
Vector3D_F64 n, Vector3D_F64 u, Vector3D_F64 v, Vector3D_F64 w0) {
if( polygon.size < 3 )
throw new IllegalArgumentException("There must be 3 or more points");
double r, a, b; // params to calc ray-plane intersect
Point3D_F64 v0 = polygon.get(0);
Point3D_F64 v1 = polygon.get(1);
Point3D_F64 v2 = polygon.get(2);
// get triangle edge vectors and plane normal
u.minus(v1,v0); // NOTE: these could be precomputed
v.minus(v2,v0);
n.cross(u,v);
if ( n.normSq() == 0 ) // triangle is degenerate
return -1; // do not deal with this case
Vector3D_F64 dir = line.slope;
w0.minus(line.p,v0);
a = -n.dot(w0);
b = n.dot(dir);
if (Math.abs(b) < GrlConstants.EPS) { // ray is parallel to triangle plane
if (a == 0) // ray lies in triangle plane
return 2;
else return 0; // ray disjoint from plane
}
// get intersect point of ray with triangle plane
r = a / b;
// intersect point of ray and plane
output.x = line.p.x + r*dir.x;
output.y = line.p.y + r*dir.y;
output.z = line.p.z + r*dir.z;
// See if it's inside any of the triangles
for (int i = 2; i < polygon.size; i++) {
// is I inside T?
if (containedPlane(v0, output, u, v, w0)) {
if (r >= 0)
return 1;
else
return 3;
}
if (i < polygon.size - 1) {
u.minus(polygon.get(i), v0); // depends on control dependency: [if], data = [(i]
v.minus(polygon.get(i+1), v0); // depends on control dependency: [if], data = [(i]
}
}
return 0;
} } |
public class class_name {
public JvmAnnotationReference removeAnnotation(/* @NonNull */ JvmAnnotationTarget annotationTarget, /* @NonNull */ Class<? extends Annotation> type) {
JvmAnnotationReference result = findAnnotation(annotationTarget, type);
if (result != null) {
annotationTarget.getAnnotations().remove(result);
return result;
}
return null;
} } | public class class_name {
public JvmAnnotationReference removeAnnotation(/* @NonNull */ JvmAnnotationTarget annotationTarget, /* @NonNull */ Class<? extends Annotation> type) {
JvmAnnotationReference result = findAnnotation(annotationTarget, type);
if (result != null) {
annotationTarget.getAnnotations().remove(result); // depends on control dependency: [if], data = [(result]
return result; // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public boolean the(Condition condition, Ticker ticker) {
try {
poll(ticker, condition);
return true;
} catch (PollTimeoutException ignored) {
return false;
}
} } | public class class_name {
public boolean the(Condition condition, Ticker ticker) {
try {
poll(ticker, condition); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
} catch (PollTimeoutException ignored) {
return false;
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public JobType addJobNotificationSubscriptionType(
JobNotificationSubscriptionType jobNotificationSubscription) {
if (this.jobNotificationSubscriptionTypes == null) {
this.jobNotificationSubscriptionTypes = new ArrayList<JobNotificationSubscriptionType>();
}
this.jobNotificationSubscriptionTypes.add(jobNotificationSubscription);
return this;
} } | public class class_name {
public JobType addJobNotificationSubscriptionType(
JobNotificationSubscriptionType jobNotificationSubscription) {
if (this.jobNotificationSubscriptionTypes == null) {
this.jobNotificationSubscriptionTypes = new ArrayList<JobNotificationSubscriptionType>(); // depends on control dependency: [if], data = [none]
}
this.jobNotificationSubscriptionTypes.add(jobNotificationSubscription);
return this;
} } |
public class class_name {
public static final void replace(char[] array, char toBeReplaced,
char replacementChar)
{
if (toBeReplaced != replacementChar)
{
for (int i = 0, max = array.length; i < max; i++)
{
if (array[i] == toBeReplaced)
{
array[i] = replacementChar;
}
}
}
} } | public class class_name {
public static final void replace(char[] array, char toBeReplaced,
char replacementChar)
{
if (toBeReplaced != replacementChar)
{
for (int i = 0, max = array.length; i < max; i++)
{
if (array[i] == toBeReplaced)
{
array[i] = replacementChar; // depends on control dependency: [if], data = [none]
}
}
}
} } |
public class class_name {
public int compareTo (FloatBuffer otherBuffer) {
int compareRemaining = (remaining() < otherBuffer.remaining()) ?
remaining() : otherBuffer.remaining();
int thisPos = position;
int otherPos = otherBuffer.position;
// BEGIN android-changed
float thisFloat, otherFloat;
while (compareRemaining > 0) {
thisFloat = get(thisPos);
otherFloat = otherBuffer.get(otherPos);
// checks for float and NaN inequality
if ((thisFloat != otherFloat) &&
((thisFloat == thisFloat) || (otherFloat == otherFloat))) {
return thisFloat < otherFloat ? -1 : 1;
}
thisPos++;
otherPos++;
compareRemaining--;
}
// END android-changed
return remaining() - otherBuffer.remaining();
} } | public class class_name {
public int compareTo (FloatBuffer otherBuffer) {
int compareRemaining = (remaining() < otherBuffer.remaining()) ?
remaining() : otherBuffer.remaining();
int thisPos = position;
int otherPos = otherBuffer.position;
// BEGIN android-changed
float thisFloat, otherFloat;
while (compareRemaining > 0) {
thisFloat = get(thisPos); // depends on control dependency: [while], data = [none]
otherFloat = otherBuffer.get(otherPos); // depends on control dependency: [while], data = [none]
// checks for float and NaN inequality
if ((thisFloat != otherFloat) &&
((thisFloat == thisFloat) || (otherFloat == otherFloat))) {
return thisFloat < otherFloat ? -1 : 1; // depends on control dependency: [if], data = [none]
}
thisPos++; // depends on control dependency: [while], data = [none]
otherPos++; // depends on control dependency: [while], data = [none]
compareRemaining--; // depends on control dependency: [while], data = [none]
}
// END android-changed
return remaining() - otherBuffer.remaining();
} } |
public class class_name {
public void displayed(double seconds) {
double end = System.currentTimeMillis() + (seconds * 1000);
try {
double timeTook = elementPresent(seconds);
WebDriverWait wait = new WebDriverWait(element.getDriver(), (long) (seconds - timeTook), DEFAULT_POLLING_INTERVAL);
wait.until(ExpectedConditions.visibilityOfElementLocated(element.defineByElement()));
timeTook = Math.min((seconds * 1000) - (end - System.currentTimeMillis()), seconds * 1000) / 1000;
checkDisplayed(seconds, timeTook);
} catch (TimeoutException e) {
checkDisplayed(seconds, seconds);
}
} } | public class class_name {
public void displayed(double seconds) {
double end = System.currentTimeMillis() + (seconds * 1000);
try {
double timeTook = elementPresent(seconds);
WebDriverWait wait = new WebDriverWait(element.getDriver(), (long) (seconds - timeTook), DEFAULT_POLLING_INTERVAL);
wait.until(ExpectedConditions.visibilityOfElementLocated(element.defineByElement())); // depends on control dependency: [try], data = [none]
timeTook = Math.min((seconds * 1000) - (end - System.currentTimeMillis()), seconds * 1000) / 1000; // depends on control dependency: [try], data = [none]
checkDisplayed(seconds, timeTook); // depends on control dependency: [try], data = [none]
} catch (TimeoutException e) {
checkDisplayed(seconds, seconds);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@SuppressWarnings("unchecked")
private Set<String> getResourcePaths() {
Set<String> resources = new HashSet<String>();
for (Component c : m_resources) {
if (c instanceof CmsRemovableFormRow<?>) {
String value = ((CmsRemovableFormRow<CmsPathSelectField>)c).getInput().getValue();
if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(value)) {
resources.add(value);
}
}
}
return resources;
} } | public class class_name {
@SuppressWarnings("unchecked")
private Set<String> getResourcePaths() {
Set<String> resources = new HashSet<String>();
for (Component c : m_resources) {
if (c instanceof CmsRemovableFormRow<?>) {
String value = ((CmsRemovableFormRow<CmsPathSelectField>)c).getInput().getValue();
if (CmsStringUtil.isNotEmptyOrWhitespaceOnly(value)) {
resources.add(value); // depends on control dependency: [if], data = [none]
}
}
}
return resources;
} } |
public class class_name {
private void createNodePaths() {
TreeTraverser<OWLClassContainer> traverser =
new TreeTraverser<OWLClassContainer>() {
@Override
public Iterable<OWLClassContainer> children(OWLClassContainer container) {
int count = 0;
List<OWLClassContainer> containers = new ArrayList<>();
for (OWLClass childClass : loader.getChildClass(container.getOwlClass())) {
containers.add(
new OWLClassContainer(
childClass, constructNodePath(container.getNodePath(), count), false));
count++;
}
return containers;
}
};
OWLClass pseudoRootClass = loader.createClass(PSEUDO_ROOT_CLASS_LABEL, loader.getRootClasses());
for (OWLClassContainer container :
traverser.preOrderTraversal(
new OWLClassContainer(pseudoRootClass, PSEUDO_ROOT_CLASS_NODEPATH, true))) {
OWLClass ontologyTerm = container.getOwlClass();
String ontologyTermNodePath = container.getNodePath();
String ontologyTermIRI = ontologyTerm.getIRI().toString();
OntologyTermNodePath nodePathEntity = createNodePathEntity(container, ontologyTermNodePath);
nodePathsPerOntologyTerm.put(ontologyTermIRI, nodePathEntity);
}
} } | public class class_name {
private void createNodePaths() {
TreeTraverser<OWLClassContainer> traverser =
new TreeTraverser<OWLClassContainer>() {
@Override
public Iterable<OWLClassContainer> children(OWLClassContainer container) {
int count = 0;
List<OWLClassContainer> containers = new ArrayList<>();
for (OWLClass childClass : loader.getChildClass(container.getOwlClass())) {
containers.add(
new OWLClassContainer(
childClass, constructNodePath(container.getNodePath(), count), false)); // depends on control dependency: [for], data = [none]
count++; // depends on control dependency: [for], data = [none]
}
return containers;
}
};
OWLClass pseudoRootClass = loader.createClass(PSEUDO_ROOT_CLASS_LABEL, loader.getRootClasses());
for (OWLClassContainer container :
traverser.preOrderTraversal(
new OWLClassContainer(pseudoRootClass, PSEUDO_ROOT_CLASS_NODEPATH, true))) {
OWLClass ontologyTerm = container.getOwlClass();
String ontologyTermNodePath = container.getNodePath();
String ontologyTermIRI = ontologyTerm.getIRI().toString();
OntologyTermNodePath nodePathEntity = createNodePathEntity(container, ontologyTermNodePath);
nodePathsPerOntologyTerm.put(ontologyTermIRI, nodePathEntity); // depends on control dependency: [for], data = [none]
}
} } |
public class class_name {
public boolean removePredecessor(Task targetTask, RelationType type, Duration lag)
{
boolean matchFound = false;
//
// Retrieve the list of predecessors
//
List<Relation> predecessorList = getPredecessors();
if (!predecessorList.isEmpty())
{
//
// Ensure that we have a valid lag duration
//
if (lag == null)
{
lag = Duration.getInstance(0, TimeUnit.DAYS);
}
//
// Ensure that there is a predecessor relationship between
// these two tasks, and remove it.
//
matchFound = removeRelation(predecessorList, targetTask, type, lag);
//
// If we have removed a predecessor, then we must remove the
// corresponding successor entry from the target task list
//
if (matchFound)
{
//
// Retrieve the list of successors
//
List<Relation> successorList = targetTask.getSuccessors();
if (!successorList.isEmpty())
{
//
// Ensure that there is a successor relationship between
// these two tasks, and remove it.
//
removeRelation(successorList, this, type, lag);
}
}
}
return matchFound;
} } | public class class_name {
public boolean removePredecessor(Task targetTask, RelationType type, Duration lag)
{
boolean matchFound = false;
//
// Retrieve the list of predecessors
//
List<Relation> predecessorList = getPredecessors();
if (!predecessorList.isEmpty())
{
//
// Ensure that we have a valid lag duration
//
if (lag == null)
{
lag = Duration.getInstance(0, TimeUnit.DAYS); // depends on control dependency: [if], data = [none]
}
//
// Ensure that there is a predecessor relationship between
// these two tasks, and remove it.
//
matchFound = removeRelation(predecessorList, targetTask, type, lag); // depends on control dependency: [if], data = [none]
//
// If we have removed a predecessor, then we must remove the
// corresponding successor entry from the target task list
//
if (matchFound)
{
//
// Retrieve the list of successors
//
List<Relation> successorList = targetTask.getSuccessors();
if (!successorList.isEmpty())
{
//
// Ensure that there is a successor relationship between
// these two tasks, and remove it.
//
removeRelation(successorList, this, type, lag); // depends on control dependency: [if], data = [none]
}
}
}
return matchFound;
} } |
public class class_name {
public void addFilter(PropFilter filter)
{
if (mFilter == null)
{
mFilter = new ArrayList<PropFilter>(8);
}
mFilter.add(filter);
} } | public class class_name {
public void addFilter(PropFilter filter)
{
if (mFilter == null)
{
mFilter = new ArrayList<PropFilter>(8); // depends on control dependency: [if], data = [none]
}
mFilter.add(filter);
} } |
public class class_name {
public void put(Short value) {
if (value == null) {
this.compiledStatement.bindNull(compiledStatementBindIndex++);
} else {
compiledStatement.bindLong(compiledStatementBindIndex++, (short) value);
}
} } | public class class_name {
public void put(Short value) {
if (value == null) {
this.compiledStatement.bindNull(compiledStatementBindIndex++); // depends on control dependency: [if], data = [none]
} else {
compiledStatement.bindLong(compiledStatementBindIndex++, (short) value); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
private void joinTableSimpleFieldOperation(ESSyncConfig config, Dml dml, Map<String, Object> data,
TableItem tableItem, Map<String, Object> esFieldData) {
ESMapping mapping = config.getEsMapping();
Map<String, Object> paramsTmp = new LinkedHashMap<>();
for (Map.Entry<FieldItem, List<FieldItem>> entry : tableItem.getRelationTableFields().entrySet()) {
for (FieldItem fieldItem : entry.getValue()) {
if (fieldItem.getColumnItems().size() == 1) {
Object value = esTemplate.getValFromData(mapping,
data,
fieldItem.getFieldName(),
entry.getKey().getColumn().getColumnName());
String fieldName = fieldItem.getFieldName();
// check whether this field is the primary key
if (fieldName.equals(mapping.get_id())) {
fieldName = "_id";
}
paramsTmp.put(fieldName, value);
}
}
}
if (logger.isDebugEnabled()) {
logger.trace("Join table update es index by foreign key, destination:{}, table: {}, index: {}",
config.getDestination(),
dml.getTable(),
mapping.get_index());
}
esTemplate.updateByQuery(config, paramsTmp, esFieldData);
} } | public class class_name {
private void joinTableSimpleFieldOperation(ESSyncConfig config, Dml dml, Map<String, Object> data,
TableItem tableItem, Map<String, Object> esFieldData) {
ESMapping mapping = config.getEsMapping();
Map<String, Object> paramsTmp = new LinkedHashMap<>();
for (Map.Entry<FieldItem, List<FieldItem>> entry : tableItem.getRelationTableFields().entrySet()) {
for (FieldItem fieldItem : entry.getValue()) {
if (fieldItem.getColumnItems().size() == 1) {
Object value = esTemplate.getValFromData(mapping,
data,
fieldItem.getFieldName(),
entry.getKey().getColumn().getColumnName());
String fieldName = fieldItem.getFieldName();
// check whether this field is the primary key
if (fieldName.equals(mapping.get_id())) {
fieldName = "_id"; // depends on control dependency: [if], data = [none]
}
paramsTmp.put(fieldName, value); // depends on control dependency: [if], data = [none]
}
}
}
if (logger.isDebugEnabled()) {
logger.trace("Join table update es index by foreign key, destination:{}, table: {}, index: {}",
config.getDestination(),
dml.getTable(),
mapping.get_index()); // depends on control dependency: [if], data = [none]
}
esTemplate.updateByQuery(config, paramsTmp, esFieldData);
} } |
public class class_name {
public boolean countCheck() {
i.incrementAndGet();
if (i.get() > freq) {
target = r.nextInt(freq);
i.set(0);
}
return i.get() == target;
} } | public class class_name {
public boolean countCheck() {
i.incrementAndGet();
if (i.get() > freq) {
target = r.nextInt(freq); // depends on control dependency: [if], data = [freq)]
i.set(0); // depends on control dependency: [if], data = [none]
}
return i.get() == target;
} } |
public class class_name {
@Override
public double getSortProcessingRate(long currentTime) {
long timeSpentSorting = 0;
float progress = 0;
Phase phase = getPhase();
long sortFinishTime = getSortFinishTime();
long shuffleFinishTime = getShuffleFinishTime();
if (phase == Phase.SHUFFLE ) {
return 0;
} else if (getPhase() == Phase.SORT) {
if (shuffleFinishTime < currentTime) {
LOG.error("Shuffle finish time is " + shuffleFinishTime +
" which is < current time " + currentTime +
" in " + this.getTaskID());
}
timeSpentSorting = currentTime - shuffleFinishTime;
progress = getProgress() - (float)1.0/3;
if (progress < 0) {
LOG.error("Shuffle progress calculated to be " + progress +
" in task status for " + this.getTaskID() + ". Settings to 0");
progress = 0;
}
} else if (getPhase() == Phase.REDUCE) {
// when it is reduce phase, use 33%/(sort finish time - shuffle
// finish time) as the progress rate. Using percentages instead of bytes
// as it is tricky
progress = (float)1.0/3;
if (shuffleFinishTime <= sortFinishTime) {
LOG.error("Shuffle finish fime is " + shuffleFinishTime +
" which is <= sort finish time " + sortFinishTime +
" in " + this.getTaskID());
return 0;
}
timeSpentSorting = sortFinishTime - shuffleFinishTime;
}
sortProcessingRate = progress/timeSpentSorting;
return sortProcessingRate;
} } | public class class_name {
@Override
public double getSortProcessingRate(long currentTime) {
long timeSpentSorting = 0;
float progress = 0;
Phase phase = getPhase();
long sortFinishTime = getSortFinishTime();
long shuffleFinishTime = getShuffleFinishTime();
if (phase == Phase.SHUFFLE ) {
return 0; // depends on control dependency: [if], data = [none]
} else if (getPhase() == Phase.SORT) {
if (shuffleFinishTime < currentTime) {
LOG.error("Shuffle finish time is " + shuffleFinishTime +
" which is < current time " + currentTime +
" in " + this.getTaskID()); // depends on control dependency: [if], data = [none]
}
timeSpentSorting = currentTime - shuffleFinishTime; // depends on control dependency: [if], data = [none]
progress = getProgress() - (float)1.0/3; // depends on control dependency: [if], data = [none]
if (progress < 0) {
LOG.error("Shuffle progress calculated to be " + progress +
" in task status for " + this.getTaskID() + ". Settings to 0"); // depends on control dependency: [if], data = [none]
progress = 0; // depends on control dependency: [if], data = [none]
}
} else if (getPhase() == Phase.REDUCE) {
// when it is reduce phase, use 33%/(sort finish time - shuffle
// finish time) as the progress rate. Using percentages instead of bytes
// as it is tricky
progress = (float)1.0/3; // depends on control dependency: [if], data = [none]
if (shuffleFinishTime <= sortFinishTime) {
LOG.error("Shuffle finish fime is " + shuffleFinishTime +
" which is <= sort finish time " + sortFinishTime +
" in " + this.getTaskID()); // depends on control dependency: [if], data = [none]
return 0; // depends on control dependency: [if], data = [none]
}
timeSpentSorting = sortFinishTime - shuffleFinishTime; // depends on control dependency: [if], data = [none]
}
sortProcessingRate = progress/timeSpentSorting;
return sortProcessingRate;
} } |
public class class_name {
public Integer indexOf(Record o) {
if(o!=null) {
for(Map.Entry<Integer, Record> e : entries()) {
Integer rId = e.getKey();
Record r = e.getValue();
if(o.equals(r)) {
return rId;
}
}
}
return null;
} } | public class class_name {
public Integer indexOf(Record o) {
if(o!=null) {
for(Map.Entry<Integer, Record> e : entries()) {
Integer rId = e.getKey();
Record r = e.getValue();
if(o.equals(r)) {
return rId; // depends on control dependency: [if], data = [none]
}
}
}
return null;
} } |
public class class_name {
public void marshall(DeleteGraphqlApiRequest deleteGraphqlApiRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteGraphqlApiRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteGraphqlApiRequest.getApiId(), APIID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(DeleteGraphqlApiRequest deleteGraphqlApiRequest, ProtocolMarshaller protocolMarshaller) {
if (deleteGraphqlApiRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(deleteGraphqlApiRequest.getApiId(), APIID_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public Observable<ServiceResponse<Page<RecommendationInner>>> listRecommendedRulesForWebAppSinglePageAsync(final String resourceGroupName, final String siteName, final Boolean featured, final String filter) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (siteName == null) {
throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listRecommendedRulesForWebApp(resourceGroupName, siteName, this.client.subscriptionId(), featured, filter, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RecommendationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<RecommendationInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<RecommendationInner>> result = listRecommendedRulesForWebAppDelegate(response);
return Observable.just(new ServiceResponse<Page<RecommendationInner>>(result.body(), result.response()));
} catch (Throwable t) {
return Observable.error(t);
}
}
});
} } | public class class_name {
public Observable<ServiceResponse<Page<RecommendationInner>>> listRecommendedRulesForWebAppSinglePageAsync(final String resourceGroupName, final String siteName, final Boolean featured, final String filter) {
if (resourceGroupName == null) {
throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
}
if (siteName == null) {
throw new IllegalArgumentException("Parameter siteName is required and cannot be null.");
}
if (this.client.subscriptionId() == null) {
throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
}
if (this.client.apiVersion() == null) {
throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
}
return service.listRecommendedRulesForWebApp(resourceGroupName, siteName, this.client.subscriptionId(), featured, filter, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
.flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<RecommendationInner>>>>() {
@Override
public Observable<ServiceResponse<Page<RecommendationInner>>> call(Response<ResponseBody> response) {
try {
ServiceResponse<PageImpl<RecommendationInner>> result = listRecommendedRulesForWebAppDelegate(response);
return Observable.just(new ServiceResponse<Page<RecommendationInner>>(result.body(), result.response())); // depends on control dependency: [try], data = [none]
} catch (Throwable t) {
return Observable.error(t);
} // depends on control dependency: [catch], data = [none]
}
});
} } |
public class class_name {
public static Type getReturnType(ExpressionTree expressionTree) {
if (expressionTree instanceof JCFieldAccess) {
JCFieldAccess methodCall = (JCFieldAccess) expressionTree;
return methodCall.type.getReturnType();
} else if (expressionTree instanceof JCIdent) {
JCIdent methodCall = (JCIdent) expressionTree;
return methodCall.type.getReturnType();
} else if (expressionTree instanceof JCMethodInvocation) {
return getReturnType(((JCMethodInvocation) expressionTree).getMethodSelect());
} else if (expressionTree instanceof JCMemberReference) {
return ((JCMemberReference) expressionTree).sym.type.getReturnType();
}
throw new IllegalArgumentException("Expected a JCFieldAccess or JCIdent");
} } | public class class_name {
public static Type getReturnType(ExpressionTree expressionTree) {
if (expressionTree instanceof JCFieldAccess) {
JCFieldAccess methodCall = (JCFieldAccess) expressionTree;
return methodCall.type.getReturnType(); // depends on control dependency: [if], data = [none]
} else if (expressionTree instanceof JCIdent) {
JCIdent methodCall = (JCIdent) expressionTree;
return methodCall.type.getReturnType(); // depends on control dependency: [if], data = [none]
} else if (expressionTree instanceof JCMethodInvocation) {
return getReturnType(((JCMethodInvocation) expressionTree).getMethodSelect()); // depends on control dependency: [if], data = [none]
} else if (expressionTree instanceof JCMemberReference) {
return ((JCMemberReference) expressionTree).sym.type.getReturnType(); // depends on control dependency: [if], data = [none]
}
throw new IllegalArgumentException("Expected a JCFieldAccess or JCIdent");
} } |
public class class_name {
@Override
public Resource qualifiedNameToResource(QualifiedName name) {
String unescapedLocalName = qnU.unescapeProvLocalName(name.getLocalPart());
if (isBlankName(name)) {
return new BNodeImpl(unescapedLocalName);
} else {
return new URIImpl(name.getNamespaceURI() + unescapedLocalName);
}
} } | public class class_name {
@Override
public Resource qualifiedNameToResource(QualifiedName name) {
String unescapedLocalName = qnU.unescapeProvLocalName(name.getLocalPart());
if (isBlankName(name)) {
return new BNodeImpl(unescapedLocalName); // depends on control dependency: [if], data = [none]
} else {
return new URIImpl(name.getNamespaceURI() + unescapedLocalName); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static FileInputStream toStream(File file) {
try {
return new FileInputStream(file);
} catch (FileNotFoundException e) {
throw new IORuntimeException(e);
}
} } | public class class_name {
public static FileInputStream toStream(File file) {
try {
return new FileInputStream(file);
// depends on control dependency: [try], data = [none]
} catch (FileNotFoundException e) {
throw new IORuntimeException(e);
}
// depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected static int[] newKeySizeArray(int minSize, int maxSize, int step)
{
int[] result = new int[((maxSize - minSize) / step) + 1];
for (int i = minSize, j = 0; i <= maxSize; i += step, j++) {
result[j] = i;
}
return result;
} } | public class class_name {
protected static int[] newKeySizeArray(int minSize, int maxSize, int step)
{
int[] result = new int[((maxSize - minSize) / step) + 1];
for (int i = minSize, j = 0; i <= maxSize; i += step, j++) {
result[j] = i; // depends on control dependency: [for], data = [i]
}
return result;
} } |
public class class_name {
@Override
public final Object getObjectProperty(String name) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "getObjectProperty", name);
Object value = null;
/* Some JMSX and JMS_IBM properties need special processing for the JMS */
/* layer */
if (SIProperties.JMSXDeliveryCount.equals(name)) {
int deliveryCount = getJmsxDeliveryCount();
if (deliveryCount == 0) {
value = null;
}
else {
value = Integer.valueOf(deliveryCount);
}
}
//special case for JMS_IBM_MsgType
else if (name.equals(SIProperties.JMS_IBM_MsgType)) {
// If value wasn't set from feedback, check for a carried value
if (value == null) {
if (mayHaveMappedJmsSystemProperties()) {
value = getJmsSystemPropertyMap().get(name);
}
}
}
// ARM Correlator properties
else if (SIProperties.JMS_IBM_ArmCorrelator.equals(name)) {
value = getARMCorrelator();
}
else if (SIProperties.JMS_TOG_ARM_Correlator.equals(name)) {
value = getARMCorrelator();
}
/* The other JMSX and JMS_IBM_ properties can be obtained correctly */
/* simply by calling the appropriate super-class methods */
else if (name.startsWith(JMS_PREFIX)) {
if (name.startsWith(JMS_IBM_MQMD_PREFIX)) {
value = getMQMDProperty(name);
}
else {
value = getJMSSystemProperty(name, false);
}
}
/* For property names which do not start with JMS */
/* Maelstrom's transportVersion has its own field - the rest are in the */
/* JmsUserPropertyMap */
else {
if (name.equals(MfpConstants.PRP_TRANSVER)) {
value = getTransportVersion();
}
else {
if (mayHaveJmsUserProperties()) {
value = getJmsUserPropertyMap().get(name);
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "getObjectProperty", value);
return value;
} } | public class class_name {
@Override
public final Object getObjectProperty(String name) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "getObjectProperty", name);
Object value = null;
/* Some JMSX and JMS_IBM properties need special processing for the JMS */
/* layer */
if (SIProperties.JMSXDeliveryCount.equals(name)) {
int deliveryCount = getJmsxDeliveryCount();
if (deliveryCount == 0) {
value = null; // depends on control dependency: [if], data = [none]
}
else {
value = Integer.valueOf(deliveryCount); // depends on control dependency: [if], data = [(deliveryCount]
}
}
//special case for JMS_IBM_MsgType
else if (name.equals(SIProperties.JMS_IBM_MsgType)) {
// If value wasn't set from feedback, check for a carried value
if (value == null) {
if (mayHaveMappedJmsSystemProperties()) {
value = getJmsSystemPropertyMap().get(name); // depends on control dependency: [if], data = [none]
}
}
}
// ARM Correlator properties
else if (SIProperties.JMS_IBM_ArmCorrelator.equals(name)) {
value = getARMCorrelator(); // depends on control dependency: [if], data = [none]
}
else if (SIProperties.JMS_TOG_ARM_Correlator.equals(name)) {
value = getARMCorrelator(); // depends on control dependency: [if], data = [none]
}
/* The other JMSX and JMS_IBM_ properties can be obtained correctly */
/* simply by calling the appropriate super-class methods */
else if (name.startsWith(JMS_PREFIX)) {
if (name.startsWith(JMS_IBM_MQMD_PREFIX)) {
value = getMQMDProperty(name); // depends on control dependency: [if], data = [none]
}
else {
value = getJMSSystemProperty(name, false); // depends on control dependency: [if], data = [none]
}
}
/* For property names which do not start with JMS */
/* Maelstrom's transportVersion has its own field - the rest are in the */
/* JmsUserPropertyMap */
else {
if (name.equals(MfpConstants.PRP_TRANSVER)) {
value = getTransportVersion(); // depends on control dependency: [if], data = [none]
}
else {
if (mayHaveJmsUserProperties()) {
value = getJmsUserPropertyMap().get(name); // depends on control dependency: [if], data = [none]
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "getObjectProperty", value);
return value;
} } |
public class class_name {
private float scrollBy(@NonNull MotionEvent event, float dx) {
if (isSkipViewPager || isViewPagerDisabled) {
return dx;
}
final State state = getState();
getStateController().getMovementArea(state, tmpRectF);
float pagerDx = splitPagerScroll(dx, state, tmpRectF);
pagerDx = skipPagerMovement(pagerDx, state, tmpRectF);
float viewDx = dx - pagerDx;
// Applying pager scroll
boolean shouldFixViewX = isViewPagerInterceptedScroll && viewPagerX == 0;
int actualX = performViewPagerScroll(event, pagerDx);
viewPagerX += actualX;
if (shouldFixViewX) { // Adding back scroll not handled by ViewPager
viewDx += Math.round(pagerDx) - actualX;
}
// Returning altered scroll left for image
return viewDx;
} } | public class class_name {
private float scrollBy(@NonNull MotionEvent event, float dx) {
if (isSkipViewPager || isViewPagerDisabled) {
return dx; // depends on control dependency: [if], data = [none]
}
final State state = getState();
getStateController().getMovementArea(state, tmpRectF);
float pagerDx = splitPagerScroll(dx, state, tmpRectF);
pagerDx = skipPagerMovement(pagerDx, state, tmpRectF);
float viewDx = dx - pagerDx;
// Applying pager scroll
boolean shouldFixViewX = isViewPagerInterceptedScroll && viewPagerX == 0;
int actualX = performViewPagerScroll(event, pagerDx);
viewPagerX += actualX;
if (shouldFixViewX) { // Adding back scroll not handled by ViewPager
viewDx += Math.round(pagerDx) - actualX; // depends on control dependency: [if], data = [none]
}
// Returning altered scroll left for image
return viewDx;
} } |
public class class_name {
protected Query createDateRangeFilter(String fieldName, long startTime, long endTime) {
Query filter = null;
if ((startTime != Long.MIN_VALUE) || (endTime != Long.MAX_VALUE)) {
// a date range has been set for this document search
if (startTime == Long.MIN_VALUE) {
// default start will always be "yyyy1231" in order to reduce term size
Calendar cal = Calendar.getInstance(OpenCms.getLocaleManager().getTimeZone());
cal.setTimeInMillis(endTime);
cal.set(cal.get(Calendar.YEAR) - MAX_YEAR_RANGE, 11, 31, 0, 0, 0);
startTime = cal.getTimeInMillis();
} else if (endTime == Long.MAX_VALUE) {
// default end will always be "yyyy0101" in order to reduce term size
Calendar cal = Calendar.getInstance(OpenCms.getLocaleManager().getTimeZone());
cal.setTimeInMillis(startTime);
cal.set(cal.get(Calendar.YEAR) + MAX_YEAR_RANGE, 0, 1, 0, 0, 0);
endTime = cal.getTimeInMillis();
}
// get the list of all possible date range options
List<String> dateRange = getDateRangeSpan(startTime, endTime);
List<Term> terms = new ArrayList<Term>();
for (String range : dateRange) {
terms.add(new Term(fieldName, range));
}
// create the filter for the date
BooleanQuery.Builder build = new BooleanQuery.Builder();
terms.forEach(term -> build.add(new TermQuery(term), Occur.SHOULD));
filter = build.build();
}
return filter;
} } | public class class_name {
protected Query createDateRangeFilter(String fieldName, long startTime, long endTime) {
Query filter = null;
if ((startTime != Long.MIN_VALUE) || (endTime != Long.MAX_VALUE)) {
// a date range has been set for this document search
if (startTime == Long.MIN_VALUE) {
// default start will always be "yyyy1231" in order to reduce term size
Calendar cal = Calendar.getInstance(OpenCms.getLocaleManager().getTimeZone());
cal.setTimeInMillis(endTime); // depends on control dependency: [if], data = [none]
cal.set(cal.get(Calendar.YEAR) - MAX_YEAR_RANGE, 11, 31, 0, 0, 0); // depends on control dependency: [if], data = [none]
startTime = cal.getTimeInMillis(); // depends on control dependency: [if], data = [none]
} else if (endTime == Long.MAX_VALUE) {
// default end will always be "yyyy0101" in order to reduce term size
Calendar cal = Calendar.getInstance(OpenCms.getLocaleManager().getTimeZone());
cal.setTimeInMillis(startTime); // depends on control dependency: [if], data = [none]
cal.set(cal.get(Calendar.YEAR) + MAX_YEAR_RANGE, 0, 1, 0, 0, 0); // depends on control dependency: [if], data = [none]
endTime = cal.getTimeInMillis(); // depends on control dependency: [if], data = [none]
}
// get the list of all possible date range options
List<String> dateRange = getDateRangeSpan(startTime, endTime);
List<Term> terms = new ArrayList<Term>();
for (String range : dateRange) {
terms.add(new Term(fieldName, range)); // depends on control dependency: [for], data = [range]
}
// create the filter for the date
BooleanQuery.Builder build = new BooleanQuery.Builder();
terms.forEach(term -> build.add(new TermQuery(term), Occur.SHOULD)); // depends on control dependency: [if], data = [none]
filter = build.build(); // depends on control dependency: [if], data = [none]
}
return filter;
} } |
public class class_name {
public boolean send(T graph, long correlationId) {
Object payload = graph;
if (m_marshaller != null) {
Result result = m_resultFactory.createResult(graph);
if (result == null) {
throw new MessagingException(
"Unable to marshal payload, ResultFactory returned null.");
}
try {
m_marshaller.marshal(graph, result);
} catch (Exception e) {
throw new WorkflowException("Failed to marshal payload",e);
}
Document doc = (Document)m_resultTransformer.transformResult(result);
payload = doc;
}
MessageBuilder<?> messageBuilder = MessageBuilder.withPayload(payload);
if (getReplyChannel() != null) {
messageBuilder.setReplyChannel(getReplyChannel());
}
if (getErrorChannel() != null) {
messageBuilder.setErrorChannel(getErrorChannel());
}
if (getMessagePriority() != null) {
messageBuilder.setPriority(getMessagePriority());
}
messageBuilder.setCorrelationId(correlationId);
Message<?> ret = messageBuilder.build();
return getChannel().send(messageBuilder.build());
} } | public class class_name {
public boolean send(T graph, long correlationId) {
Object payload = graph;
if (m_marshaller != null) {
Result result = m_resultFactory.createResult(graph);
if (result == null) {
throw new MessagingException(
"Unable to marshal payload, ResultFactory returned null.");
}
try {
m_marshaller.marshal(graph, result); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new WorkflowException("Failed to marshal payload",e);
} // depends on control dependency: [catch], data = [none]
Document doc = (Document)m_resultTransformer.transformResult(result);
payload = doc; // depends on control dependency: [if], data = [none]
}
MessageBuilder<?> messageBuilder = MessageBuilder.withPayload(payload);
if (getReplyChannel() != null) {
messageBuilder.setReplyChannel(getReplyChannel()); // depends on control dependency: [if], data = [(getReplyChannel()]
}
if (getErrorChannel() != null) {
messageBuilder.setErrorChannel(getErrorChannel()); // depends on control dependency: [if], data = [(getErrorChannel()]
}
if (getMessagePriority() != null) {
messageBuilder.setPriority(getMessagePriority()); // depends on control dependency: [if], data = [(getMessagePriority()]
}
messageBuilder.setCorrelationId(correlationId);
Message<?> ret = messageBuilder.build();
return getChannel().send(messageBuilder.build());
} } |
public class class_name {
@SuppressWarnings("unchecked")
public <TO> ChainedTransformer<I, TO> chain(Transformer<O, TO> transformer) {
if (transformer != null) {
transformers.add(transformer);
}
return (ChainedTransformer<I, TO>) this;
} } | public class class_name {
@SuppressWarnings("unchecked")
public <TO> ChainedTransformer<I, TO> chain(Transformer<O, TO> transformer) {
if (transformer != null) {
transformers.add(transformer); // depends on control dependency: [if], data = [(transformer]
}
return (ChainedTransformer<I, TO>) this;
} } |
public class class_name {
private void obtainValidator()
{
final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (fromValidatorFactory)
{
if (isTraceOn && tc.isDebugEnabled())
{
Tr.debug(tc, "Obtaining Validator instance from ValidatorFactory...");
}
ValidatorFactory validatorFactory = ivValidatorFactoryLocator.getValidatorFactory();
ivValidator = validatorFactory.getValidator();
}
else
{
if (isTraceOn && tc.isDebugEnabled())
{
Tr.debug(tc, "Obtaining Validator instance from ValidatorContext, using TraversableResolver " + ivSpecifiedTraversableResolver +
", message interpolator " + ivSpecifiedMessageInterpolator +
", parameter name provider " + ivSpecifiedParameterNameProvider
+ ", and constraint validator factory " + ivSpecifiedConstraintValidatorFactory);
}
ValidatorFactory validatorFactory = ivValidatorFactoryLocator.getValidatorFactory();
ValidatorContext validatorContext = validatorFactory.usingContext();
if (ivSpecifiedTraversableResolver != null)
{
validatorContext.traversableResolver(ivSpecifiedTraversableResolver);
}
if (ivSpecifiedMessageInterpolator != null)
{
validatorContext.messageInterpolator(ivSpecifiedMessageInterpolator);
}
if (ivSpecifiedConstraintValidatorFactory != null)
{
validatorContext.constraintValidatorFactory(ivSpecifiedConstraintValidatorFactory);
}
if (ivSpecifiedParameterNameProvider != null)
validatorContext.parameterNameProvider(ivSpecifiedParameterNameProvider);
if (ivClockProvider != null) {
validatorContext.clockProvider(ivClockProvider);
}
if (ivValueExtractorList != null && ivValueExtractorList.size() > 0) {
for (ValueExtractor<?> ve : ivValueExtractorList) {
validatorContext.addValueExtractor(ve);
}
}
ivValidator = validatorContext.getValidator();
}
if (isTraceOn && tc.isDebugEnabled())
{
Tr.debug(tc, "Obtained the Validator: " + ivValidator);
}
} } | public class class_name {
private void obtainValidator()
{
final boolean isTraceOn = TraceComponent.isAnyTracingEnabled();
if (fromValidatorFactory)
{
if (isTraceOn && tc.isDebugEnabled())
{
Tr.debug(tc, "Obtaining Validator instance from ValidatorFactory..."); // depends on control dependency: [if], data = [none]
}
ValidatorFactory validatorFactory = ivValidatorFactoryLocator.getValidatorFactory();
ivValidator = validatorFactory.getValidator(); // depends on control dependency: [if], data = [none]
}
else
{
if (isTraceOn && tc.isDebugEnabled())
{
Tr.debug(tc, "Obtaining Validator instance from ValidatorContext, using TraversableResolver " + ivSpecifiedTraversableResolver +
", message interpolator " + ivSpecifiedMessageInterpolator +
", parameter name provider " + ivSpecifiedParameterNameProvider
+ ", and constraint validator factory " + ivSpecifiedConstraintValidatorFactory); // depends on control dependency: [if], data = [none]
}
ValidatorFactory validatorFactory = ivValidatorFactoryLocator.getValidatorFactory();
ValidatorContext validatorContext = validatorFactory.usingContext();
if (ivSpecifiedTraversableResolver != null)
{
validatorContext.traversableResolver(ivSpecifiedTraversableResolver); // depends on control dependency: [if], data = [(ivSpecifiedTraversableResolver]
}
if (ivSpecifiedMessageInterpolator != null)
{
validatorContext.messageInterpolator(ivSpecifiedMessageInterpolator); // depends on control dependency: [if], data = [(ivSpecifiedMessageInterpolator]
}
if (ivSpecifiedConstraintValidatorFactory != null)
{
validatorContext.constraintValidatorFactory(ivSpecifiedConstraintValidatorFactory); // depends on control dependency: [if], data = [(ivSpecifiedConstraintValidatorFactory]
}
if (ivSpecifiedParameterNameProvider != null)
validatorContext.parameterNameProvider(ivSpecifiedParameterNameProvider);
if (ivClockProvider != null) {
validatorContext.clockProvider(ivClockProvider); // depends on control dependency: [if], data = [(ivClockProvider]
}
if (ivValueExtractorList != null && ivValueExtractorList.size() > 0) {
for (ValueExtractor<?> ve : ivValueExtractorList) {
validatorContext.addValueExtractor(ve); // depends on control dependency: [for], data = [ve]
}
}
ivValidator = validatorContext.getValidator(); // depends on control dependency: [if], data = [none]
}
if (isTraceOn && tc.isDebugEnabled())
{
Tr.debug(tc, "Obtained the Validator: " + ivValidator); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected void syncCompactor() {
ConcurrentLinkedQueue<Segment> queue = _compactor.getCompactedQueue();
while(!queue.isEmpty()) {
Segment seg = queue.remove();
consumeCompactionBatches();
_compactor.getFreeQueue().offer(seg);
}
consumeCompactionBatches();
} } | public class class_name {
protected void syncCompactor() {
ConcurrentLinkedQueue<Segment> queue = _compactor.getCompactedQueue();
while(!queue.isEmpty()) {
Segment seg = queue.remove();
consumeCompactionBatches(); // depends on control dependency: [while], data = [none]
_compactor.getFreeQueue().offer(seg); // depends on control dependency: [while], data = [none]
}
consumeCompactionBatches();
} } |
public class class_name {
public static Observable<Void> mapToVoid(Observable<?> fromObservable) {
if (fromObservable != null) {
return fromObservable.subscribeOn(Schedulers.io())
.map(new RXMapper<Void>());
} else {
return Observable.empty();
}
} } | public class class_name {
public static Observable<Void> mapToVoid(Observable<?> fromObservable) {
if (fromObservable != null) {
return fromObservable.subscribeOn(Schedulers.io())
.map(new RXMapper<Void>()); // depends on control dependency: [if], data = [none]
} else {
return Observable.empty(); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static void downloadOSMFile(File file, Envelope geometryEnvelope) throws IOException {
HttpURLConnection urlCon = (HttpURLConnection) createOsmUrl(geometryEnvelope).openConnection();
urlCon.setRequestMethod("GET");
urlCon.connect();
switch (urlCon.getResponseCode()) {
case 400:
throw new IOException("Error : Cannot query the OSM API with the following bounding box");
case 509:
throw new IOException("Error: You have downloaded too much data. Please try again later");
default:
InputStream in = urlCon.getInputStream();
OutputStream out = new FileOutputStream(file);
try {
byte[] data = new byte[4096];
while (true) {
int numBytes = in.read(data);
if (numBytes == -1) {
break;
}
out.write(data, 0, numBytes);
}
} finally {
out.close();
in.close();
} break;
}
} } | public class class_name {
public static void downloadOSMFile(File file, Envelope geometryEnvelope) throws IOException {
HttpURLConnection urlCon = (HttpURLConnection) createOsmUrl(geometryEnvelope).openConnection();
urlCon.setRequestMethod("GET");
urlCon.connect();
switch (urlCon.getResponseCode()) {
case 400:
throw new IOException("Error : Cannot query the OSM API with the following bounding box");
case 509:
throw new IOException("Error: You have downloaded too much data. Please try again later");
default:
InputStream in = urlCon.getInputStream();
OutputStream out = new FileOutputStream(file);
try {
byte[] data = new byte[4096];
while (true) {
int numBytes = in.read(data);
if (numBytes == -1) {
break;
}
out.write(data, 0, numBytes); // depends on control dependency: [while], data = [none]
}
} finally {
out.close();
in.close();
} break;
}
} } |
public class class_name {
public Integer getLoginTimeout()
{
if (childNode.getTextValueForPatternName("login-timeout") != null && !childNode.getTextValueForPatternName("login-timeout").equals("null")) {
return Integer.valueOf(childNode.getTextValueForPatternName("login-timeout"));
}
return null;
} } | public class class_name {
public Integer getLoginTimeout()
{
if (childNode.getTextValueForPatternName("login-timeout") != null && !childNode.getTextValueForPatternName("login-timeout").equals("null")) {
return Integer.valueOf(childNode.getTextValueForPatternName("login-timeout")); // depends on control dependency: [if], data = [(childNode.getTextValueForPatternName("login-timeout")]
}
return null;
} } |
public class class_name {
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
boolean memo = override.length>0 && override[0]!=null;
boolean subject = override.length>1 && override[1]!=null;
HistoryDAO.Data hd = HistoryDAO.newInitedData();
HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
hd.user = hdRole.user = trans.user();
hd.action = modified.name();
// Modifying User/Role is an Update to Role, not a Create. JG, 07-14-2015
hdRole.action = CRUD.update.name();
hd.target = TABLE;
hdRole.target = RoleDAO.TABLE;
hd.subject = subject?override[1] : (data.user + '|'+data.role);
hdRole.subject = data.role;
switch(modified) {
case create:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: String.format("%s added to %s",data.user,data.role);
break;
case update:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: String.format("%s - %s was updated",data.user,data.role);
break;
case delete:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: String.format("%s removed from %s",data.user,data.role);
try {
hd.reconstruct = hdRole.reconstruct = data.bytify();
} catch (IOException e) {
trans.warn().log(e,"Deleted UserRole could not be serialized");
}
break;
default:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: "n/a";
}
if(historyDAO.create(trans, hd).status!=Status.OK) {
trans.error().log("Cannot log to History");
}
if(historyDAO.create(trans, hdRole).status!=Status.OK) {
trans.error().log("Cannot log to History");
}
// uses User as Segment
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
trans.error().log("Cannot touch CacheInfo");
}
} } | public class class_name {
@Override
protected void wasModified(AuthzTrans trans, CRUD modified, Data data, String ... override) {
boolean memo = override.length>0 && override[0]!=null;
boolean subject = override.length>1 && override[1]!=null;
HistoryDAO.Data hd = HistoryDAO.newInitedData();
HistoryDAO.Data hdRole = HistoryDAO.newInitedData();
hd.user = hdRole.user = trans.user();
hd.action = modified.name();
// Modifying User/Role is an Update to Role, not a Create. JG, 07-14-2015
hdRole.action = CRUD.update.name();
hd.target = TABLE;
hdRole.target = RoleDAO.TABLE;
hd.subject = subject?override[1] : (data.user + '|'+data.role);
hdRole.subject = data.role;
switch(modified) {
case create:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: String.format("%s added to %s",data.user,data.role);
break;
case update:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: String.format("%s - %s was updated",data.user,data.role);
break;
case delete:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: String.format("%s removed from %s",data.user,data.role);
try {
hd.reconstruct = hdRole.reconstruct = data.bytify(); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
trans.warn().log(e,"Deleted UserRole could not be serialized");
} // depends on control dependency: [catch], data = [none]
break;
default:
hd.memo = hdRole.memo = memo
? String.format("%s by %s", override[0], hd.user)
: "n/a";
}
if(historyDAO.create(trans, hd).status!=Status.OK) {
trans.error().log("Cannot log to History");
}
if(historyDAO.create(trans, hdRole).status!=Status.OK) {
trans.error().log("Cannot log to History"); // depends on control dependency: [if], data = [none]
}
// uses User as Segment
if(infoDAO.touch(trans, TABLE,data.invalidate(cache)).notOK()) {
trans.error().log("Cannot touch CacheInfo"); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static List<ScanRange> intersection(ScanRange left, ScanRange right) {
List<ScanRange> unwrappedLeft = left.unwrapped();
List<ScanRange> unwrappedRight = right.unwrapped();
boolean leftWrapped = unwrappedLeft.size() > 1;
boolean rightWrapped = unwrappedRight.size() > 1;
if (!leftWrapped && !rightWrapped) {
// Neither side wrapped the token range, so a simple intersection is all that is required.
// Save some cycles by comparing the ranges directly.
ScanRange intersection = intersectionUnwrapped(left, right);
if (intersection == null) {
return ImmutableList.of();
}
return ImmutableList.of(intersection);
}
List<ScanRange> intersections = Lists.newArrayListWithExpectedSize(2);
for (ScanRange l : unwrappedLeft) {
for (ScanRange r : unwrappedRight) {
ScanRange intersection = intersectionUnwrapped(l, r);
if (intersection != null) {
intersections.add(intersection);
}
}
}
if (intersections.size() > 1) {
// For consistency always return the intersections sorted from low- to high-range.
Collections.sort(intersections);
// If multiple ranges are contiguous then join them. This can happen if one of the ranges is "all".
for (int i = intersections.size() - 1; i > 0; i--) {
if (intersections.get(i-1)._to.equals(intersections.get(i)._from)) {
intersections.set(i-1, ScanRange.create(intersections.get(i-1)._from, intersections.get(i)._to));
intersections.remove(i);
}
}
// If the intersections represent a contiguous high- to low-end wrapped range then combine them.
if (intersections.size() == 2 &&
intersections.get(0)._from.equals(MIN_VALUE) && intersections.get(1)._to.equals(MAX_VALUE)) {
ScanRange combined = ScanRange.create(intersections.get(1)._from, intersections.get(0)._to);
intersections.clear();
intersections.add(combined);
}
}
return intersections;
} } | public class class_name {
public static List<ScanRange> intersection(ScanRange left, ScanRange right) {
List<ScanRange> unwrappedLeft = left.unwrapped();
List<ScanRange> unwrappedRight = right.unwrapped();
boolean leftWrapped = unwrappedLeft.size() > 1;
boolean rightWrapped = unwrappedRight.size() > 1;
if (!leftWrapped && !rightWrapped) {
// Neither side wrapped the token range, so a simple intersection is all that is required.
// Save some cycles by comparing the ranges directly.
ScanRange intersection = intersectionUnwrapped(left, right);
if (intersection == null) {
return ImmutableList.of(); // depends on control dependency: [if], data = [none]
}
return ImmutableList.of(intersection); // depends on control dependency: [if], data = [none]
}
List<ScanRange> intersections = Lists.newArrayListWithExpectedSize(2);
for (ScanRange l : unwrappedLeft) {
for (ScanRange r : unwrappedRight) {
ScanRange intersection = intersectionUnwrapped(l, r);
if (intersection != null) {
intersections.add(intersection); // depends on control dependency: [if], data = [(intersection]
}
}
}
if (intersections.size() > 1) {
// For consistency always return the intersections sorted from low- to high-range.
Collections.sort(intersections); // depends on control dependency: [if], data = [none]
// If multiple ranges are contiguous then join them. This can happen if one of the ranges is "all".
for (int i = intersections.size() - 1; i > 0; i--) {
if (intersections.get(i-1)._to.equals(intersections.get(i)._from)) {
intersections.set(i-1, ScanRange.create(intersections.get(i-1)._from, intersections.get(i)._to)); // depends on control dependency: [if], data = [none]
intersections.remove(i); // depends on control dependency: [if], data = [none]
}
}
// If the intersections represent a contiguous high- to low-end wrapped range then combine them.
if (intersections.size() == 2 &&
intersections.get(0)._from.equals(MIN_VALUE) && intersections.get(1)._to.equals(MAX_VALUE)) {
ScanRange combined = ScanRange.create(intersections.get(1)._from, intersections.get(0)._to);
intersections.clear(); // depends on control dependency: [if], data = [none]
intersections.add(combined); // depends on control dependency: [if], data = [none]
}
}
return intersections;
} } |
public class class_name {
@Override
public Object onRequest(ActivityRuntimeContext context, Object content, Map<String,String> headers, Object connection) {
if (connection instanceof HttpConnection) {
HttpConnection httpConnection = (HttpConnection)connection;
Tracing tracing = TraceHelper.getTracing("mdw-adapter");
HttpTracing httpTracing = HttpTracing.create(tracing).toBuilder()
.clientParser(new HttpClientParser() {
public <Req> void request(HttpAdapter<Req, ?> adapter, Req req, SpanCustomizer customizer) {
// customize span name
customizer.name(context.getActivity().oneLineName());
}
})
.build();
Tracer tracer = httpTracing.tracing().tracer();
handler = HttpClientHandler.create(httpTracing, new ClientAdapter());
injector = httpTracing.tracing().propagation().injector((httpRequest, key, value) -> headers.put(key, value));
span = handler.handleSend(injector, new HttpRequest(httpConnection));
scope = tracer.withSpanInScope(span);
}
return null;
} } | public class class_name {
@Override
public Object onRequest(ActivityRuntimeContext context, Object content, Map<String,String> headers, Object connection) {
if (connection instanceof HttpConnection) {
HttpConnection httpConnection = (HttpConnection)connection;
Tracing tracing = TraceHelper.getTracing("mdw-adapter");
HttpTracing httpTracing = HttpTracing.create(tracing).toBuilder()
.clientParser(new HttpClientParser() {
public <Req> void request(HttpAdapter<Req, ?> adapter, Req req, SpanCustomizer customizer) {
// customize span name
customizer.name(context.getActivity().oneLineName());
}
})
.build();
Tracer tracer = httpTracing.tracing().tracer();
handler = HttpClientHandler.create(httpTracing, new ClientAdapter()); // depends on control dependency: [if], data = [none]
injector = httpTracing.tracing().propagation().injector((httpRequest, key, value) -> headers.put(key, value)); // depends on control dependency: [if], data = [none]
span = handler.handleSend(injector, new HttpRequest(httpConnection)); // depends on control dependency: [if], data = [none]
scope = tracer.withSpanInScope(span); // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public double get(double defaultValue)
{
final String value =
getInternal(
Double.toString(defaultValue),
false);
if (value == null) {
return limit(defaultValue);
}
double v = Double.parseDouble(value);
// need to limit value in case setString() was called directly with
// an out-of-range value
return limit(v);
} } | public class class_name {
public double get(double defaultValue)
{
final String value =
getInternal(
Double.toString(defaultValue),
false);
if (value == null) {
return limit(defaultValue); // depends on control dependency: [if], data = [none]
}
double v = Double.parseDouble(value);
// need to limit value in case setString() was called directly with
// an out-of-range value
return limit(v);
} } |
public class class_name {
@GuardedBy("maxNumberInvariant")
private void resetNumberOfKeys() {
maxNumberOfKeys.set(address2key.keySet().size() * bufferSize);
existingKeyCount.set(0);
if (trace) {
log.tracef("resetNumberOfKeys ends with: maxNumberOfKeys=%s, existingKeyCount=%s",
maxNumberOfKeys.get(), existingKeyCount.get());
}
} } | public class class_name {
@GuardedBy("maxNumberInvariant")
private void resetNumberOfKeys() {
maxNumberOfKeys.set(address2key.keySet().size() * bufferSize);
existingKeyCount.set(0);
if (trace) {
log.tracef("resetNumberOfKeys ends with: maxNumberOfKeys=%s, existingKeyCount=%s",
maxNumberOfKeys.get(), existingKeyCount.get()); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public static String replacePropertiesInString(String line, Resource propertyResource) {
Properties properties = new Properties();
try {
properties.load(propertyResource.getInputStream());
} catch (IOException e) {
return line;
}
return replacePropertiesInString(line, properties);
} } | public class class_name {
public static String replacePropertiesInString(String line, Resource propertyResource) {
Properties properties = new Properties();
try {
properties.load(propertyResource.getInputStream()); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
return line;
} // depends on control dependency: [catch], data = [none]
return replacePropertiesInString(line, properties);
} } |
public class class_name {
public Runnable createRunnableHandler(final IRequest ireq ,final IResponse ires,final HttpInboundConnection inboundConnection) {
String requestUri = ireq.getRequestURI();
//PI05525
// needs to make sure we normalize and then map it to an webapp. In case the request is http://host/contextroot/../ , map to /
if(normalizeRequestURI){
requestUri = WebApp.normalize(requestUri); // normalize is a helper method not mapped to any webapp
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc,"normalized request uri --> ", requestUri);
}
WebApp sc = (WebApp) findContext(requestUri);
if ((sc == null) && (requestUri.indexOf("%") >= 0)) {
// context could contain double byte char, so decode it now.
// for performance reasons this was not decoded before doing the first look up, since it is not
// common to have double byte chars in the context.
try {
if (this.webContainerParent.getDecodePlusSign()) {
requestUri = URLDecoder.decode(requestUri, this.webContainerParent.getURIEncoding());
} else {
requestUri = WSURLDecoder.decode(requestUri, this.webContainerParent.getURIEncoding());
}
} catch (IOException e) {
// unexpected - log FFDC and leave.
com.ibm.wsspi.webcontainer.util.FFDCWrapper.processException(e, CLASS_NAME, "222", this);
return null;
}
sc = (WebApp) findContext(requestUri);
}
final WebApp webApp = sc;
if (webApp == null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "request for--> ["+ requestUri+"], inboundConnection --> [" + inboundConnection + "], this --> " + this);
Tr.debug(tc, "Context not found, return null");
}
// check if we need to send back a 503 since its predefined
if (predefinedMatcher != null && predefinedMatcher.match(requestUri) != null) {
return new Runnable() {
@Override
public void run() {
sendError(inboundConnection, ireq, ires);
}
};
}
return null;
} else if (!webApp.isInitialized()) {
if (startWebApp(webApp) == false) {
return null;
}
}
return new Bridge() {
HttpInboundConnection httpInboundConnection = inboundConnection;
public void run() {
Exception error = null;
try {
// give a way to speed up finding the request processor:
// we know where it has to come back to..
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "Webcontainer handleRequest start for--> ["+ireq.getRequestURI()+"], mapped webApp context ["+ webApp + "], inboundConnection --> [" + inboundConnection + "], this --> " + this);
}
webContainerParent.handleRequest(ireq, ires, DynamicVirtualHost.this, this);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "Webcontainer handleRequest complete for--> ["+ireq.getRequestURI()+"], mapped webApp context ["+ webApp + "], inboundConnection --> [" + inboundConnection + "], this --> " + this);
}
} catch (IOException e) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "Error during request handling; " + e, inboundConnection, ireq);
}
error = e;
} finally {
// If not async, then finish. Else, finish will need to be done by the last thread working on the response
if (ireq.isStartAsync() == false)
inboundConnection.finish(error);
}
}
@Override
public String getName() {
return DynamicVirtualHost.this.getName();
}
/**
* Invocation of this is via the webContainerParent.handleRequest, and
* from other sources that look up this vhost by name..
* In this case, we're looking in our requestMapper by the request URI
* to see which processor should be used to handle the request.
*/
@Override
public void handleRequest(ServletRequest req, ServletResponse res) throws Exception {
String hostAlias = WebContainer.getHostAliasKey(req.getServerName(), req.getServerPort());
addSecureRedirect(req, hostAlias);
webApp.handleRequest(req, res, httpInboundConnection);
}
@Override
public boolean isInternal() {
return DynamicVirtualHost.this.isInternal();
}
};
} } | public class class_name {
public Runnable createRunnableHandler(final IRequest ireq ,final IResponse ires,final HttpInboundConnection inboundConnection) {
String requestUri = ireq.getRequestURI();
//PI05525
// needs to make sure we normalize and then map it to an webapp. In case the request is http://host/contextroot/../ , map to /
if(normalizeRequestURI){
requestUri = WebApp.normalize(requestUri); // normalize is a helper method not mapped to any webapp // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc,"normalized request uri --> ", requestUri);
}
WebApp sc = (WebApp) findContext(requestUri);
if ((sc == null) && (requestUri.indexOf("%") >= 0)) {
// context could contain double byte char, so decode it now.
// for performance reasons this was not decoded before doing the first look up, since it is not
// common to have double byte chars in the context.
try {
if (this.webContainerParent.getDecodePlusSign()) {
requestUri = URLDecoder.decode(requestUri, this.webContainerParent.getURIEncoding()); // depends on control dependency: [if], data = [none]
} else {
requestUri = WSURLDecoder.decode(requestUri, this.webContainerParent.getURIEncoding()); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
// unexpected - log FFDC and leave.
com.ibm.wsspi.webcontainer.util.FFDCWrapper.processException(e, CLASS_NAME, "222", this);
return null;
} // depends on control dependency: [catch], data = [none]
sc = (WebApp) findContext(requestUri); // depends on control dependency: [if], data = [none]
}
final WebApp webApp = sc;
if (webApp == null) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "request for--> ["+ requestUri+"], inboundConnection --> [" + inboundConnection + "], this --> " + this); // depends on control dependency: [if], data = [none]
Tr.debug(tc, "Context not found, return null"); // depends on control dependency: [if], data = [none]
}
// check if we need to send back a 503 since its predefined
if (predefinedMatcher != null && predefinedMatcher.match(requestUri) != null) {
return new Runnable() {
@Override
public void run() {
sendError(inboundConnection, ireq, ires);
}
}; // depends on control dependency: [if], data = [none]
}
return null; // depends on control dependency: [if], data = [none]
} else if (!webApp.isInitialized()) {
if (startWebApp(webApp) == false) {
return null; // depends on control dependency: [if], data = [none]
}
}
return new Bridge() {
HttpInboundConnection httpInboundConnection = inboundConnection;
public void run() {
Exception error = null;
try {
// give a way to speed up finding the request processor:
// we know where it has to come back to..
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "Webcontainer handleRequest start for--> ["+ireq.getRequestURI()+"], mapped webApp context ["+ webApp + "], inboundConnection --> [" + inboundConnection + "], this --> " + this); // depends on control dependency: [if], data = [none]
}
webContainerParent.handleRequest(ireq, ires, DynamicVirtualHost.this, this); // depends on control dependency: [try], data = [none]
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "Webcontainer handleRequest complete for--> ["+ireq.getRequestURI()+"], mapped webApp context ["+ webApp + "], inboundConnection --> [" + inboundConnection + "], this --> " + this); // depends on control dependency: [if], data = [none]
}
} catch (IOException e) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(this, tc, "Error during request handling; " + e, inboundConnection, ireq); // depends on control dependency: [if], data = [none]
}
error = e;
} finally { // depends on control dependency: [catch], data = [none]
// If not async, then finish. Else, finish will need to be done by the last thread working on the response
if (ireq.isStartAsync() == false)
inboundConnection.finish(error);
}
}
@Override
public String getName() {
return DynamicVirtualHost.this.getName();
}
/**
* Invocation of this is via the webContainerParent.handleRequest, and
* from other sources that look up this vhost by name..
* In this case, we're looking in our requestMapper by the request URI
* to see which processor should be used to handle the request.
*/
@Override
public void handleRequest(ServletRequest req, ServletResponse res) throws Exception {
String hostAlias = WebContainer.getHostAliasKey(req.getServerName(), req.getServerPort());
addSecureRedirect(req, hostAlias);
webApp.handleRequest(req, res, httpInboundConnection);
}
@Override
public boolean isInternal() {
return DynamicVirtualHost.this.isInternal();
}
};
} } |
public class class_name {
@SuppressWarnings("unchecked")
static public void addDecoder(CamelContext context, String name, ChannelUpstreamHandler decoder){
CombinedRegistry registry = getCombinedRegistry(context);
addCodecOnly(registry, name, decoder);
List<ChannelUpstreamHandler> decoders;
Object o = registry.lookup(NAME_DECODERS);
if (o == null){
decoders = new ArrayList<ChannelUpstreamHandler>();
registry.getDefaultSimpleRegistry().put(NAME_DECODERS, decoders);
}else{
try{
decoders = (List<ChannelUpstreamHandler>)o;
}catch(Exception e){
throw new IllegalArgumentException("Preserved name '" + NAME_DECODERS + "' is already being used by others in at least one of the registries.");
}
}
decoders.add(decoder);
} } | public class class_name {
@SuppressWarnings("unchecked")
static public void addDecoder(CamelContext context, String name, ChannelUpstreamHandler decoder){
CombinedRegistry registry = getCombinedRegistry(context);
addCodecOnly(registry, name, decoder);
List<ChannelUpstreamHandler> decoders;
Object o = registry.lookup(NAME_DECODERS);
if (o == null){
            decoders = new ArrayList<ChannelUpstreamHandler>(); // depends on control dependency: [if], data = [none]
            registry.getDefaultSimpleRegistry().put(NAME_DECODERS, decoders); // depends on control dependency: [if], data = [none]
        }else{
            try{
                decoders = (List<ChannelUpstreamHandler>)o; // depends on control dependency: [try], data = [none]
            }catch(Exception e){
                throw new IllegalArgumentException("Preserved name '" + NAME_DECODERS + "' is already being used by others in at least one of the registries.");
            } // depends on control dependency: [catch], data = [none]
        }
decoders.add(decoder);
} } |
public class class_name {
public void shuffleRowValues(ArrayList<String> rows) throws Exception {
doubleValues.clear();
ArrayList<String> columns = this.getColumns();
for (String row : rows) {
ArrayList<Integer> columnIndex = new ArrayList<Integer>();
for (int i = 0; i < columns.size(); i++) {
columnIndex.add(i);
}
Collections.shuffle(columnIndex);
for (int i = 0; i < columns.size(); i++) {
String column = columns.get(i);
int randomIndex = columnIndex.get(i);
String destinationCol = columns.get(randomIndex);
String temp = this.getCell(row, destinationCol);
String value = this.getCell(row, column);
this.addCell(row, destinationCol, value);
this.addCell(row, column, temp);
}
}
} } | public class class_name {
public void shuffleRowValues(ArrayList<String> rows) throws Exception {
doubleValues.clear();
ArrayList<String> columns = this.getColumns();
for (String row : rows) {
ArrayList<Integer> columnIndex = new ArrayList<Integer>();
for (int i = 0; i < columns.size(); i++) {
columnIndex.add(i); // depends on control dependency: [for], data = [i]
}
Collections.shuffle(columnIndex);
for (int i = 0; i < columns.size(); i++) {
String column = columns.get(i);
int randomIndex = columnIndex.get(i);
String destinationCol = columns.get(randomIndex);
String temp = this.getCell(row, destinationCol);
String value = this.getCell(row, column);
this.addCell(row, destinationCol, value); // depends on control dependency: [for], data = [none]
this.addCell(row, column, temp); // depends on control dependency: [for], data = [none]
}
}
} } |
public class class_name {
public void update(@NonNull View from) {
if (GestureDebug.isDebugAnimator()) {
Log.d(TAG, "Updating view");
}
updateInternal(from);
} } | public class class_name {
public void update(@NonNull View from) {
if (GestureDebug.isDebugAnimator()) {
Log.d(TAG, "Updating view"); // depends on control dependency: [if], data = [none]
}
updateInternal(from);
} } |
public class class_name {
private void suspectSessions(long exclude, long timestamp) {
for (ServerSessionContext session : executor.context().sessions().sessions.values()) {
if (session.id() != exclude && timestamp - session.timeout() > session.getTimestamp()) {
session.suspect();
}
}
} } | public class class_name {
private void suspectSessions(long exclude, long timestamp) {
for (ServerSessionContext session : executor.context().sessions().sessions.values()) {
if (session.id() != exclude && timestamp - session.timeout() > session.getTimestamp()) {
session.suspect(); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private void clearOfflineEntries() {
if (!isEnabled()) {
return;
}
if (LOG.isInfoEnabled()) {
LOG.info(Messages.get().getBundle().key(Messages.LOG_FLEXCACHE_CLEAR_OFFLINE_ENTRIES_0));
}
clearAccordingToSuffix(CACHE_OFFLINESUFFIX, true);
} } | public class class_name {
private void clearOfflineEntries() {
if (!isEnabled()) {
return; // depends on control dependency: [if], data = [none]
}
if (LOG.isInfoEnabled()) {
LOG.info(Messages.get().getBundle().key(Messages.LOG_FLEXCACHE_CLEAR_OFFLINE_ENTRIES_0)); // depends on control dependency: [if], data = [none]
}
clearAccordingToSuffix(CACHE_OFFLINESUFFIX, true);
} } |
public class class_name {
private static List<String> readUsingZipInputStream(final InputStream inputStream) throws IOException {
final BufferedInputStream bis = new BufferedInputStream(inputStream);
final ZipInputStream is = new ZipInputStream(bis);
final List<String> list = new ArrayList<>();
try {
ZipEntry entry;
while ((entry = is.getNextEntry()) != null) {
if (entry.getName().startsWith("API_SWE_")) {
list.addAll(readCsvContent(is));
}
}
} finally {
is.close();
}
return list;
} } | public class class_name {
private static List<String> readUsingZipInputStream(final InputStream inputStream) throws IOException {
final BufferedInputStream bis = new BufferedInputStream(inputStream);
final ZipInputStream is = new ZipInputStream(bis);
final List<String> list = new ArrayList<>();
try {
ZipEntry entry;
while ((entry = is.getNextEntry()) != null) {
if (entry.getName().startsWith("API_SWE_")) {
list.addAll(readCsvContent(is)); // depends on control dependency: [if], data = [none]
}
}
} finally {
is.close();
}
return list;
} } |
public class class_name {
public @Nullable <T> T get(@NotNull String name, @NotNull Class<T> type) {
@Nullable
T value = getPageProperty(currentPage, name, type);
if (value == null) {
value = getComponentProperty(currentComponent, name, type);
}
return value;
} } | public class class_name {
public @Nullable <T> T get(@NotNull String name, @NotNull Class<T> type) {
@Nullable
T value = getPageProperty(currentPage, name, type);
if (value == null) {
value = getComponentProperty(currentComponent, name, type); // depends on control dependency: [if], data = [none]
}
return value;
} } |
public class class_name {
public static <T> Constructor<T> getConstructor(Class<T> instantiable, Class<?>... constructorParameterTypes)
{
try
{
return instantiable.getConstructor(constructorParameterTypes);
}
catch (NoSuchMethodException ignored)
{
return ReflectionHelper.searchForConstructor(instantiable, constructorParameterTypes);
}
} } | public class class_name {
public static <T> Constructor<T> getConstructor(Class<T> instantiable, Class<?>... constructorParameterTypes)
{
try
{
return instantiable.getConstructor(constructorParameterTypes); // depends on control dependency: [try], data = [none]
}
catch (NoSuchMethodException ignored)
{
return ReflectionHelper.searchForConstructor(instantiable, constructorParameterTypes);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public void marshall(ListBackupJobsRequest listBackupJobsRequest, ProtocolMarshaller protocolMarshaller) {
if (listBackupJobsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listBackupJobsRequest.getNextToken(), NEXTTOKEN_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getMaxResults(), MAXRESULTS_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getByResourceArn(), BYRESOURCEARN_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getByState(), BYSTATE_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getByBackupVaultName(), BYBACKUPVAULTNAME_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getByCreatedBefore(), BYCREATEDBEFORE_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getByCreatedAfter(), BYCREATEDAFTER_BINDING);
protocolMarshaller.marshall(listBackupJobsRequest.getByResourceType(), BYRESOURCETYPE_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
} } | public class class_name {
public void marshall(ListBackupJobsRequest listBackupJobsRequest, ProtocolMarshaller protocolMarshaller) {
if (listBackupJobsRequest == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(listBackupJobsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getByResourceArn(), BYRESOURCEARN_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getByState(), BYSTATE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getByBackupVaultName(), BYBACKUPVAULTNAME_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getByCreatedBefore(), BYCREATEDBEFORE_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getByCreatedAfter(), BYCREATEDAFTER_BINDING); // depends on control dependency: [try], data = [none]
protocolMarshaller.marshall(listBackupJobsRequest.getByResourceType(), BYRESOURCETYPE_BINDING); // depends on control dependency: [try], data = [none]
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
protected void encodeMask(FacesContext context,
InputText inputText,
String fieldId,
ResponseWriter rw) throws IOException {
if (inputText.getMask() != null && !inputText.getMask().isEmpty()) {
rw.startElement("script", inputText);
rw.writeText("Inputmask(", null);
if (inputText.getMask().trim().startsWith("{")) {
rw.writeText(inputText.getMask().trim(), null);
}
else {
rw.writeText(String.format("\"%s\"", inputText.getMask().replace("\"", "\\\"")), null);
}
rw.writeText(").mask(document.getElementById(\"", null);
rw.writeText(fieldId, null);
rw.writeText("\"));", null);
rw.endElement("script");
}
} } | public class class_name {
protected void encodeMask(FacesContext context,
InputText inputText,
String fieldId,
ResponseWriter rw) throws IOException {
if (inputText.getMask() != null && !inputText.getMask().isEmpty()) {
rw.startElement("script", inputText);
rw.writeText("Inputmask(", null);
if (inputText.getMask().trim().startsWith("{")) {
rw.writeText(inputText.getMask().trim(), null); // depends on control dependency: [if], data = [none]
}
else {
rw.writeText(String.format("\"%s\"", inputText.getMask().replace("\"", "\\\"")), null); // depends on control dependency: [if], data = [none]
}
rw.writeText(").mask(document.getElementById(\"", null);
rw.writeText(fieldId, null);
rw.writeText("\"));", null);
rw.endElement("script");
}
} } |
public class class_name {
public static <T, X extends Throwable> Tuple3<CompletableFuture<Subscription>, Runnable, CompletableFuture<Boolean>> forEachXEvents(
final Stream<T> stream, final long x, final Consumer<? super T> consumerElement, final Consumer<? super Throwable> consumerError,
final Runnable onComplete) {
final CompletableFuture<Boolean> streamCompleted = new CompletableFuture<>();
final Subscription s = new Subscription() {
Iterator<T> it = stream.iterator();
volatile boolean running = true;
@Override
public void request(final long n) {
for (int i = 0; i < n && running; i++) {
try {
if (it.hasNext()) {
consumerElement.accept(it.next());
} else {
try {
onComplete.run();
} finally {
streamCompleted.complete(true);
break;
}
}
} catch (final Throwable t) {
consumerError.accept(t);
}
}
}
@Override
public void cancel() {
running = false;
}
};
final CompletableFuture<Subscription> subscription = CompletableFuture.completedFuture(s);
return tuple(subscription, () -> {
s.request(x);
} , streamCompleted);
} } | public class class_name {
public static <T, X extends Throwable> Tuple3<CompletableFuture<Subscription>, Runnable, CompletableFuture<Boolean>> forEachXEvents(
final Stream<T> stream, final long x, final Consumer<? super T> consumerElement, final Consumer<? super Throwable> consumerError,
final Runnable onComplete) {
final CompletableFuture<Boolean> streamCompleted = new CompletableFuture<>();
final Subscription s = new Subscription() {
Iterator<T> it = stream.iterator();
volatile boolean running = true;
@Override
public void request(final long n) {
for (int i = 0; i < n && running; i++) {
try {
if (it.hasNext()) {
consumerElement.accept(it.next()); // depends on control dependency: [if], data = [none]
} else {
try {
onComplete.run(); // depends on control dependency: [try], data = [none]
} finally {
streamCompleted.complete(true);
break;
}
}
} catch (final Throwable t) {
consumerError.accept(t);
} // depends on control dependency: [catch], data = [none]
}
}
@Override
public void cancel() {
running = false;
}
};
final CompletableFuture<Subscription> subscription = CompletableFuture.completedFuture(s);
return tuple(subscription, () -> {
s.request(x);
} , streamCompleted);
} } |
public class class_name {
public Long hdel(Object key, Object... fields) {
Jedis jedis = getJedis();
try {
return jedis.hdel(keyToBytes(key), fieldsToBytesArray(fields));
}
finally {close(jedis);}
} } | public class class_name {
public Long hdel(Object key, Object... fields) {
Jedis jedis = getJedis();
try {
    return jedis.hdel(keyToBytes(key), fieldsToBytesArray(fields)); // depends on control dependency: [try], data = [none]
}
finally {close(jedis);}
} } |
public class class_name {
void writeAddHandler(IndentedWriter writer, FieldManager fieldManager,
String handlerVarName, String addHandlerMethodName,
String objectName) {
if (useLazyWidgetBuilders) {
fieldManager.require(objectName).addStatement("%1$s.%2$s(%3$s);",
objectName, addHandlerMethodName, handlerVarName);
} else {
writer.write("%1$s.%2$s(%3$s);", objectName, addHandlerMethodName,
handlerVarName);
}
} } | public class class_name {
void writeAddHandler(IndentedWriter writer, FieldManager fieldManager,
String handlerVarName, String addHandlerMethodName,
String objectName) {
if (useLazyWidgetBuilders) {
      fieldManager.require(objectName).addStatement("%1$s.%2$s(%3$s);", // depends on control dependency: [if], data = [none]
objectName, addHandlerMethodName, handlerVarName);
} else {
writer.write("%1$s.%2$s(%3$s);", objectName, addHandlerMethodName, // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none] // depends on control dependency: [if], data = [none]
handlerVarName);
}
} } |
public class class_name {
public void changePathInURL(String path, boolean changeHistory) {
jcrURL.setPath(path);
if (changeHistory) {
htmlHistory.newItem(jcrURL.toString(), false);
}
} } | public class class_name {
public void changePathInURL(String path, boolean changeHistory) {
jcrURL.setPath(path);
if (changeHistory) {
htmlHistory.newItem(jcrURL.toString(), false); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected void initializeArguments() {
Map<String, BasicDoubleLinkedNode<CliArgumentContainer>> argumentMap = new HashMap<>();
BasicDoubleLinkedNode<CliArgumentContainer> startHead = null;
BasicDoubleLinkedNode<CliArgumentContainer> startTail = null;
BasicDoubleLinkedNode<CliArgumentContainer> endHead = null;
BasicDoubleLinkedNode<CliArgumentContainer> endTail = null;
for (CliArgumentContainer argumentContainer : this.arguments) {
String id = argumentContainer.getId();
if ((CliArgument.ID_FIRST.equals(id)) || (CliArgument.ID_LAST.equals(id))) {
throw new NlsIllegalArgumentException(id, argumentContainer.toString());
}
if (argumentMap.containsKey(id)) {
throw new DuplicateObjectException(argumentContainer, id);
}
BasicDoubleLinkedNode<CliArgumentContainer> node = new BasicDoubleLinkedNode<>();
node.setValue(argumentContainer);
argumentMap.put(id, node);
CliArgument cliArgument = argumentContainer.getArgument();
boolean addAfter = cliArgument.addAfter();
String nextTo = cliArgument.addCloseTo();
if (CliArgument.ID_FIRST.equals(nextTo)) {
if ((startHead == null) || (startTail == null)) {
startHead = node;
startTail = node;
} else {
if (addAfter) {
startTail.insertAsNext(node);
startTail = node;
} else {
startHead.insertAsPrevious(node);
startHead = node;
}
}
argumentContainer.init();
} else if (CliArgument.ID_LAST.equals(nextTo)) {
if ((endTail == null) || (endHead == null)) {
endTail = node;
endHead = node;
} else {
if (addAfter) {
endTail.insertAsNext(node);
endTail = node;
} else {
endHead.insertAsPrevious(node);
endHead = node;
}
}
argumentContainer.init();
}
}
if ((startTail != null) && (endHead != null)) {
// connect start and end of list...
startTail.insertAsNext(endHead);
}
for (BasicDoubleLinkedNode<CliArgumentContainer> node : argumentMap.values()) {
List<String> cycle = initializeArgumentRecursive(node, argumentMap);
if (cycle != null) {
StringBuilder sb = new StringBuilder();
for (int i = cycle.size() - 1; i >= 0; i--) {
sb.append(cycle.get(i));
if (i > 0) {
sb.append("-->");
}
}
throw new IllegalStateException("Cyclic dependency of CLI modes: " + sb.toString());
}
}
// order arguments
if (startHead != null) {
this.arguments.clear();
startHead.addToList(this.arguments);
} else if (endHead != null) {
this.arguments.clear();
BasicDoubleLinkedNode<CliArgumentContainer> node = endHead;
BasicDoubleLinkedNode<CliArgumentContainer> previous = node.getPrevious();
while (previous != null) {
node = previous;
previous = node.getPrevious();
}
node.addToList(this.arguments);
}
} } | public class class_name {
protected void initializeArguments() {
Map<String, BasicDoubleLinkedNode<CliArgumentContainer>> argumentMap = new HashMap<>();
BasicDoubleLinkedNode<CliArgumentContainer> startHead = null;
BasicDoubleLinkedNode<CliArgumentContainer> startTail = null;
BasicDoubleLinkedNode<CliArgumentContainer> endHead = null;
BasicDoubleLinkedNode<CliArgumentContainer> endTail = null;
for (CliArgumentContainer argumentContainer : this.arguments) {
String id = argumentContainer.getId();
if ((CliArgument.ID_FIRST.equals(id)) || (CliArgument.ID_LAST.equals(id))) {
throw new NlsIllegalArgumentException(id, argumentContainer.toString());
}
if (argumentMap.containsKey(id)) {
throw new DuplicateObjectException(argumentContainer, id);
}
BasicDoubleLinkedNode<CliArgumentContainer> node = new BasicDoubleLinkedNode<>();
node.setValue(argumentContainer); // depends on control dependency: [for], data = [argumentContainer]
argumentMap.put(id, node); // depends on control dependency: [for], data = [none]
CliArgument cliArgument = argumentContainer.getArgument();
boolean addAfter = cliArgument.addAfter();
String nextTo = cliArgument.addCloseTo();
if (CliArgument.ID_FIRST.equals(nextTo)) {
if ((startHead == null) || (startTail == null)) {
startHead = node; // depends on control dependency: [if], data = [none]
startTail = node; // depends on control dependency: [if], data = [none]
} else {
if (addAfter) {
startTail.insertAsNext(node); // depends on control dependency: [if], data = [none]
startTail = node; // depends on control dependency: [if], data = [none]
} else {
startHead.insertAsPrevious(node); // depends on control dependency: [if], data = [none]
startHead = node; // depends on control dependency: [if], data = [none]
}
}
argumentContainer.init(); // depends on control dependency: [if], data = [none]
} else if (CliArgument.ID_LAST.equals(nextTo)) {
if ((endTail == null) || (endHead == null)) {
endTail = node; // depends on control dependency: [if], data = [none]
endHead = node; // depends on control dependency: [if], data = [none]
} else {
if (addAfter) {
endTail.insertAsNext(node); // depends on control dependency: [if], data = [none]
endTail = node; // depends on control dependency: [if], data = [none]
} else {
endHead.insertAsPrevious(node); // depends on control dependency: [if], data = [none]
endHead = node; // depends on control dependency: [if], data = [none]
}
}
argumentContainer.init(); // depends on control dependency: [if], data = [none]
}
}
if ((startTail != null) && (endHead != null)) {
// connect start and end of list...
startTail.insertAsNext(endHead); // depends on control dependency: [if], data = [none]
}
for (BasicDoubleLinkedNode<CliArgumentContainer> node : argumentMap.values()) {
List<String> cycle = initializeArgumentRecursive(node, argumentMap);
if (cycle != null) {
StringBuilder sb = new StringBuilder();
for (int i = cycle.size() - 1; i >= 0; i--) {
sb.append(cycle.get(i)); // depends on control dependency: [for], data = [i]
if (i > 0) {
sb.append("-->"); // depends on control dependency: [if], data = [none]
}
}
throw new IllegalStateException("Cyclic dependency of CLI modes: " + sb.toString());
}
}
// order arguments
if (startHead != null) {
this.arguments.clear(); // depends on control dependency: [if], data = [none]
startHead.addToList(this.arguments); // depends on control dependency: [if], data = [none]
} else if (endHead != null) {
this.arguments.clear(); // depends on control dependency: [if], data = [none]
BasicDoubleLinkedNode<CliArgumentContainer> node = endHead;
BasicDoubleLinkedNode<CliArgumentContainer> previous = node.getPrevious();
while (previous != null) {
node = previous; // depends on control dependency: [while], data = [none]
previous = node.getPrevious(); // depends on control dependency: [while], data = [none]
}
node.addToList(this.arguments); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
protected byte[] getFileBytes(String filename) {
try {
// is this a zip-file?
if (m_importZip != null) {
// yes
ZipEntry entry = m_importZip.getEntry(filename);
// path to file might be relative, too
if ((entry == null) && filename.startsWith("/")) {
entry = m_importZip.getEntry(filename.substring(1));
}
if (entry == null) {
throw new ZipException(
Messages.get().getBundle().key(Messages.LOG_IMPORTEXPORT_FILE_NOT_FOUND_IN_ZIP_1, filename));
}
InputStream stream = m_importZip.getInputStream(entry);
int size = new Long(entry.getSize()).intValue();
return CmsFileUtil.readFully(stream, size);
} else {
// no - use directory
File file = new File(m_importResource, filename);
return CmsFileUtil.readFile(file);
}
} catch (FileNotFoundException fnfe) {
if (LOG.isErrorEnabled()) {
LOG.error(Messages.get().getBundle().key(Messages.ERR_IMPORTEXPORT_FILE_NOT_FOUND_1, filename), fnfe);
}
m_report.println(fnfe);
} catch (IOException ioe) {
if (LOG.isErrorEnabled()) {
LOG.error(
Messages.get().getBundle().key(Messages.ERR_IMPORTEXPORT_ERROR_READING_FILE_1, filename),
ioe);
}
m_report.println(ioe);
}
// this will only be returned in case there was an exception
return "".getBytes();
} } | public class class_name {
protected byte[] getFileBytes(String filename) {
try {
// is this a zip-file?
if (m_importZip != null) {
// yes
ZipEntry entry = m_importZip.getEntry(filename);
// path to file might be relative, too
if ((entry == null) && filename.startsWith("/")) {
entry = m_importZip.getEntry(filename.substring(1)); // depends on control dependency: [if], data = [none]
}
if (entry == null) {
throw new ZipException(
Messages.get().getBundle().key(Messages.LOG_IMPORTEXPORT_FILE_NOT_FOUND_IN_ZIP_1, filename));
}
InputStream stream = m_importZip.getInputStream(entry);
int size = new Long(entry.getSize()).intValue();
return CmsFileUtil.readFully(stream, size); // depends on control dependency: [if], data = [none]
} else {
// no - use directory
File file = new File(m_importResource, filename);
return CmsFileUtil.readFile(file); // depends on control dependency: [if], data = [none]
}
} catch (FileNotFoundException fnfe) {
if (LOG.isErrorEnabled()) {
LOG.error(Messages.get().getBundle().key(Messages.ERR_IMPORTEXPORT_FILE_NOT_FOUND_1, filename), fnfe); // depends on control dependency: [if], data = [none]
}
m_report.println(fnfe);
} catch (IOException ioe) { // depends on control dependency: [catch], data = [none]
if (LOG.isErrorEnabled()) {
LOG.error(
Messages.get().getBundle().key(Messages.ERR_IMPORTEXPORT_ERROR_READING_FILE_1, filename),
ioe); // depends on control dependency: [if], data = [none]
}
m_report.println(ioe);
} // depends on control dependency: [catch], data = [none]
// this will only be returned in case there was an exception
return "".getBytes();
} } |
public class class_name {
private static Parameter[] getParametersWithName(Executable m) {
final java.lang.reflect.Parameter[] ps = m.getParameters();
Parameter[] parameters = new Parameter[ps.length];
for(int i = 0; i < parameters.length; i++) {
parameters[i] = new Parameter(i, ps[i].getName(), ps[i].getType(), ps[i].getParameterizedType());
}
return parameters;
} } | public class class_name {
private static Parameter[] getParametersWithName(Executable m) {
final java.lang.reflect.Parameter[] ps = m.getParameters();
Parameter[] parameters = new Parameter[ps.length];
for(int i = 0; i < parameters.length; i++) {
parameters[i] = new Parameter(i, ps[i].getName(), ps[i].getType(), ps[i].getParameterizedType()); // depends on control dependency: [for], data = [i]
}
return parameters;
} } |
public class class_name {
@Override
public Every visit(final Every every) {
String description;
if (every.getPeriod().getValue() > 1) {
description = String.format("%s %s ", bundle.getString(EVERY), nominalValue(every.getPeriod())) + " %p ";
} else {
description = bundle.getString(EVERY) + " %s ";
}
//TODO save the description?
return every;
} } | public class class_name {
@Override
public Every visit(final Every every) {
String description;
if (every.getPeriod().getValue() > 1) {
description = String.format("%s %s ", bundle.getString(EVERY), nominalValue(every.getPeriod())) + " %p "; // depends on control dependency: [if], data = [none]
} else {
description = bundle.getString(EVERY) + " %s "; // depends on control dependency: [if], data = [none]
}
//TODO save the description?
return every;
} } |
public class class_name {
public synchronized Object put(Object pathSpec, Object object)
{
StringTokenizer tok = new StringTokenizer(pathSpec.toString(),__pathSpecSeparators);
Object old =null;
while (tok.hasMoreTokens())
{
String spec=tok.nextToken();
if (!spec.startsWith("/") && !spec.startsWith("*."))
{
log.warn("PathSpec "+spec+". must start with '/' or '*.'");
spec="/"+spec;
}
old = super.put(spec,object);
// Make entry that was just created.
Entry entry = new Entry(spec,object);
if (entry.getKey().equals(spec))
{
if (spec.equals("/*"))
_prefixDefault=entry;
else if (spec.endsWith("/*"))
{
_prefixMap.put(spec.substring(0,spec.length()-2),entry);
_exactMap.put(spec.substring(0,spec.length()-1),entry);
_exactMap.put(spec.substring(0,spec.length()-2),entry);
}
else if (spec.startsWith("*."))
_suffixMap.put(spec.substring(2),entry);
else if (spec.equals("/"))
{
if (_nodefault)
_exactMap.put(spec,entry);
else
{
_default=entry;
_defaultSingletonList=
SingletonList.newSingletonList(_default);
}
}
else
_exactMap.put(spec,entry);
}
}
return old;
} } | public class class_name {
public synchronized Object put(Object pathSpec, Object object)
{
StringTokenizer tok = new StringTokenizer(pathSpec.toString(),__pathSpecSeparators);
Object old =null;
while (tok.hasMoreTokens())
{
String spec=tok.nextToken();
if (!spec.startsWith("/") && !spec.startsWith("*."))
{
log.warn("PathSpec "+spec+". must start with '/' or '*.'"); // depends on control dependency: [if], data = [none]
spec="/"+spec; // depends on control dependency: [if], data = [none]
}
old = super.put(spec,object); // depends on control dependency: [while], data = [none]
// Make entry that was just created.
Entry entry = new Entry(spec,object);
if (entry.getKey().equals(spec))
{
if (spec.equals("/*"))
_prefixDefault=entry;
else if (spec.endsWith("/*"))
{
_prefixMap.put(spec.substring(0,spec.length()-2),entry); // depends on control dependency: [if], data = [none]
_exactMap.put(spec.substring(0,spec.length()-1),entry); // depends on control dependency: [if], data = [none]
_exactMap.put(spec.substring(0,spec.length()-2),entry); // depends on control dependency: [if], data = [none]
}
else if (spec.startsWith("*."))
_suffixMap.put(spec.substring(2),entry);
else if (spec.equals("/"))
{
if (_nodefault)
_exactMap.put(spec,entry);
else
{
_default=entry; // depends on control dependency: [if], data = [none]
_defaultSingletonList=
SingletonList.newSingletonList(_default); // depends on control dependency: [if], data = [none]
}
}
else
_exactMap.put(spec,entry);
}
}
return old;
} } |
public class class_name {
public static Pattern pattern(AbstractConfig config, String key) {
String pattern = config.getString(key);
try {
return Pattern.compile(pattern);
} catch (PatternSyntaxException e) {
throw new ConfigException(
key,
pattern,
String.format(
"Could not compile regex '%s'.",
pattern
)
);
}
} } | public class class_name {
public static Pattern pattern(AbstractConfig config, String key) {
String pattern = config.getString(key);
try {
return Pattern.compile(pattern); // depends on control dependency: [try], data = [none]
} catch (PatternSyntaxException e) {
throw new ConfigException(
key,
pattern,
String.format(
"Could not compile regex '%s'.",
pattern
)
);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private static String definitionListToHtml( DefinitionList dl){
if( dl == null ) {
return "null";
}
StringBuilder result = new StringBuilder();
result.append("<table class=\"DefinitionList\">\n" +
"<tr><th class=\"DefinitionList\">DefinitionList</th></tr>\n"+
"<tr><td class=\"DefinitionList\">" );
if( dl.getDefinedTerm() != null ) {
result.append( contentElementToHtml( dl.getDefinedTerm() )+ "\n");
}
result.append("<ul>");
for( ContentElement ce: dl.getDefinitions() ) {
result.append("<li>"+contentElementToHtml(ce)+"</li>" );
}
result.append("</ul>\n");
result.append("</td></tr>\n" );
result.append("</table>\n");
return result.toString();
} } | public class class_name {
private static String definitionListToHtml( DefinitionList dl){
if( dl == null ) {
return "null"; // depends on control dependency: [if], data = [none]
}
StringBuilder result = new StringBuilder();
result.append("<table class=\"DefinitionList\">\n" +
"<tr><th class=\"DefinitionList\">DefinitionList</th></tr>\n"+
"<tr><td class=\"DefinitionList\">" );
if( dl.getDefinedTerm() != null ) {
result.append( contentElementToHtml( dl.getDefinedTerm() )+ "\n"); // depends on control dependency: [if], data = [( dl.getDefinedTerm()]
}
result.append("<ul>");
for( ContentElement ce: dl.getDefinitions() ) {
result.append("<li>"+contentElementToHtml(ce)+"</li>" ); // depends on control dependency: [for], data = [ce]
}
result.append("</ul>\n");
result.append("</td></tr>\n" );
result.append("</table>\n");
return result.toString();
} } |
public class class_name {
public void closeBecauseAppStopping(CloseReason cr) {
if (connLink.getLinkStatus() == LINK_STATUS.IO_OK) {
try {
connLink.close(cr, true, true);
} catch (RuntimeException e) {
if (connLink.getLinkStatus() == LINK_STATUS.IO_OK) {
if (tc.isDebugEnabled()) {
Tr.debug(tc, "Runtime exception during application stop close. IO status is ok so throwing exception.");
}
throw e;
}
else {
if (tc.isDebugEnabled()) {
Tr.debug(tc,
"Runtime exception during application stop close. IO status is not ok so likely an exception during server shutdown, not throwing exception.");
}
}
}
}
else {
if (tc.isDebugEnabled()) {
Tr.debug(tc,
"Application stopped and tried to close connection, but IO status is not OK which is indicative of server shutting down, not attempting connection close.");
}
}
} } | public class class_name {
public void closeBecauseAppStopping(CloseReason cr) {
if (connLink.getLinkStatus() == LINK_STATUS.IO_OK) {
try {
connLink.close(cr, true, true); // depends on control dependency: [try], data = [none]
} catch (RuntimeException e) {
if (connLink.getLinkStatus() == LINK_STATUS.IO_OK) {
if (tc.isDebugEnabled()) {
Tr.debug(tc, "Runtime exception during application stop close. IO status is ok so throwing exception."); // depends on control dependency: [if], data = [none]
}
throw e;
}
else {
if (tc.isDebugEnabled()) {
Tr.debug(tc,
"Runtime exception during application stop close. IO status is not ok so likely an exception during server shutdown, not throwing exception."); // depends on control dependency: [if], data = [none]
}
}
} // depends on control dependency: [catch], data = [none]
}
else {
if (tc.isDebugEnabled()) {
Tr.debug(tc,
"Application stopped and tried to close connection, but IO status is not OK which is indicative of server shutting down, not attempting connection close."); // depends on control dependency: [if], data = [none]
}
}
} } |
public class class_name {
private int getXForY(int newX, int newY) {
int width = (int)Math.floor((newY - m_firstY) / m_heightToWidth);
int result = m_firstX + width;
if (((m_firstX - newX) * (m_firstX - result)) < 0) {
result = m_firstX - width;
}
return result;
} } | public class class_name {
private int getXForY(int newX, int newY) {
int width = (int)Math.floor((newY - m_firstY) / m_heightToWidth);
int result = m_firstX + width;
if (((m_firstX - newX) * (m_firstX - result)) < 0) {
result = m_firstX - width; // depends on control dependency: [if], data = [none]
}
return result;
} } |
public class class_name {
public void printXMLComment(String content, boolean escape)
{
try {
this.xmlWriter.write(new DefaultComment(escape ? XMLUtils.escapeXMLComment(content) : content));
} catch (IOException e) {
// TODO: add error log here
}
} } | public class class_name {
public void printXMLComment(String content, boolean escape)
{
try {
this.xmlWriter.write(new DefaultComment(escape ? XMLUtils.escapeXMLComment(content) : content)); // depends on control dependency: [try], data = [none]
} catch (IOException e) {
// TODO: add error log here
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
@Override
protected String doBuildColumnString(String dm) {
StringBuilder sb = new StringBuilder();
sb.append(dm).append(createdTime);
sb.append(dm).append(expiredTime);
sb.append(dm).append(name);
sb.append(dm).append(sessionId);
if (sb.length() > dm.length()) {
sb.delete(0, dm.length());
}
sb.insert(0, "{").append("}");
return sb.toString();
} } | public class class_name {
@Override
protected String doBuildColumnString(String dm) {
StringBuilder sb = new StringBuilder();
sb.append(dm).append(createdTime);
sb.append(dm).append(expiredTime);
sb.append(dm).append(name);
sb.append(dm).append(sessionId);
if (sb.length() > dm.length()) {
sb.delete(0, dm.length()); // depends on control dependency: [if], data = [dm.length())]
}
sb.insert(0, "{").append("}");
return sb.toString();
} } |
public class class_name {
public static void readLine(final String format, final String line, final Map<String, Map<String, Double>> mapMetricUserValue, final Set<String> usersToAvoid) {
String[] toks = line.split("\t");
// default (also trec_eval) format: metric \t user|all \t value
if (format.equals("default")) {
String metric = toks[0];
String user = toks[1];
Double score = Double.parseDouble(toks[2]);
if (usersToAvoid.contains(user)) {
return;
}
Map<String, Double> userValueMap = mapMetricUserValue.get(metric);
if (userValueMap == null) {
userValueMap = new HashMap<String, Double>();
mapMetricUserValue.put(metric, userValueMap);
}
userValueMap.put(user, score);
}
} } | public class class_name {
public static void readLine(final String format, final String line, final Map<String, Map<String, Double>> mapMetricUserValue, final Set<String> usersToAvoid) {
String[] toks = line.split("\t");
// default (also trec_eval) format: metric \t user|all \t value
if (format.equals("default")) {
String metric = toks[0];
String user = toks[1];
Double score = Double.parseDouble(toks[2]);
if (usersToAvoid.contains(user)) {
return; // depends on control dependency: [if], data = [none]
}
Map<String, Double> userValueMap = mapMetricUserValue.get(metric);
if (userValueMap == null) {
userValueMap = new HashMap<String, Double>(); // depends on control dependency: [if], data = [none]
mapMetricUserValue.put(metric, userValueMap); // depends on control dependency: [if], data = [none]
}
userValueMap.put(user, score); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
public boolean containsWordBreakingChar(final @NonNull CharSequence input) {
if (!TextUtils.isEmpty(input)) {
for (int i = 0; i < input.length(); i++) {
char c = input.charAt(i);
if (isWordBreakingChar(c)) {
return true;
}
}
}
return false;
} } | public class class_name {
public boolean containsWordBreakingChar(final @NonNull CharSequence input) {
if (!TextUtils.isEmpty(input)) {
for (int i = 0; i < input.length(); i++) {
char c = input.charAt(i);
if (isWordBreakingChar(c)) {
return true; // depends on control dependency: [if], data = [none]
}
}
}
return false;
} } |
public class class_name {
public static boolean isValid(final String cssValue) {
final String trimmedCssValue = TagStringUtil
.toLowerCase(StringUtil.strip(cssValue));
if (StringUtil.containsSpace(trimmedCssValue)) {
return false;
}
try {
Float.parseFloat(trimmedCssValue);
return true;
} catch (final NumberFormatException e) {
}
return PREDEFINED_CONSTANTS.contains(trimmedCssValue);
} } | public class class_name {
public static boolean isValid(final String cssValue) {
final String trimmedCssValue = TagStringUtil
.toLowerCase(StringUtil.strip(cssValue));
if (StringUtil.containsSpace(trimmedCssValue)) {
return false; // depends on control dependency: [if], data = [none]
}
try {
Float.parseFloat(trimmedCssValue); // depends on control dependency: [try], data = [none]
return true; // depends on control dependency: [try], data = [none]
} catch (final NumberFormatException e) {
} // depends on control dependency: [catch], data = [none]
return PREDEFINED_CONSTANTS.contains(trimmedCssValue);
} } |
public class class_name {
protected void firePseudoAttributes()
{
if (m_tracer != null)
{
try
{
// flush out the "<elemName" if not already flushed
m_writer.flush();
// make a StringBuffer to write the name="value" pairs to.
StringBuffer sb = new StringBuffer();
int nAttrs = m_attributes.getLength();
if (nAttrs > 0)
{
// make a writer that internally appends to the same
// StringBuffer
java.io.Writer writer =
new ToStream.WritertoStringBuffer(sb);
processAttributes(writer, nAttrs);
// Don't clear the attributes!
// We only want to see what would be written out
// at this point, we don't want to loose them.
}
sb.append('>'); // the potential > after the attributes.
// convert the StringBuffer to a char array and
// emit the trace event that these characters "might"
// be written
char ch[] = sb.toString().toCharArray();
m_tracer.fireGenerateEvent(
SerializerTrace.EVENTTYPE_OUTPUT_PSEUDO_CHARACTERS,
ch,
0,
ch.length);
}
catch (IOException ioe)
{
// ignore ?
}
catch (SAXException se)
{
// ignore ?
}
}
} } | public class class_name {
protected void firePseudoAttributes()
{
if (m_tracer != null)
{
try
{
// flush out the "<elemName" if not already flushed
m_writer.flush(); // depends on control dependency: [try], data = [none]
// make a StringBuffer to write the name="value" pairs to.
StringBuffer sb = new StringBuffer();
int nAttrs = m_attributes.getLength();
if (nAttrs > 0)
{
// make a writer that internally appends to the same
// StringBuffer
java.io.Writer writer =
new ToStream.WritertoStringBuffer(sb);
processAttributes(writer, nAttrs); // depends on control dependency: [if], data = [none]
// Don't clear the attributes!
// We only want to see what would be written out
// at this point, we don't want to loose them.
}
sb.append('>'); // the potential > after the attributes. // depends on control dependency: [try], data = [none]
// convert the StringBuffer to a char array and
// emit the trace event that these characters "might"
// be written
char ch[] = sb.toString().toCharArray();
m_tracer.fireGenerateEvent(
SerializerTrace.EVENTTYPE_OUTPUT_PSEUDO_CHARACTERS,
ch,
0,
ch.length); // depends on control dependency: [try], data = [none]
}
catch (IOException ioe)
{
// ignore ?
} // depends on control dependency: [catch], data = [none]
catch (SAXException se)
{
// ignore ?
} // depends on control dependency: [catch], data = [none]
}
} } |
public class class_name {
private Reflections getReflection(List<String> packNameList) {
//
// filter
//
FilterBuilder filterBuilder = new FilterBuilder().includePackage(Constants.DISCONF_PACK_NAME);
for (String packName : packNameList) {
filterBuilder = filterBuilder.includePackage(packName);
}
Predicate<String> filter = filterBuilder;
//
// urls
//
Collection<URL> urlTotals = new ArrayList<URL>();
for (String packName : packNameList) {
Set<URL> urls = ClasspathHelper.forPackage(packName);
urlTotals.addAll(urls);
}
//
Reflections reflections = new Reflections(new ConfigurationBuilder().filterInputsBy(filter)
.setScanners(new SubTypesScanner().filterResultsBy(filter),
new TypeAnnotationsScanner()
.filterResultsBy(filter),
new FieldAnnotationsScanner()
.filterResultsBy(filter),
new MethodAnnotationsScanner()
.filterResultsBy(filter),
new MethodParameterScanner()).setUrls(urlTotals));
return reflections;
} } | public class class_name {
private Reflections getReflection(List<String> packNameList) {
//
// filter
//
FilterBuilder filterBuilder = new FilterBuilder().includePackage(Constants.DISCONF_PACK_NAME);
for (String packName : packNameList) {
filterBuilder = filterBuilder.includePackage(packName); // depends on control dependency: [for], data = [packName]
}
Predicate<String> filter = filterBuilder;
//
// urls
//
Collection<URL> urlTotals = new ArrayList<URL>();
for (String packName : packNameList) {
Set<URL> urls = ClasspathHelper.forPackage(packName);
urlTotals.addAll(urls); // depends on control dependency: [for], data = [none]
}
//
Reflections reflections = new Reflections(new ConfigurationBuilder().filterInputsBy(filter)
.setScanners(new SubTypesScanner().filterResultsBy(filter),
new TypeAnnotationsScanner()
.filterResultsBy(filter),
new FieldAnnotationsScanner()
.filterResultsBy(filter),
new MethodAnnotationsScanner()
.filterResultsBy(filter),
new MethodParameterScanner()).setUrls(urlTotals));
return reflections;
} } |
public class class_name {
public void clearAll() {
ThreadSafeBitSetSegments segments = this.segments.get();
for(int i=0;i<segments.numSegments();i++) {
AtomicLongArray segment = segments.getSegment(i);
for(int j=0;j<segment.length();j++) {
segment.set(j, 0L);
}
}
} } | public class class_name {
public void clearAll() {
ThreadSafeBitSetSegments segments = this.segments.get();
for(int i=0;i<segments.numSegments();i++) {
AtomicLongArray segment = segments.getSegment(i);
for(int j=0;j<segment.length();j++) {
segment.set(j, 0L); // depends on control dependency: [for], data = [j]
}
}
} } |
public class class_name {
private static JobRecord queryJobOrAbandonTask(Key key, JobRecord.InflationType inflationType) {
try {
return backEnd.queryJob(key, inflationType);
} catch (NoSuchObjectException e) {
logger.log(
Level.WARNING, "Cannot find some part of the job: " + key + ". Ignoring the task.", e);
throw new AbandonTaskException();
}
} } | public class class_name {
private static JobRecord queryJobOrAbandonTask(Key key, JobRecord.InflationType inflationType) {
try {
return backEnd.queryJob(key, inflationType); // depends on control dependency: [try], data = [none]
} catch (NoSuchObjectException e) {
logger.log(
Level.WARNING, "Cannot find some part of the job: " + key + ". Ignoring the task.", e);
throw new AbandonTaskException();
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
private void genNullCheck(DiagnosticPosition pos) {
if (allowBetterNullChecks) {
callMethod(pos, syms.objectsType, names.requireNonNull,
List.of(syms.objectType), true);
} else {
callMethod(pos, syms.objectType, names.getClass,
List.nil(), false);
}
code.emitop0(pop);
} } | public class class_name {
private void genNullCheck(DiagnosticPosition pos) {
if (allowBetterNullChecks) {
callMethod(pos, syms.objectsType, names.requireNonNull,
List.of(syms.objectType), true); // depends on control dependency: [if], data = [none]
} else {
callMethod(pos, syms.objectType, names.getClass,
List.nil(), false); // depends on control dependency: [if], data = [none]
}
code.emitop0(pop);
} } |
public class class_name {
public static InputStream recordMerge(File directory, String branchName, long... revisions) throws IOException {
// svn merge -c 3328 --record-only ^/calc/trunk
CommandLine cmdLine = new CommandLine(SVN_CMD);
cmdLine.addArgument(CMD_MERGE);
addDefaultArguments(cmdLine, null, null);
cmdLine.addArgument("--record-only");
cmdLine.addArgument("-c");
StringBuilder revs = new StringBuilder();
for (long revision : revisions) {
revs.append(revision).append(",");
}
cmdLine.addArgument(revs.toString());
// leads to "non-inheritable merges"
// cmdLine.addArgument("--depth");
// cmdLine.addArgument("empty");
cmdLine.addArgument("^" + branchName);
//cmdLine.addArgument("."); // current dir
return ExecutionHelper.getCommandResult(cmdLine, directory, 0, 120000);
} } | public class class_name {
public static InputStream recordMerge(File directory, String branchName, long... revisions) throws IOException {
// svn merge -c 3328 --record-only ^/calc/trunk
CommandLine cmdLine = new CommandLine(SVN_CMD);
cmdLine.addArgument(CMD_MERGE);
addDefaultArguments(cmdLine, null, null);
cmdLine.addArgument("--record-only");
cmdLine.addArgument("-c");
StringBuilder revs = new StringBuilder();
for (long revision : revisions) {
revs.append(revision).append(","); // depends on control dependency: [for], data = [revision]
}
cmdLine.addArgument(revs.toString());
// leads to "non-inheritable merges"
// cmdLine.addArgument("--depth");
// cmdLine.addArgument("empty");
cmdLine.addArgument("^" + branchName);
//cmdLine.addArgument("."); // current dir
return ExecutionHelper.getCommandResult(cmdLine, directory, 0, 120000);
} } |
public class class_name {
public void mutate(float amount) {
for (int i = 0; i < numKnots; i++) {
int rgb = yKnots[i];
int r = ((rgb >> 16) & 0xff);
int g = ((rgb >> 8) & 0xff);
int b = (rgb & 0xff);
r = PixelUtils.clamp( (int)(r + amount * 255 * (Math.random()-0.5)) );
g = PixelUtils.clamp( (int)(g + amount * 255 * (Math.random()-0.5)) );
b = PixelUtils.clamp( (int)(b + amount * 255 * (Math.random()-0.5)) );
yKnots[i] = 0xff000000 | (r << 16) | (g << 8) | b;
knotTypes[i] = RGB|SPLINE;
}
sortKnots();
rebuildGradient();
} } | public class class_name {
public void mutate(float amount) {
for (int i = 0; i < numKnots; i++) {
int rgb = yKnots[i];
int r = ((rgb >> 16) & 0xff);
int g = ((rgb >> 8) & 0xff);
int b = (rgb & 0xff);
r = PixelUtils.clamp( (int)(r + amount * 255 * (Math.random()-0.5)) ); // depends on control dependency: [for], data = [none]
g = PixelUtils.clamp( (int)(g + amount * 255 * (Math.random()-0.5)) ); // depends on control dependency: [for], data = [none]
b = PixelUtils.clamp( (int)(b + amount * 255 * (Math.random()-0.5)) ); // depends on control dependency: [for], data = [none]
yKnots[i] = 0xff000000 | (r << 16) | (g << 8) | b; // depends on control dependency: [for], data = [i]
knotTypes[i] = RGB|SPLINE; // depends on control dependency: [for], data = [i]
}
sortKnots();
rebuildGradient();
} } |
public class class_name {
public ListBuilder<T> addAll( Iterable<T> elements)
{
for( T element : elements)
{
list_.add( element);
}
return this;
} } | public class class_name {
public ListBuilder<T> addAll( Iterable<T> elements)
{
for( T element : elements)
{
list_.add( element); // depends on control dependency: [for], data = [element]
}
return this;
} } |
public class class_name {
public static Class<?> mapSimpleType(RamlParamType param, String format, String rawType) {
switch (param) {
case BOOLEAN:
return Boolean.class;
case DATE:
return mapDateFormat(rawType);
case INTEGER: {
Class<?> fromFormat = mapNumberFromFormat(format);
if (fromFormat == Double.class) {
throw new IllegalStateException();
}
if (fromFormat == null) {
return Long.class; // retained for backward compatibility
} else {
return fromFormat;
}
}
case NUMBER: {
Class<?> fromFormat = mapNumberFromFormat(format);
if (fromFormat == null) {
return BigDecimal.class; // retained for backward
// compatibility
} else {
return fromFormat;
}
}
case FILE:
return MultipartFile.class;
default:
return String.class;
}
} } | public class class_name {
public static Class<?> mapSimpleType(RamlParamType param, String format, String rawType) {
switch (param) {
case BOOLEAN:
return Boolean.class;
case DATE:
return mapDateFormat(rawType);
case INTEGER: {
Class<?> fromFormat = mapNumberFromFormat(format);
if (fromFormat == Double.class) {
throw new IllegalStateException();
}
if (fromFormat == null) {
return Long.class; // retained for backward compatibility // depends on control dependency: [if], data = [none]
} else {
return fromFormat; // depends on control dependency: [if], data = [none]
}
}
case NUMBER: {
Class<?> fromFormat = mapNumberFromFormat(format);
if (fromFormat == null) {
return BigDecimal.class; // retained for backward
// compatibility
} else {
return fromFormat;
}
}
case FILE:
return MultipartFile.class;
default:
return String.class;
}
} } |
public class class_name {
@Override
public void savePolicy(Model model) {
if (filePath.equals("")) {
throw new Error("invalid file path, file path cannot be empty");
}
StringBuilder tmp = new StringBuilder();
for (Map.Entry<String, Assertion> entry : model.model.get("p").entrySet()) {
String ptype = entry.getKey();
Assertion ast = entry.getValue();
for (List<String> rule : ast.policy) {
tmp.append(ptype + ", ");
tmp.append(Util.arrayToString(rule));
tmp.append("\n");
}
}
for (Map.Entry<String, Assertion> entry : model.model.get("g").entrySet()) {
String ptype = entry.getKey();
Assertion ast = entry.getValue();
for (List<String> rule : ast.policy) {
tmp.append(ptype + ", ");
tmp.append(Util.arrayToString(rule));
tmp.append("\n");
}
}
savePolicyFile(tmp.toString().trim());
} } | public class class_name {
@Override
public void savePolicy(Model model) {
if (filePath.equals("")) {
throw new Error("invalid file path, file path cannot be empty");
}
StringBuilder tmp = new StringBuilder();
for (Map.Entry<String, Assertion> entry : model.model.get("p").entrySet()) {
String ptype = entry.getKey();
Assertion ast = entry.getValue();
for (List<String> rule : ast.policy) {
tmp.append(ptype + ", "); // depends on control dependency: [for], data = [none]
tmp.append(Util.arrayToString(rule)); // depends on control dependency: [for], data = [rule]
tmp.append("\n"); // depends on control dependency: [for], data = [none]
}
}
for (Map.Entry<String, Assertion> entry : model.model.get("g").entrySet()) {
String ptype = entry.getKey();
Assertion ast = entry.getValue();
for (List<String> rule : ast.policy) {
tmp.append(ptype + ", "); // depends on control dependency: [for], data = [none]
tmp.append(Util.arrayToString(rule)); // depends on control dependency: [for], data = [rule]
tmp.append("\n"); // depends on control dependency: [for], data = [none]
}
}
savePolicyFile(tmp.toString().trim());
} } |
public class class_name {
public static <I, O> List<Example<DynamicAssignment, DynamicAssignment>> reformatTrainingData(
List<? extends TaggedSequence<I, O>> sequences, FeatureVectorGenerator<LocalContext<I>> featureGen,
Function<? super LocalContext<I>, ? extends Object> inputGen, DynamicVariableSet modelVariables,
I startInput, O startLabel) {
Preconditions.checkArgument(!(startInput == null ^ startLabel == null));
DynamicVariableSet plate = modelVariables.getPlate(PLATE_NAME);
VariableNumMap x = plate.getFixedVariables().getVariablesByName(INPUT_FEATURES_NAME);
VariableNumMap xInput = plate.getFixedVariables().getVariablesByName(INPUT_NAME);
VariableNumMap y = plate.getFixedVariables().getVariablesByName(OUTPUT_NAME);
List<Example<DynamicAssignment, DynamicAssignment>> examples = Lists.newArrayList();
for (TaggedSequence<I, O> sequence : sequences) {
List<Assignment> inputs = Lists.newArrayList();
if (startInput != null) {
List<I> newItems = Lists.newArrayList();
newItems.add(startInput);
newItems.addAll(sequence.getItems());
LocalContext<I> startContext = new ListLocalContext<I>(newItems, 0);
Assignment inputFeatureVector = x.outcomeArrayToAssignment(featureGen.apply(startContext));
Assignment inputElement = xInput.outcomeArrayToAssignment(inputGen.apply(startContext));
Assignment firstLabel = y.outcomeArrayToAssignment(startLabel);
inputs.add(Assignment.unionAll(inputFeatureVector, inputElement, firstLabel));
}
List<LocalContext<I>> contexts = sequence.getLocalContexts();
for (int i = 0; i < contexts.size(); i++) {
Assignment inputFeatureVector = x.outcomeArrayToAssignment(featureGen.apply(contexts.get(i)));
Assignment inputElement = xInput.outcomeArrayToAssignment(inputGen.apply(contexts.get(i)));
inputs.add(inputFeatureVector.union(inputElement));
}
DynamicAssignment input = DynamicAssignment.createPlateAssignment(PLATE_NAME, inputs);
DynamicAssignment output = DynamicAssignment.EMPTY;
if (sequence.getLabels() != null) {
List<Assignment> outputs = Lists.newArrayList();
if (startInput != null) {
// First label is given (and equal to the special start label).
outputs.add(Assignment.EMPTY);
}
List<O> labels = sequence.getLabels();
for (int i = 0; i < contexts.size(); i++) {
outputs.add(y.outcomeArrayToAssignment(labels.get(i)));
}
output = DynamicAssignment.createPlateAssignment(PLATE_NAME, outputs);
}
examples.add(Example.create(input, output));
}
return examples;
} } | public class class_name {
public static <I, O> List<Example<DynamicAssignment, DynamicAssignment>> reformatTrainingData(
List<? extends TaggedSequence<I, O>> sequences, FeatureVectorGenerator<LocalContext<I>> featureGen,
Function<? super LocalContext<I>, ? extends Object> inputGen, DynamicVariableSet modelVariables,
I startInput, O startLabel) {
Preconditions.checkArgument(!(startInput == null ^ startLabel == null));
DynamicVariableSet plate = modelVariables.getPlate(PLATE_NAME);
VariableNumMap x = plate.getFixedVariables().getVariablesByName(INPUT_FEATURES_NAME);
VariableNumMap xInput = plate.getFixedVariables().getVariablesByName(INPUT_NAME);
VariableNumMap y = plate.getFixedVariables().getVariablesByName(OUTPUT_NAME);
List<Example<DynamicAssignment, DynamicAssignment>> examples = Lists.newArrayList();
for (TaggedSequence<I, O> sequence : sequences) {
List<Assignment> inputs = Lists.newArrayList();
if (startInput != null) {
List<I> newItems = Lists.newArrayList();
newItems.add(startInput); // depends on control dependency: [if], data = [(startInput]
newItems.addAll(sequence.getItems()); // depends on control dependency: [if], data = [none]
LocalContext<I> startContext = new ListLocalContext<I>(newItems, 0);
Assignment inputFeatureVector = x.outcomeArrayToAssignment(featureGen.apply(startContext));
Assignment inputElement = xInput.outcomeArrayToAssignment(inputGen.apply(startContext));
Assignment firstLabel = y.outcomeArrayToAssignment(startLabel);
inputs.add(Assignment.unionAll(inputFeatureVector, inputElement, firstLabel)); // depends on control dependency: [if], data = [none]
}
List<LocalContext<I>> contexts = sequence.getLocalContexts();
for (int i = 0; i < contexts.size(); i++) {
Assignment inputFeatureVector = x.outcomeArrayToAssignment(featureGen.apply(contexts.get(i)));
Assignment inputElement = xInput.outcomeArrayToAssignment(inputGen.apply(contexts.get(i)));
inputs.add(inputFeatureVector.union(inputElement)); // depends on control dependency: [for], data = [none]
}
DynamicAssignment input = DynamicAssignment.createPlateAssignment(PLATE_NAME, inputs);
DynamicAssignment output = DynamicAssignment.EMPTY;
if (sequence.getLabels() != null) {
List<Assignment> outputs = Lists.newArrayList();
if (startInput != null) {
// First label is given (and equal to the special start label).
outputs.add(Assignment.EMPTY); // depends on control dependency: [if], data = [none]
}
List<O> labels = sequence.getLabels();
for (int i = 0; i < contexts.size(); i++) {
outputs.add(y.outcomeArrayToAssignment(labels.get(i))); // depends on control dependency: [for], data = [i]
}
output = DynamicAssignment.createPlateAssignment(PLATE_NAME, outputs); // depends on control dependency: [if], data = [none]
}
examples.add(Example.create(input, output)); // depends on control dependency: [for], data = [none]
}
return examples;
} } |
public class class_name {
public final void commitUpdate(PersistentTransaction transaction) throws SevereMessageStoreException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "commitUpdate", transaction);
AbstractItem item = null;
boolean linkHasBecomeReleasable = false;
synchronized (this)
{
if (ItemLinkState.STATE_UPDATING_DATA == _itemLinkState)
{
_assertCorrectTransaction(transaction);
item = _getAndAssertItem();
ListStatistics stats = getParentStatistics();
synchronized (stats)
{
stats.decrementUpdating();
stats.incrementAvailable();
}
linkHasBecomeReleasable = _declareDiscardable();
_itemLinkState = ItemLinkState.STATE_AVAILABLE;
}
else
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
SibTr.event(this, tc, "Invalid Item state: " + _itemLinkState);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "commitUpdate");
throw new StateException(_itemLinkState.toString());
}
_transactionId = null;
}
// this stuff has to be done outside the synchronized block
if (linkHasBecomeReleasable)
{
_declareReleasable(item);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "commitUpdate");
} } | public class class_name {
public final void commitUpdate(PersistentTransaction transaction) throws SevereMessageStoreException
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "commitUpdate", transaction);
AbstractItem item = null;
boolean linkHasBecomeReleasable = false;
synchronized (this)
{
if (ItemLinkState.STATE_UPDATING_DATA == _itemLinkState)
{
_assertCorrectTransaction(transaction); // depends on control dependency: [if], data = [none]
item = _getAndAssertItem(); // depends on control dependency: [if], data = [none]
ListStatistics stats = getParentStatistics();
synchronized (stats) // depends on control dependency: [if], data = [none]
{
stats.decrementUpdating();
stats.incrementAvailable();
}
linkHasBecomeReleasable = _declareDiscardable(); // depends on control dependency: [if], data = [none]
_itemLinkState = ItemLinkState.STATE_AVAILABLE; // depends on control dependency: [if], data = [none]
}
else
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled())
SibTr.event(this, tc, "Invalid Item state: " + _itemLinkState);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "commitUpdate");
throw new StateException(_itemLinkState.toString());
}
_transactionId = null;
}
// this stuff has to be done outside the synchronized block
if (linkHasBecomeReleasable)
{
_declareReleasable(item);
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "commitUpdate");
} } |
public class class_name {
public void preallocateRids(final OTransactionInternal clientTx) {
try {
checkOpenness();
checkLowDiskSpaceRequestsAndReadOnlyConditions();
final Iterable<ORecordOperation> entries = clientTx.getRecordOperations();
final TreeMap<Integer, OCluster> clustersToLock = new TreeMap<>();
final Set<ORecordOperation> newRecords = new TreeSet<>(COMMIT_RECORD_OPERATION_COMPARATOR);
for (final ORecordOperation txEntry : entries) {
if (txEntry.type == ORecordOperation.CREATED) {
newRecords.add(txEntry);
final int clusterId = txEntry.getRID().getClusterId();
clustersToLock.put(clusterId, getClusterById(clusterId));
}
}
stateLock.acquireReadLock();
try {
checkOpenness();
makeStorageDirty();
boolean rollback = false;
atomicOperationsManager.startAtomicOperation((String) null, true);
try {
lockClusters(clustersToLock);
for (final ORecordOperation txEntry : newRecords) {
final ORecord rec = txEntry.getRecord();
if (!rec.getIdentity().isPersistent()) {
if (rec.isDirty()) {
//This allocate a position for a new record
final ORecordId rid = (ORecordId) rec.getIdentity().copy();
final ORecordId oldRID = rid.copy();
final OCluster cluster = getClusterById(rid.getClusterId());
final OPhysicalPosition ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
rid.setClusterPosition(ppos.clusterPosition);
clientTx.updateIdentityAfterCommit(oldRID, rid);
}
} else {
//This allocate position starting from a valid rid, used in distributed for allocate the same position on other nodes
final ORecordId rid = (ORecordId) rec.getIdentity();
final OPaginatedCluster cluster = (OPaginatedCluster) getClusterById(rid.getClusterId());
OPaginatedCluster.RECORD_STATUS recordStatus = cluster.getRecordStatus(rid.getClusterPosition());
if (recordStatus == OPaginatedCluster.RECORD_STATUS.NOT_EXISTENT) {
OPhysicalPosition ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
while (ppos.clusterPosition < rid.getClusterPosition()) {
ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
}
if (ppos.clusterPosition != rid.getClusterPosition()) {
throw new OConcurrentCreateException(rid, new ORecordId(rid.getClusterId(), ppos.clusterPosition));
}
} else if (recordStatus == OPaginatedCluster.RECORD_STATUS.PRESENT
|| recordStatus == OPaginatedCluster.RECORD_STATUS.REMOVED) {
final OPhysicalPosition ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
throw new OConcurrentCreateException(rid, new ORecordId(rid.getClusterId(), ppos.clusterPosition));
}
}
}
} catch (final Exception e) {
rollback = true;
throw e;
} finally {
atomicOperationsManager.endAtomicOperation(rollback);
}
} catch (final IOException | RuntimeException ioe) {
throw OException.wrapException(new OStorageException("Could not preallocate RIDs"), ioe);
} finally {
stateLock.releaseReadLock();
}
} catch (final RuntimeException ee) {
throw logAndPrepareForRethrow(ee);
} catch (final Error ee) {
throw logAndPrepareForRethrow(ee);
} catch (final Throwable t) {
throw logAndPrepareForRethrow(t);
}
} } | public class class_name {
public void preallocateRids(final OTransactionInternal clientTx) {
try {
checkOpenness(); // depends on control dependency: [try], data = [none]
checkLowDiskSpaceRequestsAndReadOnlyConditions(); // depends on control dependency: [try], data = [none]
final Iterable<ORecordOperation> entries = clientTx.getRecordOperations();
final TreeMap<Integer, OCluster> clustersToLock = new TreeMap<>();
final Set<ORecordOperation> newRecords = new TreeSet<>(COMMIT_RECORD_OPERATION_COMPARATOR);
for (final ORecordOperation txEntry : entries) {
if (txEntry.type == ORecordOperation.CREATED) {
newRecords.add(txEntry); // depends on control dependency: [if], data = [none]
final int clusterId = txEntry.getRID().getClusterId();
clustersToLock.put(clusterId, getClusterById(clusterId)); // depends on control dependency: [if], data = [none]
}
}
stateLock.acquireReadLock(); // depends on control dependency: [try], data = [none]
try {
checkOpenness(); // depends on control dependency: [try], data = [none]
makeStorageDirty(); // depends on control dependency: [try], data = [none]
boolean rollback = false;
atomicOperationsManager.startAtomicOperation((String) null, true); // depends on control dependency: [try], data = [none]
try {
lockClusters(clustersToLock); // depends on control dependency: [try], data = [none]
for (final ORecordOperation txEntry : newRecords) {
final ORecord rec = txEntry.getRecord();
if (!rec.getIdentity().isPersistent()) {
if (rec.isDirty()) {
//This allocate a position for a new record
final ORecordId rid = (ORecordId) rec.getIdentity().copy();
final ORecordId oldRID = rid.copy();
final OCluster cluster = getClusterById(rid.getClusterId());
final OPhysicalPosition ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
rid.setClusterPosition(ppos.clusterPosition); // depends on control dependency: [if], data = [none]
clientTx.updateIdentityAfterCommit(oldRID, rid); // depends on control dependency: [if], data = [none]
}
} else {
//This allocate position starting from a valid rid, used in distributed for allocate the same position on other nodes
final ORecordId rid = (ORecordId) rec.getIdentity();
final OPaginatedCluster cluster = (OPaginatedCluster) getClusterById(rid.getClusterId());
OPaginatedCluster.RECORD_STATUS recordStatus = cluster.getRecordStatus(rid.getClusterPosition());
if (recordStatus == OPaginatedCluster.RECORD_STATUS.NOT_EXISTENT) {
OPhysicalPosition ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
while (ppos.clusterPosition < rid.getClusterPosition()) {
ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec)); // depends on control dependency: [while], data = [none]
}
if (ppos.clusterPosition != rid.getClusterPosition()) {
throw new OConcurrentCreateException(rid, new ORecordId(rid.getClusterId(), ppos.clusterPosition));
}
} else if (recordStatus == OPaginatedCluster.RECORD_STATUS.PRESENT
|| recordStatus == OPaginatedCluster.RECORD_STATUS.REMOVED) {
final OPhysicalPosition ppos = cluster.allocatePosition(ORecordInternal.getRecordType(rec));
throw new OConcurrentCreateException(rid, new ORecordId(rid.getClusterId(), ppos.clusterPosition));
}
}
}
} catch (final Exception e) {
rollback = true;
throw e;
} finally { // depends on control dependency: [catch], data = [none]
atomicOperationsManager.endAtomicOperation(rollback);
}
} catch (final IOException | RuntimeException ioe) {
throw OException.wrapException(new OStorageException("Could not preallocate RIDs"), ioe);
} finally { // depends on control dependency: [catch], data = [none]
stateLock.releaseReadLock();
}
} catch (final RuntimeException ee) {
throw logAndPrepareForRethrow(ee);
} catch (final Error ee) { // depends on control dependency: [catch], data = [none]
throw logAndPrepareForRethrow(ee);
} catch (final Throwable t) { // depends on control dependency: [catch], data = [none]
throw logAndPrepareForRethrow(t);
} // depends on control dependency: [catch], data = [none]
} } |
public class class_name {
public final Buffer limit (int newLimit) {
if (newLimit < 0 || newLimit > capacity) {
throw new IllegalArgumentException();
}
limit = newLimit;
if (position > newLimit) {
position = newLimit;
}
if ((mark != UNSET_MARK) && (mark > newLimit)) {
mark = UNSET_MARK;
}
return this;
} } | public class class_name {
public final Buffer limit (int newLimit) {
if (newLimit < 0 || newLimit > capacity) {
throw new IllegalArgumentException();
}
limit = newLimit;
if (position > newLimit) {
position = newLimit; // depends on control dependency: [if], data = [none]
}
if ((mark != UNSET_MARK) && (mark > newLimit)) {
mark = UNSET_MARK; // depends on control dependency: [if], data = [none]
}
return this;
} } |
public class class_name {
public PhoneNumberOrder withOrderedPhoneNumbers(OrderedPhoneNumber... orderedPhoneNumbers) {
if (this.orderedPhoneNumbers == null) {
setOrderedPhoneNumbers(new java.util.ArrayList<OrderedPhoneNumber>(orderedPhoneNumbers.length));
}
for (OrderedPhoneNumber ele : orderedPhoneNumbers) {
this.orderedPhoneNumbers.add(ele);
}
return this;
} } | public class class_name {
public PhoneNumberOrder withOrderedPhoneNumbers(OrderedPhoneNumber... orderedPhoneNumbers) {
if (this.orderedPhoneNumbers == null) {
setOrderedPhoneNumbers(new java.util.ArrayList<OrderedPhoneNumber>(orderedPhoneNumbers.length)); // depends on control dependency: [if], data = [none]
}
for (OrderedPhoneNumber ele : orderedPhoneNumbers) {
this.orderedPhoneNumbers.add(ele); // depends on control dependency: [for], data = [ele]
}
return this;
} } |
public class class_name {
public boolean expire(boolean cancelTimer)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "expire", new Boolean(cancelTimer));
if ((expiryHandle != null) && cancelTimer)
{
expiryHandle.cancel();
}
expiryHandle = null;
if (!satisfied)
{
expired = true;
if (TraceComponent.isAnyTracingEnabled() && UserTrace.tc_mt.isDebugEnabled())
SibTr.debug(UserTrace.tc_mt,
nls_mt.getFormattedMessage(
"REMOTE_REQUEST_EXPIRED_CWSJU0033",
new Object[] {
tick},
null));
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "expire", new Boolean(expired));
return expired;
} } | public class class_name {
public boolean expire(boolean cancelTimer)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "expire", new Boolean(cancelTimer));
if ((expiryHandle != null) && cancelTimer)
{
expiryHandle.cancel(); // depends on control dependency: [if], data = [none]
}
expiryHandle = null;
if (!satisfied)
{
expired = true; // depends on control dependency: [if], data = [none]
if (TraceComponent.isAnyTracingEnabled() && UserTrace.tc_mt.isDebugEnabled())
SibTr.debug(UserTrace.tc_mt,
nls_mt.getFormattedMessage(
"REMOTE_REQUEST_EXPIRED_CWSJU0033",
new Object[] {
tick},
null));
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "expire", new Boolean(expired));
return expired;
} } |
public class class_name {
private List<File> loadFiles(File workingDir) {
List<File> dataFile = new ArrayList<>();
if (workingDir.exists()) {
File[] files = workingDir.listFiles();
for (File eachFile : files) {
if (eachFile.isDirectory()) {
dataFile.addAll(loadFiles(eachFile));
} else if (eachFile.getName().endsWith(".yaml") || eachFile.getName().endsWith(".yml")) {
dataFile.add(eachFile);
}
}
}
return dataFile;
} } | public class class_name {
private List<File> loadFiles(File workingDir) {
List<File> dataFile = new ArrayList<>();
if (workingDir.exists()) {
File[] files = workingDir.listFiles();
for (File eachFile : files) {
if (eachFile.isDirectory()) {
dataFile.addAll(loadFiles(eachFile)); // depends on control dependency: [if], data = [none]
} else if (eachFile.getName().endsWith(".yaml") || eachFile.getName().endsWith(".yml")) {
dataFile.add(eachFile);
}
}
}
return dataFile;
} } |
public class class_name {
public String getFinalReason(Path path) {
StringBuilder sb = new StringBuilder(path + " cannot be modified; ");
StringBuilder finalPath = new StringBuilder("/");
// Step through the nodes based on the given path. If any intermediate
// nodes are marked as final, we can just return true.
Node currentNode = root;
for (Term t : path.getTerms()) {
finalPath.append(t.toString());
Node nextNode = currentNode.getChild(t);
if (nextNode == null) {
return null;
} else if (nextNode.isFinal()) {
sb.append(finalPath.toString() + " is marked as final");
return sb.toString();
}
finalPath.append("/");
currentNode = nextNode;
}
// Strip off the last slash. It is not needed.
finalPath.deleteCharAt(finalPath.length() - 1);
// Either the path itself is final or a descendant.
if (currentNode.isFinal()) {
sb.append(finalPath.toString() + " is marked as final");
} else if (currentNode.hasChild()) {
sb.append(finalPath.toString()
+ currentNode.getFinalDescendantPath()
+ " is marked as final");
return sb.toString();
} else {
return null;
}
return null;
} } | public class class_name {
public String getFinalReason(Path path) {
StringBuilder sb = new StringBuilder(path + " cannot be modified; ");
StringBuilder finalPath = new StringBuilder("/");
// Step through the nodes based on the given path. If any intermediate
// nodes are marked as final, we can just return true.
Node currentNode = root;
for (Term t : path.getTerms()) {
finalPath.append(t.toString()); // depends on control dependency: [for], data = [t]
Node nextNode = currentNode.getChild(t);
if (nextNode == null) {
return null; // depends on control dependency: [if], data = [none]
} else if (nextNode.isFinal()) {
sb.append(finalPath.toString() + " is marked as final"); // depends on control dependency: [if], data = [none]
return sb.toString(); // depends on control dependency: [if], data = [none]
}
finalPath.append("/"); // depends on control dependency: [for], data = [t]
currentNode = nextNode; // depends on control dependency: [for], data = [t]
}
// Strip off the last slash. It is not needed.
finalPath.deleteCharAt(finalPath.length() - 1);
// Either the path itself is final or a descendant.
if (currentNode.isFinal()) {
sb.append(finalPath.toString() + " is marked as final"); // depends on control dependency: [if], data = [none]
} else if (currentNode.hasChild()) {
sb.append(finalPath.toString()
+ currentNode.getFinalDescendantPath()
+ " is marked as final"); // depends on control dependency: [if], data = [none]
return sb.toString(); // depends on control dependency: [if], data = [none]
} else {
return null; // depends on control dependency: [if], data = [none]
}
return null;
} } |
public class class_name {
public void setArtifacts(java.util.Collection<Artifact> artifacts) {
if (artifacts == null) {
this.artifacts = null;
return;
}
this.artifacts = new java.util.ArrayList<Artifact>(artifacts);
} } | public class class_name {
public void setArtifacts(java.util.Collection<Artifact> artifacts) {
if (artifacts == null) {
this.artifacts = null; // depends on control dependency: [if], data = [none]
return; // depends on control dependency: [if], data = [none]
}
this.artifacts = new java.util.ArrayList<Artifact>(artifacts);
} } |
public class class_name {
public static <T extends ImageGray<T>>
void yuvToRgb(Planar<T> yuv , Planar<T> rgb) {
rgb.reshape(rgb.width,rgb.height,3);
if( rgb.getBandType() == GrayF32.class ) {
if(BoofConcurrency.USE_CONCURRENT ) {
ImplColorYuv_MT.yuvToRgb_F32((Planar<GrayF32>)yuv,(Planar<GrayF32>)rgb);
} else {
ImplColorYuv.yuvToRgb_F32((Planar<GrayF32>)yuv,(Planar<GrayF32>)rgb);
}
} else if( rgb.getBandType() == GrayU8.class ) {
if(BoofConcurrency.USE_CONCURRENT ) {
ImplColorYuv_MT.ycbcrToRgb_U8((Planar<GrayU8>)yuv,(Planar<GrayU8>)rgb);
} else {
ImplColorYuv.ycbcrToRgb_U8((Planar<GrayU8>)yuv,(Planar<GrayU8>)rgb);
}
} else {
throw new IllegalArgumentException("Unsupported band type "+rgb.getBandType().getSimpleName());
}
} } | public class class_name {
public static <T extends ImageGray<T>>
void yuvToRgb(Planar<T> yuv , Planar<T> rgb) {
rgb.reshape(rgb.width,rgb.height,3);
if( rgb.getBandType() == GrayF32.class ) {
if(BoofConcurrency.USE_CONCURRENT ) {
ImplColorYuv_MT.yuvToRgb_F32((Planar<GrayF32>)yuv,(Planar<GrayF32>)rgb); // depends on control dependency: [if], data = [none]
} else {
ImplColorYuv.yuvToRgb_F32((Planar<GrayF32>)yuv,(Planar<GrayF32>)rgb); // depends on control dependency: [if], data = [none]
}
} else if( rgb.getBandType() == GrayU8.class ) {
if(BoofConcurrency.USE_CONCURRENT ) {
ImplColorYuv_MT.ycbcrToRgb_U8((Planar<GrayU8>)yuv,(Planar<GrayU8>)rgb); // depends on control dependency: [if], data = [none]
} else {
ImplColorYuv.ycbcrToRgb_U8((Planar<GrayU8>)yuv,(Planar<GrayU8>)rgb); // depends on control dependency: [if], data = [none]
}
} else {
throw new IllegalArgumentException("Unsupported band type "+rgb.getBandType().getSimpleName());
}
} } |
public class class_name {
@Override
protected List<ColumnDescription> getColumnDescriptions(
LoginAggregationDiscriminator columnDiscriminator, LoginReportForm form) {
final String groupName = columnDiscriminator.getAggregatedGroup().getGroupName();
if (form.isTotalLogins() && form.isUniqueLogins()) {
return ImmutableList.of(
// THE ORDER OF RETURNED COLUMNS HERE MUST MATCH THE ORDER OF THE VALUES
// RETURNED IN createRowValues
new ColumnDescription(
groupName + "-uniqueLogins",
ValueType.NUMBER,
groupName + " - Unique Logins"),
new ColumnDescription(
groupName + "-totalLogins",
ValueType.NUMBER,
groupName + " - Total Logins"));
} else if (form.isUniqueLogins()) {
return Collections.singletonList(
new ColumnDescription(
groupName + "-uniqueLogins",
ValueType.NUMBER,
groupName + " - Unique Logins"));
} else {
return Collections.singletonList(
new ColumnDescription(
groupName + "-totalLogins",
ValueType.NUMBER,
groupName + " - Total Logins"));
}
} } | public class class_name {
@Override
protected List<ColumnDescription> getColumnDescriptions(
LoginAggregationDiscriminator columnDiscriminator, LoginReportForm form) {
final String groupName = columnDiscriminator.getAggregatedGroup().getGroupName();
if (form.isTotalLogins() && form.isUniqueLogins()) {
return ImmutableList.of(
// THE ORDER OF RETURNED COLUMNS HERE MUST MATCH THE ORDER OF THE VALUES
// RETURNED IN createRowValues
new ColumnDescription(
groupName + "-uniqueLogins",
ValueType.NUMBER,
groupName + " - Unique Logins"),
new ColumnDescription(
groupName + "-totalLogins",
ValueType.NUMBER,
groupName + " - Total Logins")); // depends on control dependency: [if], data = [none]
} else if (form.isUniqueLogins()) {
return Collections.singletonList(
new ColumnDescription(
groupName + "-uniqueLogins",
ValueType.NUMBER,
groupName + " - Unique Logins")); // depends on control dependency: [if], data = [none]
} else {
return Collections.singletonList(
new ColumnDescription(
groupName + "-totalLogins",
ValueType.NUMBER,
groupName + " - Total Logins")); // depends on control dependency: [if], data = [none]
}
} } |
public class class_name {
@Override
protected TermsByQueryShardResponse shardOperation(TermsByQueryShardRequest shardRequest) throws ElasticsearchException {
IndexService indexService = indicesService.indexServiceSafe(shardRequest.shardId().getIndex());
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId().id());
TermsByQueryRequest request = shardRequest.request();
OrderByShardOperation orderByOperation = OrderByShardOperation.get(request.getOrderBy(), request.maxTermsPerShard());
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(),
shardRequest.shardId().getIndex(),
shardRequest.shardId().id());
ShardSearchRequest shardSearchRequest = new ShardSearchLocalRequest(request.types(), request.nowInMillis(),
shardRequest.filteringAliases());
SearchContext context = new DefaultSearchContext(0, shardSearchRequest, shardTarget,
indexShard.acquireSearcher("termsByQuery"), indexService, indexShard, scriptService,
pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher,
SearchService.NO_TIMEOUT);
SearchContext.setCurrent(context);
try {
MappedFieldType fieldType = context.smartNameFieldType(request.field());
if (fieldType == null) {
throw new SearchContextException(context, "[termsByQuery] field '" + request.field() +
"' not found for types " + Arrays.toString(request.types()));
}
IndexFieldData indexFieldData = context.fieldData().getForField(fieldType);
BytesReference querySource = request.querySource();
if (querySource != null && querySource.length() > 0) {
XContentParser queryParser = null;
try {
queryParser = XContentFactory.xContent(querySource).createParser(querySource);
QueryParseContext.setTypes(request.types());
ParsedQuery parsedQuery = orderByOperation.getParsedQuery(queryParser, indexService);
if (parsedQuery != null) {
context.parsedQuery(parsedQuery);
}
}
finally {
QueryParseContext.removeTypes();
if (queryParser != null) {
queryParser.close();
}
}
}
context.preProcess();
// execute the search only gathering the hit count and bitset for each segment
logger.debug("{}: Executes search for collecting terms {}", Thread.currentThread().getName(),
shardRequest.shardId());
TermsCollector termsCollector = this.getTermsCollector(request.termsEncoding(), indexFieldData, context);
if (request.expectedTerms() != null) termsCollector.setExpectedTerms(request.expectedTerms());
if (request.maxTermsPerShard() != null) termsCollector.setMaxTerms(request.maxTermsPerShard());
HitStream hitStream = orderByOperation.getHitStream(context);
TermsSet terms = termsCollector.collect(hitStream);
logger.debug("{}: Returns terms response with {} terms for shard {}", Thread.currentThread().getName(),
terms.size(), shardRequest.shardId());
return new TermsByQueryShardResponse(shardRequest.shardId(), terms);
}
catch (Throwable e) {
logger.error("[termsByQuery] Error executing shard operation", e);
throw new QueryPhaseExecutionException(context, "[termsByQuery] Failed to execute query", e);
}
finally {
// this will also release the index searcher
context.close();
SearchContext.removeCurrent();
}
} } | public class class_name {
@Override
protected TermsByQueryShardResponse shardOperation(TermsByQueryShardRequest shardRequest) throws ElasticsearchException {
IndexService indexService = indicesService.indexServiceSafe(shardRequest.shardId().getIndex());
IndexShard indexShard = indexService.shardSafe(shardRequest.shardId().id());
TermsByQueryRequest request = shardRequest.request();
OrderByShardOperation orderByOperation = OrderByShardOperation.get(request.getOrderBy(), request.maxTermsPerShard());
SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().id(),
shardRequest.shardId().getIndex(),
shardRequest.shardId().id());
ShardSearchRequest shardSearchRequest = new ShardSearchLocalRequest(request.types(), request.nowInMillis(),
shardRequest.filteringAliases());
SearchContext context = new DefaultSearchContext(0, shardSearchRequest, shardTarget,
indexShard.acquireSearcher("termsByQuery"), indexService, indexShard, scriptService,
pageCacheRecycler, bigArrays, threadPool.estimatedTimeInMillisCounter(), parseFieldMatcher,
SearchService.NO_TIMEOUT);
SearchContext.setCurrent(context);
try {
MappedFieldType fieldType = context.smartNameFieldType(request.field());
if (fieldType == null) {
throw new SearchContextException(context, "[termsByQuery] field '" + request.field() +
"' not found for types " + Arrays.toString(request.types()));
}
IndexFieldData indexFieldData = context.fieldData().getForField(fieldType);
BytesReference querySource = request.querySource();
if (querySource != null && querySource.length() > 0) {
XContentParser queryParser = null;
try {
queryParser = XContentFactory.xContent(querySource).createParser(querySource); // depends on control dependency: [try], data = [none]
QueryParseContext.setTypes(request.types()); // depends on control dependency: [try], data = [none]
ParsedQuery parsedQuery = orderByOperation.getParsedQuery(queryParser, indexService);
if (parsedQuery != null) {
context.parsedQuery(parsedQuery); // depends on control dependency: [if], data = [(parsedQuery]
}
}
finally {
QueryParseContext.removeTypes();
if (queryParser != null) {
queryParser.close(); // depends on control dependency: [if], data = [none]
}
}
}
context.preProcess();
// execute the search only gathering the hit count and bitset for each segment
logger.debug("{}: Executes search for collecting terms {}", Thread.currentThread().getName(),
shardRequest.shardId());
TermsCollector termsCollector = this.getTermsCollector(request.termsEncoding(), indexFieldData, context);
if (request.expectedTerms() != null) termsCollector.setExpectedTerms(request.expectedTerms());
if (request.maxTermsPerShard() != null) termsCollector.setMaxTerms(request.maxTermsPerShard());
HitStream hitStream = orderByOperation.getHitStream(context);
TermsSet terms = termsCollector.collect(hitStream);
logger.debug("{}: Returns terms response with {} terms for shard {}", Thread.currentThread().getName(),
terms.size(), shardRequest.shardId());
return new TermsByQueryShardResponse(shardRequest.shardId(), terms);
}
catch (Throwable e) {
logger.error("[termsByQuery] Error executing shard operation", e);
throw new QueryPhaseExecutionException(context, "[termsByQuery] Failed to execute query", e);
}
finally {
// this will also release the index searcher
context.close();
SearchContext.removeCurrent();
}
} } |
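The shardOperation pair above also illustrates the other annotation forms seen in this section: statements guarded by a try block are tagged [try], and when the guard reads a variable the data field lists the shared token (e.g. data = [(parsedQuery] at the parsedQuery null-check). A small, hypothetical sketch combining both forms; the class, method, and the exact data tokens are illustrative assumptions, since the real tokenization comes from the annotation tool:

import java.io.BufferedReader;
import java.io.FileReader;
import java.io.IOException;

public class TryDependencyExample {
    public String readFirstLine(String path) throws IOException {
        BufferedReader reader = null;
        try {
            reader = new BufferedReader(new FileReader(path)); // depends on control dependency: [try], data = [none]
            String line = reader.readLine(); // depends on control dependency: [try], data = [none]
            if (line != null) {
                return line.trim(); // depends on control dependency: [if], data = [line]
            }
            return null;
        } finally {
            if (reader != null) {
                reader.close(); // depends on control dependency: [if], data = [none]
            }
        }
    }
}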