Columns:
code (string, lengths 130 to 281k)
code_dependency (string, lengths 182 to 306k)
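Each row pairs a Java snippet (code) with the same snippet re-emitted as code_dependency, in which statements governed by a control structure carry a trailing marker of the form "// depends on control dependency: [if|for|while|try|catch], data = [...]". The short sketch below is a hypothetical illustration of that convention, not a row taken from the dump:

public class Example {
    public static int clampToZero(int value) {
        // The assignment only executes when the condition holds, so the
        // annotated column marks it as depending on the enclosing if.
        if (value < 0) {
            value = 0; // depends on control dependency: [if], data = [none]
        }
        return value;
    }
}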
public class class_name { public static Set<String> buildConsumes(Method method) { Set<String> consumes = new LinkedHashSet<String>(); Class<?> controller = method.getDeclaringClass(); if (isAnnotated(controller, RequestMapping.class)) { RequestMapping requestMapping = getAnnotation(controller, RequestMapping.class); if (requestMapping.consumes().length > 0) { consumes.addAll(Arrays.asList(requestMapping.consumes())); } } if (isAnnotated(method, RequestMapping.class)) { RequestMapping requestMapping = getAnnotation(method, RequestMapping.class); if (requestMapping.consumes().length > 0) { consumes.clear(); consumes.addAll(Arrays.asList(requestMapping.consumes())); } } if(consumes.isEmpty()) { consumes.add(MediaType.APPLICATION_JSON_VALUE); } return consumes; } }
public class class_name { public static Set<String> buildConsumes(Method method) { Set<String> consumes = new LinkedHashSet<String>(); Class<?> controller = method.getDeclaringClass(); if (isAnnotated(controller, RequestMapping.class)) { RequestMapping requestMapping = getAnnotation(controller, RequestMapping.class); if (requestMapping.consumes().length > 0) { consumes.addAll(Arrays.asList(requestMapping.consumes())); // depends on control dependency: [if], data = [none] } } if (isAnnotated(method, RequestMapping.class)) { RequestMapping requestMapping = getAnnotation(method, RequestMapping.class); if (requestMapping.consumes().length > 0) { consumes.clear(); // depends on control dependency: [if], data = [none] consumes.addAll(Arrays.asList(requestMapping.consumes())); // depends on control dependency: [if], data = [none] } } if(consumes.isEmpty()) { consumes.add(MediaType.APPLICATION_JSON_VALUE); // depends on control dependency: [if], data = [none] } return consumes; } }
public class class_name { @SuppressWarnings("unchecked") public static <ViewType extends View<? extends ViewModelType>, ViewModelType extends ViewModel> ViewModelType createViewModel( ViewType view) { final Class<?> viewModelType = TypeResolver.resolveRawArgument(View.class, view.getClass()); if (viewModelType == ViewModel.class) { return null; } if (TypeResolver.Unknown.class == viewModelType) { return null; } return (ViewModelType) DependencyInjector.getInstance().getInstanceOf(viewModelType); } }
public class class_name { @SuppressWarnings("unchecked") public static <ViewType extends View<? extends ViewModelType>, ViewModelType extends ViewModel> ViewModelType createViewModel( ViewType view) { final Class<?> viewModelType = TypeResolver.resolveRawArgument(View.class, view.getClass()); if (viewModelType == ViewModel.class) { return null; // depends on control dependency: [if], data = [none] } if (TypeResolver.Unknown.class == viewModelType) { return null; // depends on control dependency: [if], data = [none] } return (ViewModelType) DependencyInjector.getInstance().getInstanceOf(viewModelType); } }
public class class_name { public InputStream getResourceAsStream(String path) { try { return (getResource(path).openStream()); } catch (Throwable t) { return (null); } } }
public class class_name { public InputStream getResourceAsStream(String path) { try { return (getResource(path).openStream()); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return (null); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public final void normalize() { Node next; for (Node node = getFirstChild(); node != null; node = next) { next = node.getNextSibling(); node.normalize(); if (node.getNodeType() == Node.TEXT_NODE) { ((TextImpl) node).minimize(); } } } }
public class class_name { @Override public final void normalize() { Node next; for (Node node = getFirstChild(); node != null; node = next) { next = node.getNextSibling(); // depends on control dependency: [for], data = [node] node.normalize(); // depends on control dependency: [for], data = [node] if (node.getNodeType() == Node.TEXT_NODE) { ((TextImpl) node).minimize(); // depends on control dependency: [if], data = [none] } } } }
public class class_name { @Override public final Map<String, OperationEntry> getOperationDescriptions(final PathAddress address, boolean inherited) { if (parent != null) { RootInvocation ri = getRootInvocation(); return ri.root.getOperationDescriptions(ri.pathAddress.append(address), inherited); } // else we are the root Map<String, OperationEntry> providers = new TreeMap<String, OperationEntry>(); getOperationDescriptions(address.iterator(), providers, inherited); return providers; } }
public class class_name { @Override public final Map<String, OperationEntry> getOperationDescriptions(final PathAddress address, boolean inherited) { if (parent != null) { RootInvocation ri = getRootInvocation(); return ri.root.getOperationDescriptions(ri.pathAddress.append(address), inherited); // depends on control dependency: [if], data = [none] } // else we are the root Map<String, OperationEntry> providers = new TreeMap<String, OperationEntry>(); getOperationDescriptions(address.iterator(), providers, inherited); return providers; } }
public class class_name { @SuppressWarnings("unchecked") public <P> P as(final Class<P> proxyType) { final boolean isMap = (object instanceof Map); final InvocationHandler handler = new InvocationHandler() { @Override @SuppressWarnings("null") public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { String name = method.getName(); // Actual method name matches always come first try { return on(type, object).call(name, args).get(); } // [#14] Emulate POJO behaviour on wrapped map objects catch (ReflectException e) { if (isMap) { Map<String, Object> map = (Map<String, Object>) object; int length = (args == null ? 0 : args.length); if (length == 0 && name.startsWith("get")) { return map.get(property(name.substring(3))); } else if (length == 0 && name.startsWith("is")) { return map.get(property(name.substring(2))); } else if (length == 1 && name.startsWith("set")) { map.put(property(name.substring(3)), args[0]); return null; } } throw e; } } }; return (P) Proxy.newProxyInstance(proxyType.getClassLoader(), new Class[] { proxyType }, handler); } }
public class class_name { @SuppressWarnings("unchecked") public <P> P as(final Class<P> proxyType) { final boolean isMap = (object instanceof Map); final InvocationHandler handler = new InvocationHandler() { @Override @SuppressWarnings("null") public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { String name = method.getName(); // Actual method name matches always come first try { return on(type, object).call(name, args).get(); } // [#14] Emulate POJO behaviour on wrapped map objects catch (ReflectException e) { if (isMap) { Map<String, Object> map = (Map<String, Object>) object; int length = (args == null ? 0 : args.length); if (length == 0 && name.startsWith("get")) { return map.get(property(name.substring(3))); // depends on control dependency: [if], data = [none] } else if (length == 0 && name.startsWith("is")) { return map.get(property(name.substring(2))); // depends on control dependency: [if], data = [none] } else if (length == 1 && name.startsWith("set")) { map.put(property(name.substring(3)), args[0]); // depends on control dependency: [if], data = [none] return null; // depends on control dependency: [if], data = [none] } } throw e; } } }; return (P) Proxy.newProxyInstance(proxyType.getClassLoader(), new Class[] { proxyType }, handler); } }
public class class_name { public static IntDoubleVector getRandomBernoulliDirection(int p) { IntDoubleVector e = new IntDoubleDenseVector(p); for (int i=0; i<p; i++) { // Bernoulli distribution chooses either positive or negative 1. e.set(i, (Prng.nextBoolean()) ? 1 : -1); } return e; } }
public class class_name { public static IntDoubleVector getRandomBernoulliDirection(int p) { IntDoubleVector e = new IntDoubleDenseVector(p); for (int i=0; i<p; i++) { // Bernoulli distribution chooses either positive or negative 1. e.set(i, (Prng.nextBoolean()) ? 1 : -1); // depends on control dependency: [for], data = [i] } return e; } }
public class class_name { public Date getLastBackupTime() { long time = System.currentTimeMillis(); String lastBackupTime = redis.getSet(KEY, Long.toString(time)); if (lastBackupTime == null || lastBackupTime.length() == 0) { return null; } long backupTime = Long.parseLong(lastBackupTime); if (backupTime <= 0) { return null; } return new Date(backupTime); } }
public class class_name { public Date getLastBackupTime() { long time = System.currentTimeMillis(); String lastBackupTime = redis.getSet(KEY, Long.toString(time)); if (lastBackupTime == null || lastBackupTime.length() == 0) { return null; // depends on control dependency: [if], data = [none] } long backupTime = Long.parseLong(lastBackupTime); if (backupTime <= 0) { return null; // depends on control dependency: [if], data = [none] } return new Date(backupTime); } }
public class class_name { public void marshall(ListDeploymentGroupsRequest listDeploymentGroupsRequest, ProtocolMarshaller protocolMarshaller) { if (listDeploymentGroupsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listDeploymentGroupsRequest.getApplicationName(), APPLICATIONNAME_BINDING); protocolMarshaller.marshall(listDeploymentGroupsRequest.getNextToken(), NEXTTOKEN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(ListDeploymentGroupsRequest listDeploymentGroupsRequest, ProtocolMarshaller protocolMarshaller) { if (listDeploymentGroupsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(listDeploymentGroupsRequest.getApplicationName(), APPLICATIONNAME_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(listDeploymentGroupsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static Date parse(String date) { Date d = parseUsingPatterns(date, STD_PATTERNS); if (d == null) { d = parseRFC822Date(date); } if (d == null) { d = parseW3CDateTime(date); } if (d == null) { try { d = DateFormat.getInstance().parse(date); } catch (ParseException e) { d = null; } } return d; } }
public class class_name { public static Date parse(String date) { Date d = parseUsingPatterns(date, STD_PATTERNS); if (d == null) { d = parseRFC822Date(date); // depends on control dependency: [if], data = [(d] } if (d == null) { d = parseW3CDateTime(date); // depends on control dependency: [if], data = [(d] } if (d == null) { try { d = DateFormat.getInstance().parse(date); // depends on control dependency: [try], data = [none] } catch (ParseException e) { d = null; } // depends on control dependency: [catch], data = [none] } return d; } }
public class class_name { @Override public File findByMillis(long timestamp) { File[] files = listFiles(filter); File result = null; long max = Long.MIN_VALUE; for (File file : files) { long time = getLogFileTimestamp(file); // Select file with a maximum time stamp which is smaller than 'timestamp'. if (max < time && time <= timestamp) { max = time; result = file; } } return result; } }
public class class_name { @Override public File findByMillis(long timestamp) { File[] files = listFiles(filter); File result = null; long max = Long.MIN_VALUE; for (File file : files) { long time = getLogFileTimestamp(file); // Select file with a maximum time stamp which is smaller than 'timestamp'. if (max < time && time <= timestamp) { max = time; // depends on control dependency: [if], data = [none] result = file; // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { public String collectionTableName(String ownerEntity, String ownerEntityTable, String associatedEntity, String associatedEntityTable, String propertyName) { String ownerTable = null; Class<?> entityClass = null; try { entityClass = Class.forName(ownerEntity); } catch (ClassNotFoundException e) { } // Just for annotation configuration,it's ownerEntity is classname(not entityName), and // ownerEntityTable is class shortname if (Character.isUpperCase(ownerEntityTable.charAt(0))) { ownerTable = tableNamingStrategy.classToTableName(entityClass); } else { ownerTable = tableName(ownerEntityTable); } String tblName = tableNamingStrategy.collectionToTableName(entityClass, ownerTable, propertyName); if (tblName.length() > MaxLength) logger.warn("{}'s length greate than 30!", tblName); return tblName; } }
public class class_name { public String collectionTableName(String ownerEntity, String ownerEntityTable, String associatedEntity, String associatedEntityTable, String propertyName) { String ownerTable = null; Class<?> entityClass = null; try { entityClass = Class.forName(ownerEntity); // depends on control dependency: [try], data = [none] } catch (ClassNotFoundException e) { } // depends on control dependency: [catch], data = [none] // Just for annotation configuration,it's ownerEntity is classname(not entityName), and // ownerEntityTable is class shortname if (Character.isUpperCase(ownerEntityTable.charAt(0))) { ownerTable = tableNamingStrategy.classToTableName(entityClass); // depends on control dependency: [if], data = [none] } else { ownerTable = tableName(ownerEntityTable); // depends on control dependency: [if], data = [none] } String tblName = tableNamingStrategy.collectionToTableName(entityClass, ownerTable, propertyName); if (tblName.length() > MaxLength) logger.warn("{}'s length greate than 30!", tblName); return tblName; } }
public class class_name { public static IPath toFilterPath(String filePath, IProject project) { IPath path = new Path(filePath); IPath commonPath; if (project != null) { commonPath = project.getLocation(); IPath relativePath = getRelativePath(path, commonPath); if (!relativePath.equals(path)) { return relativePath; } } commonPath = ResourcesPlugin.getWorkspace().getRoot().getLocation(); return getRelativePath(path, commonPath); } }
public class class_name { public static IPath toFilterPath(String filePath, IProject project) { IPath path = new Path(filePath); IPath commonPath; if (project != null) { commonPath = project.getLocation(); // depends on control dependency: [if], data = [none] IPath relativePath = getRelativePath(path, commonPath); if (!relativePath.equals(path)) { return relativePath; // depends on control dependency: [if], data = [none] } } commonPath = ResourcesPlugin.getWorkspace().getRoot().getLocation(); return getRelativePath(path, commonPath); } }
public class class_name { @SuppressWarnings("unchecked") public static void register(@SuppressWarnings("rawtypes") Map tagletMap) { WRTagTaglet tag = new WRTagTaglet(); Taglet t = (Taglet) tagletMap.get(tag.getName()); if (t != null) { tagletMap.remove(tag.getName()); } tagletMap.put(tag.getName(), tag); } }
public class class_name { @SuppressWarnings("unchecked") public static void register(@SuppressWarnings("rawtypes") Map tagletMap) { WRTagTaglet tag = new WRTagTaglet(); Taglet t = (Taglet) tagletMap.get(tag.getName()); if (t != null) { tagletMap.remove(tag.getName()); // depends on control dependency: [if], data = [(t] } tagletMap.put(tag.getName(), tag); } }
public class class_name { int indexOf(ResourcePath key) { QueryDocumentSnapshot document = keyIndex.get(key); if (document == null) { return -1; } return sortedSet.indexOf(document); } }
public class class_name { int indexOf(ResourcePath key) { QueryDocumentSnapshot document = keyIndex.get(key); if (document == null) { return -1; // depends on control dependency: [if], data = [none] } return sortedSet.indexOf(document); } }
public class class_name { @Override public RandomVariableInterface getVolatility(int timeIndex, int liborIndex) { // Create a very simple volatility model here double time = getTimeDiscretization().getTime(timeIndex); double maturity = getLiborPeriodDiscretization().getTime(liborIndex); double timeToMaturity = maturity-time; double volatilityInstanteaneous; if(timeToMaturity <= 0) { volatilityInstanteaneous = 0.0; // This forward rate is already fixed, no volatility } else { volatilityInstanteaneous = (a[liborIndex] + b[liborIndex] * timeToMaturity) * Math.exp(-c[liborIndex] * timeToMaturity) + d[liborIndex]; } if(volatilityInstanteaneous < 0.0) { volatilityInstanteaneous = Math.max(volatilityInstanteaneous,0.0); } return new RandomVariable(getTimeDiscretization().getTime(timeIndex),volatilityInstanteaneous); } }
public class class_name { @Override public RandomVariableInterface getVolatility(int timeIndex, int liborIndex) { // Create a very simple volatility model here double time = getTimeDiscretization().getTime(timeIndex); double maturity = getLiborPeriodDiscretization().getTime(liborIndex); double timeToMaturity = maturity-time; double volatilityInstanteaneous; if(timeToMaturity <= 0) { volatilityInstanteaneous = 0.0; // This forward rate is already fixed, no volatility // depends on control dependency: [if], data = [none] } else { volatilityInstanteaneous = (a[liborIndex] + b[liborIndex] * timeToMaturity) * Math.exp(-c[liborIndex] * timeToMaturity) + d[liborIndex]; // depends on control dependency: [if], data = [none] } if(volatilityInstanteaneous < 0.0) { volatilityInstanteaneous = Math.max(volatilityInstanteaneous,0.0); // depends on control dependency: [if], data = [(volatilityInstanteaneous] } return new RandomVariable(getTimeDiscretization().getTime(timeIndex),volatilityInstanteaneous); } }
public class class_name { public static boolean isUnitary(ZMatrixRMaj Q , double tol ) { if( Q.numRows < Q.numCols ) { throw new IllegalArgumentException("The number of rows must be more than or equal to the number of columns"); } Complex_F64 prod = new Complex_F64(); ZMatrixRMaj u[] = CommonOps_ZDRM.columnsToVector(Q, null); for( int i = 0; i < u.length; i++ ) { ZMatrixRMaj a = u[i]; VectorVectorMult_ZDRM.innerProdH(a, a, prod); if( Math.abs(prod.real-1) > tol) return false; if( Math.abs(prod.imaginary) > tol) return false; for( int j = i+1; j < u.length; j++ ) { VectorVectorMult_ZDRM.innerProdH(a, u[j], prod); if( !(prod.getMagnitude2() <= tol*tol)) return false; } } return true; } }
public class class_name { public static boolean isUnitary(ZMatrixRMaj Q , double tol ) { if( Q.numRows < Q.numCols ) { throw new IllegalArgumentException("The number of rows must be more than or equal to the number of columns"); } Complex_F64 prod = new Complex_F64(); ZMatrixRMaj u[] = CommonOps_ZDRM.columnsToVector(Q, null); for( int i = 0; i < u.length; i++ ) { ZMatrixRMaj a = u[i]; VectorVectorMult_ZDRM.innerProdH(a, a, prod); // depends on control dependency: [for], data = [none] if( Math.abs(prod.real-1) > tol) return false; if( Math.abs(prod.imaginary) > tol) return false; for( int j = i+1; j < u.length; j++ ) { VectorVectorMult_ZDRM.innerProdH(a, u[j], prod); // depends on control dependency: [for], data = [j] if( !(prod.getMagnitude2() <= tol*tol)) return false; } } return true; } }
public class class_name { private void pruneExcessiveHistoricalRecordsLocked() { List<HistoricalRecord> choiceRecords = mHistoricalRecords; final int pruneCount = choiceRecords.size() - mHistoryMaxSize; if (pruneCount <= 0) { return; } mHistoricalRecordsChanged = true; for (int i = 0; i < pruneCount; i++) { HistoricalRecord prunedRecord = choiceRecords.remove(0); if (DEBUG) { Log.i(LOG_TAG, "Pruned: " + prunedRecord); } } } }
public class class_name { private void pruneExcessiveHistoricalRecordsLocked() { List<HistoricalRecord> choiceRecords = mHistoricalRecords; final int pruneCount = choiceRecords.size() - mHistoryMaxSize; if (pruneCount <= 0) { return; // depends on control dependency: [if], data = [none] } mHistoricalRecordsChanged = true; for (int i = 0; i < pruneCount; i++) { HistoricalRecord prunedRecord = choiceRecords.remove(0); if (DEBUG) { Log.i(LOG_TAG, "Pruned: " + prunedRecord); // depends on control dependency: [if], data = [none] } } } }
public class class_name { protected String printList() { String output = "["; SimpleEntry pointer = first; int counter = 0; while((pointer != null) && (counter < 3)) { output += "@"+Integer.toHexString(pointer.hashCode()); pointer = pointer.next; if(pointer != null) output += ", "; counter++; } if(pointer != null) { output += "..., @"+Integer.toHexString(last.hashCode()) + "]"; } else output += "]"; return output; } }
public class class_name { protected String printList() { String output = "["; SimpleEntry pointer = first; int counter = 0; while((pointer != null) && (counter < 3)) { output += "@"+Integer.toHexString(pointer.hashCode()); // depends on control dependency: [while], data = [none] pointer = pointer.next; // depends on control dependency: [while], data = [none] if(pointer != null) output += ", "; counter++; // depends on control dependency: [while], data = [none] } if(pointer != null) { output += "..., @"+Integer.toHexString(last.hashCode()) + "]"; // depends on control dependency: [if], data = [none] } else output += "]"; return output; } }
public class class_name { public Observable<ServiceResponse<Page<JobTargetGroupInner>>> listByAgentSinglePageAsync(final String resourceGroupName, final String serverName, final String jobAgentName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (serverName == null) { throw new IllegalArgumentException("Parameter serverName is required and cannot be null."); } if (jobAgentName == null) { throw new IllegalArgumentException("Parameter jobAgentName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.listByAgent(resourceGroupName, serverName, jobAgentName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<JobTargetGroupInner>>>>() { @Override public Observable<ServiceResponse<Page<JobTargetGroupInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<JobTargetGroupInner>> result = listByAgentDelegate(response); return Observable.just(new ServiceResponse<Page<JobTargetGroupInner>>(result.body(), result.response())); } catch (Throwable t) { return Observable.error(t); } } }); } }
public class class_name { public Observable<ServiceResponse<Page<JobTargetGroupInner>>> listByAgentSinglePageAsync(final String resourceGroupName, final String serverName, final String jobAgentName) { if (resourceGroupName == null) { throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null."); } if (serverName == null) { throw new IllegalArgumentException("Parameter serverName is required and cannot be null."); } if (jobAgentName == null) { throw new IllegalArgumentException("Parameter jobAgentName is required and cannot be null."); } if (this.client.subscriptionId() == null) { throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null."); } if (this.client.apiVersion() == null) { throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null."); } return service.listByAgent(resourceGroupName, serverName, jobAgentName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent()) .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Page<JobTargetGroupInner>>>>() { @Override public Observable<ServiceResponse<Page<JobTargetGroupInner>>> call(Response<ResponseBody> response) { try { ServiceResponse<PageImpl<JobTargetGroupInner>> result = listByAgentDelegate(response); return Observable.just(new ServiceResponse<Page<JobTargetGroupInner>>(result.body(), result.response())); // depends on control dependency: [try], data = [none] } catch (Throwable t) { return Observable.error(t); } // depends on control dependency: [catch], data = [none] } }); } }
public class class_name { public void openPath(String path, I_CmsOuTreeType type, CmsUUID groupID) { if (type == null) { return; } try { expandItem(m_rootOu); String[] pathP = path.split("/"); String complPath = ""; for (String subP : pathP) { complPath += subP + "/"; CmsOrganizationalUnit ou = OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, complPath); addChildrenForOUNode(ou); expandItem(ou); } if (type.isGroup() || type.isRole()) { String itemId = type.getId() + OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, path).getName(); expandItem(itemId); if (groupID == null) { setValue(itemId); return; } setValue(groupID); return; } if (type.isUser()) { setValue(type.getId() + OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, path).getName()); return; } setValue(OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, path)); } catch (CmsException e) { LOG.error("Unable to read OU", e); } } }
public class class_name { public void openPath(String path, I_CmsOuTreeType type, CmsUUID groupID) { if (type == null) { return; // depends on control dependency: [if], data = [none] } try { expandItem(m_rootOu); // depends on control dependency: [try], data = [none] String[] pathP = path.split("/"); String complPath = ""; for (String subP : pathP) { complPath += subP + "/"; // depends on control dependency: [for], data = [subP] CmsOrganizationalUnit ou = OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, complPath); addChildrenForOUNode(ou); // depends on control dependency: [for], data = [none] expandItem(ou); // depends on control dependency: [for], data = [none] } if (type.isGroup() || type.isRole()) { String itemId = type.getId() + OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, path).getName(); expandItem(itemId); // depends on control dependency: [if], data = [none] if (groupID == null) { setValue(itemId); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } setValue(groupID); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if (type.isUser()) { setValue(type.getId() + OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, path).getName()); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } setValue(OpenCms.getOrgUnitManager().readOrganizationalUnit(m_cms, path)); // depends on control dependency: [try], data = [none] } catch (CmsException e) { LOG.error("Unable to read OU", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private DoubleArrayResult arrangingEnergy(ArrayList<Double> array) { DoubleArrayResult results = new DoubleArrayResult(); int count = array.size(); for (int i = 0; i < count; i++) { double min = array.get(0); int pos = 0; for (int j = 0; j < array.size(); j++) { double value = array.get(j); if (value < min) { min = value; pos = j; } } array.remove(pos); results.add(min); } return results; } }
public class class_name { private DoubleArrayResult arrangingEnergy(ArrayList<Double> array) { DoubleArrayResult results = new DoubleArrayResult(); int count = array.size(); for (int i = 0; i < count; i++) { double min = array.get(0); int pos = 0; for (int j = 0; j < array.size(); j++) { double value = array.get(j); if (value < min) { min = value; // depends on control dependency: [if], data = [none] pos = j; // depends on control dependency: [if], data = [none] } } array.remove(pos); // depends on control dependency: [for], data = [none] results.add(min); // depends on control dependency: [for], data = [none] } return results; } }
public class class_name { public static void closeQuietly(final MessageConsumer consumer) { if (consumer != null) { try { consumer.close(); } catch (JMSException je) { if (je.getCause() instanceof InterruptedException) { LOG.trace("ActiveMQ caught and wrapped InterruptedException"); } if (je.getCause() instanceof InterruptedIOException) { LOG.trace("ActiveMQ caught and wrapped InterruptedIOException"); } else { LOG.warnDebug(je, "While closing consumer"); } } } } }
public class class_name { public static void closeQuietly(final MessageConsumer consumer) { if (consumer != null) { try { consumer.close(); // depends on control dependency: [try], data = [none] } catch (JMSException je) { if (je.getCause() instanceof InterruptedException) { LOG.trace("ActiveMQ caught and wrapped InterruptedException"); // depends on control dependency: [if], data = [none] } if (je.getCause() instanceof InterruptedIOException) { LOG.trace("ActiveMQ caught and wrapped InterruptedIOException"); // depends on control dependency: [if], data = [none] } else { LOG.warnDebug(je, "While closing consumer"); // depends on control dependency: [if], data = [none] } } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public Optional<DataType> handleFinalSet( TraversalStep traversalStep, Object tree, String key, DataType data ) { Optional<DataType> optSub = traversalStep.get( tree, key ); if ( !optSub.isPresent() || optSub.get() == null ) { // nothing is here so just set the data traversalStep.overwriteSet( tree, key, data ); } else if ( optSub.get() instanceof List ) { // there is a list here, so we just add to it ((List<Object>) optSub.get()).add( data ); } else { // take whatever is there and make it the first element in an Array List<Object> temp = new ArrayList<>(); temp.add( optSub.get() ); temp.add( data ); traversalStep.overwriteSet( tree, key, temp ); } return Optional.of( data ); } }
public class class_name { public Optional<DataType> handleFinalSet( TraversalStep traversalStep, Object tree, String key, DataType data ) { Optional<DataType> optSub = traversalStep.get( tree, key ); if ( !optSub.isPresent() || optSub.get() == null ) { // nothing is here so just set the data traversalStep.overwriteSet( tree, key, data ); // depends on control dependency: [if], data = [none] } else if ( optSub.get() instanceof List ) { // there is a list here, so we just add to it ((List<Object>) optSub.get()).add( data ); // depends on control dependency: [if], data = [none] } else { // take whatever is there and make it the first element in an Array List<Object> temp = new ArrayList<>(); temp.add( optSub.get() ); // depends on control dependency: [if], data = [none] temp.add( data ); // depends on control dependency: [if], data = [none] traversalStep.overwriteSet( tree, key, temp ); // depends on control dependency: [if], data = [none] } return Optional.of( data ); } }
public class class_name { private static boolean isPageInTemplateDefinition(Page page) { Resource resource = page.adaptTo(Resource.class); if (resource != null) { Resource parent = resource.getParent(); if (parent != null) { return StringUtils.equals(NT_TEMPLATE, parent.getValueMap().get(JCR_PRIMARYTYPE, String.class)); } } return false; } }
public class class_name { private static boolean isPageInTemplateDefinition(Page page) { Resource resource = page.adaptTo(Resource.class); if (resource != null) { Resource parent = resource.getParent(); if (parent != null) { return StringUtils.equals(NT_TEMPLATE, parent.getValueMap().get(JCR_PRIMARYTYPE, String.class)); // depends on control dependency: [if], data = [none] } } return false; } }
public class class_name { public void setAssociatedRoles(java.util.Collection<DBInstanceRole> associatedRoles) { if (associatedRoles == null) { this.associatedRoles = null; return; } this.associatedRoles = new com.amazonaws.internal.SdkInternalList<DBInstanceRole>(associatedRoles); } }
public class class_name { public void setAssociatedRoles(java.util.Collection<DBInstanceRole> associatedRoles) { if (associatedRoles == null) { this.associatedRoles = null; // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } this.associatedRoles = new com.amazonaws.internal.SdkInternalList<DBInstanceRole>(associatedRoles); } }
public class class_name { public PolyhedralSurface toPolyhedralSurfaceWithOptions( MultiPolygonOptions multiPolygonOptions, boolean hasZ, boolean hasM) { PolyhedralSurface polyhedralSurface = new PolyhedralSurface(hasZ, hasM); for (PolygonOptions mapPolygon : multiPolygonOptions .getPolygonOptions()) { Polygon polygon = toPolygon(mapPolygon); polyhedralSurface.addPolygon(polygon); } return polyhedralSurface; } }
public class class_name { public PolyhedralSurface toPolyhedralSurfaceWithOptions( MultiPolygonOptions multiPolygonOptions, boolean hasZ, boolean hasM) { PolyhedralSurface polyhedralSurface = new PolyhedralSurface(hasZ, hasM); for (PolygonOptions mapPolygon : multiPolygonOptions .getPolygonOptions()) { Polygon polygon = toPolygon(mapPolygon); polyhedralSurface.addPolygon(polygon); // depends on control dependency: [for], data = [none] } return polyhedralSurface; } }
public class class_name { public static void main(String args[]) { String dbDriver = "COM.ibm.db2.jdbc.app.DB2Driver"; String url = "jdbc:db2:sample"; String user = "Murali"; String pass = "ibm"; String querystring = "Select * from Murali.department"; try { ConnectionProperties cp = new ConnectionProperties(dbDriver, url, user, pass); Query query = new Query(cp, querystring); QueryResults qs = query.execute(); System.out.println("Number of rows = " + qs.size()); for (int i = 0; i < qs.size(); i++) { String dept = qs.getValue("DEPTNAME", i); System.out.println("Department:" + dept); } /*Enumeration enum= qs.getRows(); while (enum.hasMoreElements()) { QueryRow qr = (QueryRow)enum.nextElement(); String fn = qr.getValue("DEPT"); String ln = qr.getValue("DEPTNAME"); // String bd = qr.getValue("BIRTHDATE"); //String sal = qr.getValue("SALARY"); System.out.println(fn + " " + ln); }*/ // while } // try catch (Exception e) { //com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.webcontainer.jsp.tsx.db.Query.main", "310"); System.out.println("Exception:: " + e.getMessage()); } //catch } }
public class class_name { public static void main(String args[]) { String dbDriver = "COM.ibm.db2.jdbc.app.DB2Driver"; String url = "jdbc:db2:sample"; String user = "Murali"; String pass = "ibm"; String querystring = "Select * from Murali.department"; try { ConnectionProperties cp = new ConnectionProperties(dbDriver, url, user, pass); Query query = new Query(cp, querystring); QueryResults qs = query.execute(); System.out.println("Number of rows = " + qs.size()); // depends on control dependency: [try], data = [none] for (int i = 0; i < qs.size(); i++) { String dept = qs.getValue("DEPTNAME", i); System.out.println("Department:" + dept); // depends on control dependency: [for], data = [none] } /*Enumeration enum= qs.getRows(); while (enum.hasMoreElements()) { QueryRow qr = (QueryRow)enum.nextElement(); String fn = qr.getValue("DEPT"); String ln = qr.getValue("DEPTNAME"); // String bd = qr.getValue("BIRTHDATE"); //String sal = qr.getValue("SALARY"); System.out.println(fn + " " + ln); }*/ // while } // try catch (Exception e) { //com.ibm.ws.ffdc.FFDCFilter.processException(e, "com.ibm.ws.webcontainer.jsp.tsx.db.Query.main", "310"); System.out.println("Exception:: " + e.getMessage()); } //catch // depends on control dependency: [catch], data = [none] } }
public class class_name { private static boolean isParamLiteral(String param) { try { Double.parseDouble(param); return true; } catch (NumberFormatException e) { // Check to see if the parameter is a literal - either a quoted string or // numeric literal if (param.length() > 1 && (param.charAt(0) == '"' || param.charAt(0) == '\'') && param.charAt(0) == param.charAt(param.length() - 1)) { return true; } } return false; } }
public class class_name { private static boolean isParamLiteral(String param) { try { Double.parseDouble(param); // depends on control dependency: [try], data = [none] return true; // depends on control dependency: [try], data = [none] } catch (NumberFormatException e) { // Check to see if the parameter is a literal - either a quoted string or // numeric literal if (param.length() > 1 && (param.charAt(0) == '"' || param.charAt(0) == '\'') && param.charAt(0) == param.charAt(param.length() - 1)) { return true; // depends on control dependency: [if], data = [none] } } // depends on control dependency: [catch], data = [none] return false; } }
public class class_name { boolean addTarget(DatanodeDescriptor node, long generationStamp) { if (this.targets == null) { this.targets = new DatanodeDescriptor[0]; } for (int i=0; i<targets.length; i++) { if (targets[i].equals(node)) { if (generationStamp != targetGSs[i]) { targetGSs[i] = generationStamp; return true; } return false; } } if (node != null) { node.addINode(this); } // allocate new data structure to store additional target DatanodeDescriptor[] newt = new DatanodeDescriptor[targets.length + 1]; long[] newgs = new long[targets.length + 1]; for (int i = 0; i < targets.length; i++) { newt[i] = this.targets[i]; newgs[i] = this.targetGSs[i]; } newt[targets.length] = node; newgs[targets.length] = generationStamp; this.targets = newt; this.targetGSs = newgs; this.primaryNodeIndex = -1; return true; } }
public class class_name { boolean addTarget(DatanodeDescriptor node, long generationStamp) { if (this.targets == null) { this.targets = new DatanodeDescriptor[0]; // depends on control dependency: [if], data = [none] } for (int i=0; i<targets.length; i++) { if (targets[i].equals(node)) { if (generationStamp != targetGSs[i]) { targetGSs[i] = generationStamp; // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } return false; // depends on control dependency: [if], data = [none] } } if (node != null) { node.addINode(this); // depends on control dependency: [if], data = [none] } // allocate new data structure to store additional target DatanodeDescriptor[] newt = new DatanodeDescriptor[targets.length + 1]; long[] newgs = new long[targets.length + 1]; for (int i = 0; i < targets.length; i++) { newt[i] = this.targets[i]; // depends on control dependency: [for], data = [i] newgs[i] = this.targetGSs[i]; // depends on control dependency: [for], data = [i] } newt[targets.length] = node; newgs[targets.length] = generationStamp; this.targets = newt; this.targetGSs = newgs; this.primaryNodeIndex = -1; return true; } }
public class class_name { public static String encodeToString(byte[] bytes, boolean lineBreaks) { try { return new String(encode(bytes, lineBreaks), "ASCII"); } catch (UnsupportedEncodingException iex) { // ASCII should be supported throw new RuntimeException(iex); } } }
public class class_name { public static String encodeToString(byte[] bytes, boolean lineBreaks) { try { return new String(encode(bytes, lineBreaks), "ASCII"); // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException iex) { // ASCII should be supported throw new RuntimeException(iex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void configureCredential(List<ConfigProperty> properties) { // if a credential-reference has been defined, get the password property from it if (credentialSourceSupplier != null) { try { CredentialSource credentialSource = credentialSourceSupplier.get(); if (credentialSource != null) { char[] password = credentialSource.getCredential(PasswordCredential.class).getPassword(ClearPassword.class).getPassword(); if (password != null) { // add the password property properties.add(simpleProperty15("password", String.class.getName(), new String(password))); } } } catch (Exception e) { throw new RuntimeException(e); } } } }
public class class_name { private void configureCredential(List<ConfigProperty> properties) { // if a credential-reference has been defined, get the password property from it if (credentialSourceSupplier != null) { try { CredentialSource credentialSource = credentialSourceSupplier.get(); if (credentialSource != null) { char[] password = credentialSource.getCredential(PasswordCredential.class).getPassword(ClearPassword.class).getPassword(); if (password != null) { // add the password property properties.add(simpleProperty15("password", String.class.getName(), new String(password))); // depends on control dependency: [if], data = [(password] } } } catch (Exception e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { public UpdateTarget withSupportedOperations(SupportedOperation... supportedOperations) { if (this.supportedOperations == null) { setSupportedOperations(new com.amazonaws.internal.SdkInternalList<SupportedOperation>(supportedOperations.length)); } for (SupportedOperation ele : supportedOperations) { this.supportedOperations.add(ele); } return this; } }
public class class_name { public UpdateTarget withSupportedOperations(SupportedOperation... supportedOperations) { if (this.supportedOperations == null) { setSupportedOperations(new com.amazonaws.internal.SdkInternalList<SupportedOperation>(supportedOperations.length)); // depends on control dependency: [if], data = [none] } for (SupportedOperation ele : supportedOperations) { this.supportedOperations.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public void show() { int height = buildDropDown(); int widthSpec = 0; int heightSpec = 0; boolean noInputMethod = isInputMethodNotNeeded(); if (mPopup.isShowing()) { if (mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT) { // The call to PopupWindow's update method below can accept -1 for any // value you do not want to update. widthSpec = -1; } else if (mDropDownWidth == ViewGroup.LayoutParams.WRAP_CONTENT) { widthSpec = getAnchorView().getWidth(); } else { widthSpec = mDropDownWidth; } if (mDropDownHeight == ViewGroup.LayoutParams.MATCH_PARENT) { // The call to PopupWindow's update method below can accept -1 for any // value you do not want to update. heightSpec = noInputMethod ? height : ViewGroup.LayoutParams.MATCH_PARENT; if (noInputMethod) { mPopup.setWindowLayoutMode( mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT ? ViewGroup.LayoutParams.MATCH_PARENT : 0, 0); } else { mPopup.setWindowLayoutMode( mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT ? ViewGroup.LayoutParams.MATCH_PARENT : 0, ViewGroup.LayoutParams.MATCH_PARENT); } } else if (mDropDownHeight == ViewGroup.LayoutParams.WRAP_CONTENT) { heightSpec = height; } else { heightSpec = mDropDownHeight; } mPopup.setOutsideTouchable(!mForceIgnoreOutsideTouch && !mDropDownAlwaysVisible); mPopup.update(getAnchorView(), mDropDownHorizontalOffset, mDropDownVerticalOffset, widthSpec, heightSpec); } else { if (mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT) { widthSpec = ViewGroup.LayoutParams.MATCH_PARENT; } else { if (mDropDownWidth == ViewGroup.LayoutParams.WRAP_CONTENT) { mPopup.setWidth(getAnchorView().getWidth()); } else { mPopup.setWidth(mDropDownWidth); } } if (mDropDownHeight == ViewGroup.LayoutParams.MATCH_PARENT) { heightSpec = ViewGroup.LayoutParams.MATCH_PARENT; } else { if (mDropDownHeight == ViewGroup.LayoutParams.WRAP_CONTENT) { mPopup.setHeight(height); } else { mPopup.setHeight(mDropDownHeight); } } mPopup.setWindowLayoutMode(widthSpec, heightSpec); setPopupClipToScreenEnabled(true); // use outside touchable to dismiss drop down when touching outside of it, so // only set this if the dropdown is not always visible mPopup.setOutsideTouchable(!mForceIgnoreOutsideTouch && !mDropDownAlwaysVisible); mPopup.setTouchInterceptor(mTouchInterceptor); PopupWindowCompat.showAsDropDown(mPopup, getAnchorView(), mDropDownHorizontalOffset, mDropDownVerticalOffset, mDropDownGravity); mDropDownList.setSelection(ListView.INVALID_POSITION); if (!mModal || mDropDownList.isInTouchMode()) { clearListSelection(); } if (!mModal) { mHandler.post(mHideSelector); } // show item animation if(mItemAnimationId != 0) mPopup.getContentView().getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { @Override public boolean onPreDraw() { mPopup.getContentView().getViewTreeObserver().removeOnPreDrawListener(this); for(int i = 0, count = mDropDownList.getChildCount(); i < count; i ++){ View v = mDropDownList.getChildAt(i); Animation anim = AnimationUtils.loadAnimation(mContext, mItemAnimationId); anim.setStartOffset(mItemAnimationOffset * i); v.startAnimation(anim); } return false; } }); } } }
public class class_name { public void show() { int height = buildDropDown(); int widthSpec = 0; int heightSpec = 0; boolean noInputMethod = isInputMethodNotNeeded(); if (mPopup.isShowing()) { if (mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT) { // The call to PopupWindow's update method below can accept -1 for any // value you do not want to update. widthSpec = -1; // depends on control dependency: [if], data = [none] } else if (mDropDownWidth == ViewGroup.LayoutParams.WRAP_CONTENT) { widthSpec = getAnchorView().getWidth(); // depends on control dependency: [if], data = [none] } else { widthSpec = mDropDownWidth; // depends on control dependency: [if], data = [none] } if (mDropDownHeight == ViewGroup.LayoutParams.MATCH_PARENT) { // The call to PopupWindow's update method below can accept -1 for any // value you do not want to update. heightSpec = noInputMethod ? height : ViewGroup.LayoutParams.MATCH_PARENT; // depends on control dependency: [if], data = [none] if (noInputMethod) { mPopup.setWindowLayoutMode( mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT ? ViewGroup.LayoutParams.MATCH_PARENT : 0, 0); // depends on control dependency: [if], data = [none] } else { mPopup.setWindowLayoutMode( mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT ? ViewGroup.LayoutParams.MATCH_PARENT : 0, ViewGroup.LayoutParams.MATCH_PARENT); // depends on control dependency: [if], data = [none] } } else if (mDropDownHeight == ViewGroup.LayoutParams.WRAP_CONTENT) { heightSpec = height; // depends on control dependency: [if], data = [none] } else { heightSpec = mDropDownHeight; // depends on control dependency: [if], data = [none] } mPopup.setOutsideTouchable(!mForceIgnoreOutsideTouch && !mDropDownAlwaysVisible); // depends on control dependency: [if], data = [none] mPopup.update(getAnchorView(), mDropDownHorizontalOffset, mDropDownVerticalOffset, widthSpec, heightSpec); // depends on control dependency: [if], data = [none] } else { if (mDropDownWidth == ViewGroup.LayoutParams.MATCH_PARENT) { widthSpec = ViewGroup.LayoutParams.MATCH_PARENT; // depends on control dependency: [if], data = [none] } else { if (mDropDownWidth == ViewGroup.LayoutParams.WRAP_CONTENT) { mPopup.setWidth(getAnchorView().getWidth()); // depends on control dependency: [if], data = [none] } else { mPopup.setWidth(mDropDownWidth); // depends on control dependency: [if], data = [(mDropDownWidth] } } if (mDropDownHeight == ViewGroup.LayoutParams.MATCH_PARENT) { heightSpec = ViewGroup.LayoutParams.MATCH_PARENT; // depends on control dependency: [if], data = [none] } else { if (mDropDownHeight == ViewGroup.LayoutParams.WRAP_CONTENT) { mPopup.setHeight(height); // depends on control dependency: [if], data = [none] } else { mPopup.setHeight(mDropDownHeight); // depends on control dependency: [if], data = [(mDropDownHeight] } } mPopup.setWindowLayoutMode(widthSpec, heightSpec); // depends on control dependency: [if], data = [none] setPopupClipToScreenEnabled(true); // depends on control dependency: [if], data = [none] // use outside touchable to dismiss drop down when touching outside of it, so // only set this if the dropdown is not always visible mPopup.setOutsideTouchable(!mForceIgnoreOutsideTouch && !mDropDownAlwaysVisible); // depends on control dependency: [if], data = [none] mPopup.setTouchInterceptor(mTouchInterceptor); // depends on control dependency: [if], data = [none] PopupWindowCompat.showAsDropDown(mPopup, getAnchorView(), mDropDownHorizontalOffset, mDropDownVerticalOffset, mDropDownGravity); // depends on control dependency: [if], data = [none] mDropDownList.setSelection(ListView.INVALID_POSITION); // depends on control dependency: [if], data = [none] if (!mModal || mDropDownList.isInTouchMode()) { clearListSelection(); // depends on control dependency: [if], data = [none] } if (!mModal) { mHandler.post(mHideSelector); // depends on control dependency: [if], data = [none] } // show item animation if(mItemAnimationId != 0) mPopup.getContentView().getViewTreeObserver().addOnPreDrawListener(new ViewTreeObserver.OnPreDrawListener() { @Override public boolean onPreDraw() { mPopup.getContentView().getViewTreeObserver().removeOnPreDrawListener(this); for(int i = 0, count = mDropDownList.getChildCount(); i < count; i ++){ View v = mDropDownList.getChildAt(i); Animation anim = AnimationUtils.loadAnimation(mContext, mItemAnimationId); anim.setStartOffset(mItemAnimationOffset * i); // depends on control dependency: [for], data = [i] v.startAnimation(anim); // depends on control dependency: [for], data = [none] } return false; } }); } } }
public class class_name { public FrequentItemsetsResult run(Database db, final Relation<BitVector> relation) { // TODO: implement with resizable array, to not need dim. final int dim = RelationUtil.dimensionality(relation); final VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation); // Compute absolute minsupport final int minsupp = getMinimumSupport(relation.size()); LOG.verbose("Finding item frequencies for ordering."); final int[] counts = countItemSupport(relation, dim); // Forward and backward indexes int[] iidx = new int[dim]; final int[] idx = buildIndex(counts, iidx, minsupp); final int items = idx.length; LOG.statistics(new LongStatistic(STAT + "raw-items", dim)); LOG.statistics(new LongStatistic(STAT + "raw-transactions", relation.size())); LOG.statistics(new DoubleStatistic(STAT + "minsupp-relative", minsupp / (double) relation.size())); LOG.statistics(new LongStatistic(STAT + "minsupp-absolute", minsupp)); LOG.verbose("Building FP-Tree."); Duration ctime = LOG.newDuration(STAT + "fp-tree.construction.time").begin(); FPTree tree = buildFPTree(relation, iidx, items); if(LOG.isStatistics()) { tree.logStatistics(); } if(LOG.isDebuggingFinest()) { StringBuilder buf = new StringBuilder(10000).append("FP-tree:\n"); tree.appendTo(buf, new FPNode.Translator() { @Override public StringBuilder appendTo(StringBuilder buf, int i) { String l = meta.getLabel(idx[i]); return (l != null) ? buf.append(l) : buf.append(i); } }); LOG.debugFinest(buf.toString()); } // Reduce memory usage: tree.reduceMemory(); LOG.statistics(ctime.end()); LOG.verbose("Extracting frequent patterns."); Duration etime = LOG.newDuration(STAT + "fp-growth.extraction.time").begin(); final IndefiniteProgress itemp = LOG.isVerbose() ? new IndefiniteProgress("Frequent itemsets", LOG) : null; final List<Itemset> solution = new ArrayList<>(); // Start extraction with the least frequent items tree.extract(minsupp, minlength, maxlength, true, new FPTree.Collector() { @Override public void collect(int support, int[] data, int start, int plen) { // Always translate the indexes back to the original values via 'idx'! if(plen - start == 1) { solution.add(new OneItemset(idx[data[start]], support)); LOG.incrementProcessed(itemp); return; } // Copy from buffer to a permanent storage int[] indices = new int[plen - start]; for(int i = start, j = 0; i < plen; i++) { indices[j++] = idx[data[i]]; // Translate to original items } Arrays.sort(indices); solution.add(new SparseItemset(indices, support)); LOG.incrementProcessed(itemp); } }); LOG.setCompleted(itemp); Collections.sort(solution); LOG.statistics(etime.end()); LOG.statistics(new LongStatistic(STAT + "frequent-itemsets", solution.size())); return new FrequentItemsetsResult("FP-Growth", "fp-growth", solution, meta, relation.size()); } }
public class class_name { public FrequentItemsetsResult run(Database db, final Relation<BitVector> relation) { // TODO: implement with resizable array, to not need dim. final int dim = RelationUtil.dimensionality(relation); final VectorFieldTypeInformation<BitVector> meta = RelationUtil.assumeVectorField(relation); // Compute absolute minsupport final int minsupp = getMinimumSupport(relation.size()); LOG.verbose("Finding item frequencies for ordering."); final int[] counts = countItemSupport(relation, dim); // Forward and backward indexes int[] iidx = new int[dim]; final int[] idx = buildIndex(counts, iidx, minsupp); final int items = idx.length; LOG.statistics(new LongStatistic(STAT + "raw-items", dim)); LOG.statistics(new LongStatistic(STAT + "raw-transactions", relation.size())); LOG.statistics(new DoubleStatistic(STAT + "minsupp-relative", minsupp / (double) relation.size())); LOG.statistics(new LongStatistic(STAT + "minsupp-absolute", minsupp)); LOG.verbose("Building FP-Tree."); Duration ctime = LOG.newDuration(STAT + "fp-tree.construction.time").begin(); FPTree tree = buildFPTree(relation, iidx, items); if(LOG.isStatistics()) { tree.logStatistics(); // depends on control dependency: [if], data = [none] } if(LOG.isDebuggingFinest()) { StringBuilder buf = new StringBuilder(10000).append("FP-tree:\n"); tree.appendTo(buf, new FPNode.Translator() { @Override public StringBuilder appendTo(StringBuilder buf, int i) { String l = meta.getLabel(idx[i]); return (l != null) ? buf.append(l) : buf.append(i); } }); // depends on control dependency: [if], data = [none] LOG.debugFinest(buf.toString()); // depends on control dependency: [if], data = [none] } // Reduce memory usage: tree.reduceMemory(); LOG.statistics(ctime.end()); LOG.verbose("Extracting frequent patterns."); Duration etime = LOG.newDuration(STAT + "fp-growth.extraction.time").begin(); final IndefiniteProgress itemp = LOG.isVerbose() ? new IndefiniteProgress("Frequent itemsets", LOG) : null; final List<Itemset> solution = new ArrayList<>(); // Start extraction with the least frequent items tree.extract(minsupp, minlength, maxlength, true, new FPTree.Collector() { @Override public void collect(int support, int[] data, int start, int plen) { // Always translate the indexes back to the original values via 'idx'! if(plen - start == 1) { solution.add(new OneItemset(idx[data[start]], support)); // depends on control dependency: [if], data = [none] LOG.incrementProcessed(itemp); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } // Copy from buffer to a permanent storage int[] indices = new int[plen - start]; for(int i = start, j = 0; i < plen; i++) { indices[j++] = idx[data[i]]; // Translate to original items // depends on control dependency: [for], data = [i] } Arrays.sort(indices); solution.add(new SparseItemset(indices, support)); LOG.incrementProcessed(itemp); } }); LOG.setCompleted(itemp); Collections.sort(solution); LOG.statistics(etime.end()); LOG.statistics(new LongStatistic(STAT + "frequent-itemsets", solution.size())); return new FrequentItemsetsResult("FP-Growth", "fp-growth", solution, meta, relation.size()); } }
public class class_name { private synchronized ZNames loadTimeZoneNames(String tzID) { ZNames tznames = _tzNamesMap.get(tzID); if (tznames == null) { ZNamesLoader loader = new ZNamesLoader(); loader.loadTimeZone(_zoneStrings, tzID); tznames = ZNames.createTimeZoneAndPutInCache(_tzNamesMap, loader.getNames(), tzID); } return tznames; } }
public class class_name { private synchronized ZNames loadTimeZoneNames(String tzID) { ZNames tznames = _tzNamesMap.get(tzID); if (tznames == null) { ZNamesLoader loader = new ZNamesLoader(); loader.loadTimeZone(_zoneStrings, tzID); // depends on control dependency: [if], data = [none] tznames = ZNames.createTimeZoneAndPutInCache(_tzNamesMap, loader.getNames(), tzID); // depends on control dependency: [if], data = [none] } return tznames; } }
public class class_name { public boolean waitForActivity(String name, int timeout){ if(isActivityMatching(activityUtils.getCurrentActivity(false, false), name)){ return true; } boolean foundActivity = false; ActivityMonitor activityMonitor = getActivityMonitor(); long currentTime = SystemClock.uptimeMillis(); final long endTime = currentTime + timeout; while(currentTime < endTime){ Activity currentActivity = activityMonitor.waitForActivityWithTimeout(endTime - currentTime); if(isActivityMatching(currentActivity, name)){ foundActivity = true; break; } currentTime = SystemClock.uptimeMillis(); } removeMonitor(activityMonitor); return foundActivity; } }
public class class_name { public boolean waitForActivity(String name, int timeout){ if(isActivityMatching(activityUtils.getCurrentActivity(false, false), name)){ return true; // depends on control dependency: [if], data = [none] } boolean foundActivity = false; ActivityMonitor activityMonitor = getActivityMonitor(); long currentTime = SystemClock.uptimeMillis(); final long endTime = currentTime + timeout; while(currentTime < endTime){ Activity currentActivity = activityMonitor.waitForActivityWithTimeout(endTime - currentTime); if(isActivityMatching(currentActivity, name)){ foundActivity = true; // depends on control dependency: [if], data = [none] break; } currentTime = SystemClock.uptimeMillis(); // depends on control dependency: [while], data = [none] } removeMonitor(activityMonitor); return foundActivity; } }
public class class_name { public static void cursorLongToContentValues(Cursor cursor, String field, ContentValues values, String key) { int colIndex = cursor.getColumnIndex(field); if (!cursor.isNull(colIndex)) { Long value = Long.valueOf(cursor.getLong(colIndex)); values.put(key, value); } else { values.put(key, (Long) null); } } }
public class class_name { public static void cursorLongToContentValues(Cursor cursor, String field, ContentValues values, String key) { int colIndex = cursor.getColumnIndex(field); if (!cursor.isNull(colIndex)) { Long value = Long.valueOf(cursor.getLong(colIndex)); values.put(key, value); // depends on control dependency: [if], data = [none] } else { values.put(key, (Long) null); // depends on control dependency: [if], data = [none] } } }
public class class_name { public ListStreamConsumersResult withConsumers(Consumer... consumers) { if (this.consumers == null) { setConsumers(new com.amazonaws.internal.SdkInternalList<Consumer>(consumers.length)); } for (Consumer ele : consumers) { this.consumers.add(ele); } return this; } }
public class class_name { public ListStreamConsumersResult withConsumers(Consumer... consumers) { if (this.consumers == null) { setConsumers(new com.amazonaws.internal.SdkInternalList<Consumer>(consumers.length)); // depends on control dependency: [if], data = [none] } for (Consumer ele : consumers) { this.consumers.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { public EClass getIfcDateTimeSelect() { if (ifcDateTimeSelectEClass == null) { ifcDateTimeSelectEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(949); } return ifcDateTimeSelectEClass; } }
public class class_name { public EClass getIfcDateTimeSelect() { if (ifcDateTimeSelectEClass == null) { ifcDateTimeSelectEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc2x3tc1Package.eNS_URI) .getEClassifiers().get(949); // depends on control dependency: [if], data = [none] } return ifcDateTimeSelectEClass; } }
public class class_name { @Override protected void extraFileActions(PackageSymbol pack, JavaFileObject fo) { if (fo.isNameCompatible("package", JavaFileObject.Kind.HTML)) { pack.sourcefile = fo; } } }
public class class_name { @Override protected void extraFileActions(PackageSymbol pack, JavaFileObject fo) { if (fo.isNameCompatible("package", JavaFileObject.Kind.HTML)) { pack.sourcefile = fo; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void update (Graphics g) { Shape clip = g.getClip(); Rectangle dirty; if (clip != null) { dirty = clip.getBounds(); } else { dirty = getRootPane().getBounds(); // account for our frame insets Insets insets = getInsets(); dirty.x += insets.left; dirty.y += insets.top; } if (_fmgr != null) { _fmgr.restoreFromBack(dirty); } } }
public class class_name { @Override public void update (Graphics g) { Shape clip = g.getClip(); Rectangle dirty; if (clip != null) { dirty = clip.getBounds(); // depends on control dependency: [if], data = [none] } else { dirty = getRootPane().getBounds(); // depends on control dependency: [if], data = [none] // account for our frame insets Insets insets = getInsets(); dirty.x += insets.left; // depends on control dependency: [if], data = [none] dirty.y += insets.top; // depends on control dependency: [if], data = [none] } if (_fmgr != null) { _fmgr.restoreFromBack(dirty); // depends on control dependency: [if], data = [none] } } }
public class class_name { private void removeOldChildren() { Map<String, Object> attributes = component.getAttributes(); List<String> currentComponents = createdComponents; // Get the old list of created component ids and update the current list as a // component attribute Object oldValue; if (currentComponents != null) { oldValue = attributes.put(JSP_CREATED_COMPONENT_IDS, currentComponents); createdComponents = null; } else { oldValue = attributes.remove(JSP_CREATED_COMPONENT_IDS); } // Remove old children that are no longer present if (oldValue != null) { List<String> oldList = TypedCollections.dynamicallyCastList((List)oldValue, String.class); int oldCount = oldList.size(); if (oldCount > 0) { if (currentComponents != null) { int currStartIndex = 0; for (int oldIndex = 0; oldIndex < oldCount; oldIndex++) { String oldId = oldList.get(oldIndex); int foundIndex = _indexOfStartingFrom(currentComponents, currStartIndex, oldId); if (foundIndex != -1) { currStartIndex = foundIndex + 1; } else { UIComponent child = component.findComponent(oldId); // if a component is marked transient, it would have // been already removed from the child list, but the // oldList would still have it. In addition, the component // might have manually been removed. So, if findComponent // isn't successful, don't call remove child (it will NPE) if ( child != null) { component.getChildren().remove(child); } } } } else { List<UIComponent> children = component.getChildren(); // All old components need to be removed for (String oldId : oldList) { UIComponent child = component.findComponent(oldId); if (child != null) { children.remove(child); } } } } } } }
public class class_name { private void removeOldChildren() { Map<String, Object> attributes = component.getAttributes(); List<String> currentComponents = createdComponents; // Get the old list of created component ids and update the current list as a // component attribute Object oldValue; if (currentComponents != null) { oldValue = attributes.put(JSP_CREATED_COMPONENT_IDS, currentComponents); // depends on control dependency: [if], data = [none] createdComponents = null; // depends on control dependency: [if], data = [none] } else { oldValue = attributes.remove(JSP_CREATED_COMPONENT_IDS); // depends on control dependency: [if], data = [none] } // Remove old children that are no longer present if (oldValue != null) { List<String> oldList = TypedCollections.dynamicallyCastList((List)oldValue, String.class); int oldCount = oldList.size(); if (oldCount > 0) { if (currentComponents != null) { int currStartIndex = 0; for (int oldIndex = 0; oldIndex < oldCount; oldIndex++) { String oldId = oldList.get(oldIndex); int foundIndex = _indexOfStartingFrom(currentComponents, currStartIndex, oldId); if (foundIndex != -1) { currStartIndex = foundIndex + 1; // depends on control dependency: [if], data = [none] } else { UIComponent child = component.findComponent(oldId); // if a component is marked transient, it would have // been already removed from the child list, but the // oldList would still have it. In addition, the component // might have manually been removed. So, if findComponent // isn't successful, don't call remove child (it will NPE) if ( child != null) { component.getChildren().remove(child); // depends on control dependency: [if], data = [none] } } } } else { List<UIComponent> children = component.getChildren(); // All old components need to be removed for (String oldId : oldList) { UIComponent child = component.findComponent(oldId); if (child != null) { children.remove(child); // depends on control dependency: [if], data = [(child] } } } } } } }
public class class_name { public void marshall(PutEmailIdentityMailFromAttributesRequest putEmailIdentityMailFromAttributesRequest, ProtocolMarshaller protocolMarshaller) { if (putEmailIdentityMailFromAttributesRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(putEmailIdentityMailFromAttributesRequest.getEmailIdentity(), EMAILIDENTITY_BINDING); protocolMarshaller.marshall(putEmailIdentityMailFromAttributesRequest.getMailFromDomain(), MAILFROMDOMAIN_BINDING); protocolMarshaller.marshall(putEmailIdentityMailFromAttributesRequest.getBehaviorOnMxFailure(), BEHAVIORONMXFAILURE_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(PutEmailIdentityMailFromAttributesRequest putEmailIdentityMailFromAttributesRequest, ProtocolMarshaller protocolMarshaller) { if (putEmailIdentityMailFromAttributesRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(putEmailIdentityMailFromAttributesRequest.getEmailIdentity(), EMAILIDENTITY_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(putEmailIdentityMailFromAttributesRequest.getMailFromDomain(), MAILFROMDOMAIN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(putEmailIdentityMailFromAttributesRequest.getBehaviorOnMxFailure(), BEHAVIORONMXFAILURE_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { boolean checkAccessTopicFromController(UserContext ctx, String topic, JsTopicAccessController jsTopicAccessController) throws IllegalAccessException { logger.debug("Looking for accessController for topic '{}' from JsTopicAccessController {}", topic, jsTopicAccessController); JsTopicControls jsTopicControls = jsTopicControlsTools.getJsTopicControlsFromProxyClass(jsTopicAccessController.getClass()); logger.debug("Looking for accessController for topic '{}' from jsTopicControls {}", topic, jsTopicControls); if(null != jsTopicControls) { logger.debug("Looking for accessController for topic '{}' from jsTopicControls {}, {}", topic, jsTopicControls, jsTopicControls.value()); for (JsTopicControl jsTopicControl : jsTopicControls.value()) { if(topic.equals(jsTopicControl.value())) { logger.debug("Found accessController for topic '{}' from JsTopicControls annotation", topic); checkAccessTopicFromControllers(ctx, topic, Arrays.asList(jsTopicAccessController)); return true; } } } return false; } }
public class class_name { boolean checkAccessTopicFromController(UserContext ctx, String topic, JsTopicAccessController jsTopicAccessController) throws IllegalAccessException { logger.debug("Looking for accessController for topic '{}' from JsTopicAccessController {}", topic, jsTopicAccessController); JsTopicControls jsTopicControls = jsTopicControlsTools.getJsTopicControlsFromProxyClass(jsTopicAccessController.getClass()); logger.debug("Looking for accessController for topic '{}' from jsTopicControls {}", topic, jsTopicControls); if(null != jsTopicControls) { logger.debug("Looking for accessController for topic '{}' from jsTopicControls {}, {}", topic, jsTopicControls, jsTopicControls.value()); for (JsTopicControl jsTopicControl : jsTopicControls.value()) { if(topic.equals(jsTopicControl.value())) { logger.debug("Found accessController for topic '{}' from JsTopicControls annotation", topic); // depends on control dependency: [if], data = [none] checkAccessTopicFromControllers(ctx, topic, Arrays.asList(jsTopicAccessController)); // depends on control dependency: [if], data = [none] return true; // depends on control dependency: [if], data = [none] } } } return false; } }
public class class_name { private int deleteCandidates(Outline outline, Collection<Candidate> candidates) { int deletionCount = 0; writeSummary("Deletions:"); // Visit all candidate classes. for (Candidate candidate : candidates) { if (!candidate.canBeRemoved()) { continue; } // Get the defined class for candidate class. JDefinedClass candidateClass = candidate.getClazz(); deleteClass(outline, candidateClass); deletionCount++; for (JDefinedClass objectFactoryClass : candidate.getObjectFactoryClasses()) { deletionCount += deleteFactoryMethod(objectFactoryClass, candidate); } // Replay the same for interface: if (candidate.isValueObjectDisabled()) { for (Iterator<JClass> iter = candidateClass._implements(); iter.hasNext();) { JClass interfaceClass = iter.next(); if (!isHiddenClass(interfaceClass)) { deleteClass(outline, (JDefinedClass) interfaceClass); deletionCount++; } } } } return deletionCount; } }
public class class_name { private int deleteCandidates(Outline outline, Collection<Candidate> candidates) { int deletionCount = 0; writeSummary("Deletions:"); // Visit all candidate classes. for (Candidate candidate : candidates) { if (!candidate.canBeRemoved()) { continue; } // Get the defined class for candidate class. JDefinedClass candidateClass = candidate.getClazz(); deleteClass(outline, candidateClass); // depends on control dependency: [for], data = [candidate] deletionCount++; // depends on control dependency: [for], data = [none] for (JDefinedClass objectFactoryClass : candidate.getObjectFactoryClasses()) { deletionCount += deleteFactoryMethod(objectFactoryClass, candidate); // depends on control dependency: [for], data = [objectFactoryClass] } // Replay the same for interface: if (candidate.isValueObjectDisabled()) { for (Iterator<JClass> iter = candidateClass._implements(); iter.hasNext();) { JClass interfaceClass = iter.next(); if (!isHiddenClass(interfaceClass)) { deleteClass(outline, (JDefinedClass) interfaceClass); // depends on control dependency: [if], data = [none] deletionCount++; // depends on control dependency: [if], data = [none] } } } } return deletionCount; } }
public class class_name { public void close() { if (connected) { try { terminal.deactivate(); } catch (Exception ex) { logger.debug("Exception occurred while closing UDPMasterConnection", ex); } connected = false; } } }
public class class_name { public void close() { if (connected) { try { terminal.deactivate(); // depends on control dependency: [try], data = [none] } catch (Exception ex) { logger.debug("Exception occurred while closing UDPMasterConnection", ex); } // depends on control dependency: [catch], data = [none] connected = false; // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public void onAboutToShow() { log.debug(getId() + ": onAboutToShow with refreshPolicy: " + dataProvider.getRefreshPolicy()); super.onAboutToShow(); dataProvider.addDataProviderListener(this); registerListeners(); if (detailForm instanceof Widget) { ((Widget) detailForm).onAboutToShow(); } getTableWidget().onAboutToShow(); // lazy loading, if no list is present, load when widget is shown // include RefreshPolicy given by DataProvider if ((dataProvider.getRefreshPolicy() != DataProvider.RefreshPolicy.NEVER) && (getTableWidget().isEmpty())) { executeFilter(); } else if (!getTableWidget().hasSelection()) { getTableWidget().selectRowObject(0, this); } } }
public class class_name { @Override public void onAboutToShow() { log.debug(getId() + ": onAboutToShow with refreshPolicy: " + dataProvider.getRefreshPolicy()); super.onAboutToShow(); dataProvider.addDataProviderListener(this); registerListeners(); if (detailForm instanceof Widget) { ((Widget) detailForm).onAboutToShow(); // depends on control dependency: [if], data = [none] } getTableWidget().onAboutToShow(); // lazy loading, if no list is present, load when widget is shown // include RefreshPolicy given by DataProvider if ((dataProvider.getRefreshPolicy() != DataProvider.RefreshPolicy.NEVER) && (getTableWidget().isEmpty())) { executeFilter(); // depends on control dependency: [if], data = [none] } else if (!getTableWidget().hasSelection()) { getTableWidget().selectRowObject(0, this); // depends on control dependency: [if], data = [none] } } }
public class class_name { public boolean getRequiredBoolean(String key) { Boolean value = configuration.getBoolean(key, null); if (value != null) { return value; } else { throw new IllegalStateException(String.format("required key [%s] not found", key)); } } }
public class class_name { public boolean getRequiredBoolean(String key) { Boolean value = configuration.getBoolean(key, null); if (value != null) { return value; // depends on control dependency: [if], data = [none] } else { throw new IllegalStateException(String.format("required key [%s] not found", key)); } } }
public class class_name { public ResourceRequestInfo getPendingRequestForAny( String host, ResourceType type) { Context c = getContext(type); for (ResourceRequestInfo req: c.anyHostRequests) { Set<String> excluded = req.getExcludeHosts(); if (!excluded.contains(host)) { return req; } } return null; } }
public class class_name { public ResourceRequestInfo getPendingRequestForAny( String host, ResourceType type) { Context c = getContext(type); for (ResourceRequestInfo req: c.anyHostRequests) { Set<String> excluded = req.getExcludeHosts(); if (!excluded.contains(host)) { return req; // depends on control dependency: [if], data = [none] } } return null; } }
public class class_name { public String save() { Long userId = getLong("user.id"); if (ValidEntityKeyPredicate.Instance.apply(userId)) { User user = userService.get(userId); User manager = userService.get(SecurityUtils.getUsername()); if (userService.isManagedBy(manager, user)) { return updateAccount(userId); } else { return null; } } else { addError("error.parameters.needed"); return ERROR; } } }
public class class_name { public String save() { Long userId = getLong("user.id"); if (ValidEntityKeyPredicate.Instance.apply(userId)) { User user = userService.get(userId); User manager = userService.get(SecurityUtils.getUsername()); if (userService.isManagedBy(manager, user)) { return updateAccount(userId); // depends on control dependency: [if], data = [none] } else { return null; // depends on control dependency: [if], data = [none] } } else { addError("error.parameters.needed"); // depends on control dependency: [if], data = [none] return ERROR; // depends on control dependency: [if], data = [none] } } }
public class class_name { private boolean splitTwo() throws PBXException { final AsteriskSettings profile = PBXFactory.getActiveProfile(); AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX(); List<Channel> channels = new LinkedList<>(); channels.add(channel1); if (!pbx.waitForChannelsToQuiescent(channels, 3000)) { logger.error(callSite, callSite); throw new PBXException("Channel: " + channel1 + " cannot be split as they are still in transition."); } /* * redirects the specified channels to the specified endpoints. Returns * true or false reflecting success. */ AgiChannelActivityHold agi1 = new AgiChannelActivityHold(); pbx.setVariable(channel1, "proxyId", "" + ((ChannelProxy) channel1).getIdentity()); channel1.setCurrentActivityAction(agi1); final String agiExten = profile.getAgiExtension(); final String agiContext = profile.getManagementContext(); logger.debug("redirect channel lhs:" + channel1 + " to " + agiExten + " in context " + agiContext); final EndPoint extensionAgi = pbx.getExtensionAgi(); final RedirectAction redirect = new RedirectAction(channel1, agiContext, extensionAgi, 1); // logger.error(redirect); boolean ret = false; { try { // final ManagerResponse response = pbx.sendAction(redirect, 1000); double ctr = 0; while (!agi1.hasCallReachedAgi() && ctr < 10) { Thread.sleep(100); ctr += 100.0 / 1000.0; if (!agi1.hasCallReachedAgi()) { logger.warn("Waiting on (agi1) " + channel1); } } ret = agi1.hasCallReachedAgi(); } catch (final Exception e) { logger.error(e, e); } } return ret; } }
public class class_name { private boolean splitTwo() throws PBXException { final AsteriskSettings profile = PBXFactory.getActiveProfile(); AsteriskPBX pbx = (AsteriskPBX) PBXFactory.getActivePBX(); List<Channel> channels = new LinkedList<>(); channels.add(channel1); if (!pbx.waitForChannelsToQuiescent(channels, 3000)) { logger.error(callSite, callSite); throw new PBXException("Channel: " + channel1 + " cannot be split as they are still in transition."); } /* * redirects the specified channels to the specified endpoints. Returns * true or false reflecting success. */ AgiChannelActivityHold agi1 = new AgiChannelActivityHold(); pbx.setVariable(channel1, "proxyId", "" + ((ChannelProxy) channel1).getIdentity()); channel1.setCurrentActivityAction(agi1); final String agiExten = profile.getAgiExtension(); final String agiContext = profile.getManagementContext(); logger.debug("redirect channel lhs:" + channel1 + " to " + agiExten + " in context " + agiContext); final EndPoint extensionAgi = pbx.getExtensionAgi(); final RedirectAction redirect = new RedirectAction(channel1, agiContext, extensionAgi, 1); // logger.error(redirect); boolean ret = false; { try { // final ManagerResponse response = pbx.sendAction(redirect, 1000); // depends on control dependency: [try], data = [none] double ctr = 0; while (!agi1.hasCallReachedAgi() && ctr < 10) { Thread.sleep(100); // depends on control dependency: [while], data = [none] ctr += 100.0 / 1000.0; // depends on control dependency: [while], data = [none] if (!agi1.hasCallReachedAgi()) { logger.warn("Waiting on (agi1) " + channel1); // depends on control dependency: [if], data = [none] } } ret = agi1.hasCallReachedAgi(); // depends on control dependency: [try], data = [none] } catch (final Exception e) { logger.error(e, e); } // depends on control dependency: [catch], data = [none] } return ret; } }
public class class_name { public void abort() { try { super.close(); } catch (IOException ioe) { LOG.warn("Unable to abort file " + tmpFile, ioe); } if (!tmpFile.delete()) { LOG.warn("Unable to delete tmp file during abort " + tmpFile); } } }
public class class_name { public void abort() { try { super.close(); // depends on control dependency: [try], data = [none] } catch (IOException ioe) { LOG.warn("Unable to abort file " + tmpFile, ioe); } // depends on control dependency: [catch], data = [none] if (!tmpFile.delete()) { LOG.warn("Unable to delete tmp file during abort " + tmpFile); // depends on control dependency: [if], data = [none] } } }
public class class_name { public GHGeocodingResponse geocode(GHGeocodingRequest request) { String url = buildUrl(request); try { Request okRequest = new Request.Builder().url(url).build(); ResponseBody rspBody = getClientForRequest(request).newCall(okRequest).execute().body(); return objectMapper.readValue(rspBody.bytes(), GHGeocodingResponse.class); } catch (Exception ex) { throw new RuntimeException("Problem performing geocoding for " + url + ": " + ex.getMessage(), ex); } } }
public class class_name { public GHGeocodingResponse geocode(GHGeocodingRequest request) { String url = buildUrl(request); try { Request okRequest = new Request.Builder().url(url).build(); ResponseBody rspBody = getClientForRequest(request).newCall(okRequest).execute().body(); return objectMapper.readValue(rspBody.bytes(), GHGeocodingResponse.class); // depends on control dependency: [try], data = [none] } catch (Exception ex) { throw new RuntimeException("Problem performing geocoding for " + url + ": " + ex.getMessage(), ex); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static int chooseWithProbability(double[] distances, double sum, double probability) { for (int j = 0; j < distances.length; ++j) { double probOfDistance = Math.pow(distances[j], 2) / sum; probability -= probOfDistance; if (probability <= EPSILON) { return j; } } return distances.length-1; } }
public class class_name { private static int chooseWithProbability(double[] distances, double sum, double probability) { for (int j = 0; j < distances.length; ++j) { double probOfDistance = Math.pow(distances[j], 2) / sum; probability -= probOfDistance; // depends on control dependency: [for], data = [none] if (probability <= EPSILON) { return j; // depends on control dependency: [if], data = [none] } } return distances.length-1; } }
public class class_name { private void scan(HilbertFeatures hf, int k0) { final int mink0 = Math.min(2 * k0, capital_n - 1); if(LOG.isDebuggingFine()) { LOG.debugFine("Scanning with k0=" + k0 + " (" + mink0 + ")" + " N*=" + capital_n_star); } for(int i = 0; i < hf.pf.length; i++) { if(hf.pf[i].ubound < omega_star) { continue; } if(hf.pf[i].lbound < hf.pf[i].ubound) { double omega = hf.fastUpperBound(i); if(omega < omega_star) { hf.pf[i].ubound = omega; } else { int maxcount; // capital_n-1 instead of capital_n: all, except self if(hf.top.contains(hf.pf[i])) { maxcount = capital_n - 1; } else { maxcount = mink0; } innerScan(hf, i, maxcount); } } if(hf.pf[i].ubound > 0) { hf.updateOUT(i); } if(hf.pf[i].lbound > 0) { hf.updateWLB(i); } if(hf.wlb.size() >= n) { omega_star = Math.max(omega_star, hf.wlb.peek().lbound); } } } }
public class class_name { private void scan(HilbertFeatures hf, int k0) { final int mink0 = Math.min(2 * k0, capital_n - 1); if(LOG.isDebuggingFine()) { LOG.debugFine("Scanning with k0=" + k0 + " (" + mink0 + ")" + " N*=" + capital_n_star); // depends on control dependency: [if], data = [none] } for(int i = 0; i < hf.pf.length; i++) { if(hf.pf[i].ubound < omega_star) { continue; } if(hf.pf[i].lbound < hf.pf[i].ubound) { double omega = hf.fastUpperBound(i); if(omega < omega_star) { hf.pf[i].ubound = omega; // depends on control dependency: [if], data = [none] } else { int maxcount; // capital_n-1 instead of capital_n: all, except self if(hf.top.contains(hf.pf[i])) { maxcount = capital_n - 1; // depends on control dependency: [if], data = [none] } else { maxcount = mink0; // depends on control dependency: [if], data = [none] } innerScan(hf, i, maxcount); // depends on control dependency: [if], data = [none] } } if(hf.pf[i].ubound > 0) { hf.updateOUT(i); // depends on control dependency: [if], data = [none] } if(hf.pf[i].lbound > 0) { hf.updateWLB(i); // depends on control dependency: [if], data = [none] } if(hf.wlb.size() >= n) { omega_star = Math.max(omega_star, hf.wlb.peek().lbound); // depends on control dependency: [if], data = [none] } } } }
public class class_name { @Override public EClass getIfcSpatialStructureElement() { if (ifcSpatialStructureElementEClass == null) { ifcSpatialStructureElementEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(616); } return ifcSpatialStructureElementEClass; } }
public class class_name { @Override public EClass getIfcSpatialStructureElement() { if (ifcSpatialStructureElementEClass == null) { ifcSpatialStructureElementEClass = (EClass) EPackage.Registry.INSTANCE.getEPackage(Ifc4Package.eNS_URI) .getEClassifiers().get(616); // depends on control dependency: [if], data = [none] } return ifcSpatialStructureElementEClass; } }
public class class_name { @Override public void run() { if (this.state.compareAndSet(NEW, RUNNING)) { try { renameThread("RUNNING"); this.threadRef.set(Thread.currentThread()); this.jedis.sadd(key(WORKERS), this.name); this.jedis.set(key(WORKER, this.name, STARTED), new SimpleDateFormat(DATE_FORMAT).format(new Date())); this.listenerDelegate.fireEvent(WORKER_START, this, null, null, null, null, null); loadRedisScripts(); poll(); } catch (Exception ex) { LOG.error("Uncaught exception in worker run-loop!", ex); this.listenerDelegate.fireEvent(WORKER_ERROR, this, null, null, null, null, ex); } finally { renameThread("STOPPING"); this.listenerDelegate.fireEvent(WORKER_STOP, this, null, null, null, null, null); this.jedis.srem(key(WORKERS), this.name); this.jedis.del(key(WORKER, this.name), key(WORKER, this.name, STARTED), key(STAT, FAILED, this.name), key(STAT, PROCESSED, this.name)); this.jedis.quit(); this.threadRef.set(null); } } else if (RUNNING.equals(this.state.get())) { throw new IllegalStateException("This WorkerImpl is already running"); } else { throw new IllegalStateException("This WorkerImpl is shutdown"); } } }
public class class_name { @Override public void run() { if (this.state.compareAndSet(NEW, RUNNING)) { try { renameThread("RUNNING"); // depends on control dependency: [try], data = [none] this.threadRef.set(Thread.currentThread()); // depends on control dependency: [try], data = [none] this.jedis.sadd(key(WORKERS), this.name); // depends on control dependency: [try], data = [none] this.jedis.set(key(WORKER, this.name, STARTED), new SimpleDateFormat(DATE_FORMAT).format(new Date())); // depends on control dependency: [try], data = [none] this.listenerDelegate.fireEvent(WORKER_START, this, null, null, null, null, null); // depends on control dependency: [try], data = [none] loadRedisScripts(); // depends on control dependency: [try], data = [none] poll(); // depends on control dependency: [try], data = [none] } catch (Exception ex) { LOG.error("Uncaught exception in worker run-loop!", ex); this.listenerDelegate.fireEvent(WORKER_ERROR, this, null, null, null, null, ex); } finally { // depends on control dependency: [catch], data = [none] renameThread("STOPPING"); this.listenerDelegate.fireEvent(WORKER_STOP, this, null, null, null, null, null); this.jedis.srem(key(WORKERS), this.name); this.jedis.del(key(WORKER, this.name), key(WORKER, this.name, STARTED), key(STAT, FAILED, this.name), key(STAT, PROCESSED, this.name)); this.jedis.quit(); this.threadRef.set(null); } } else if (RUNNING.equals(this.state.get())) { throw new IllegalStateException("This WorkerImpl is already running"); } else { throw new IllegalStateException("This WorkerImpl is shutdown"); } } }
public class class_name { public static Object saveAttachedState(FacesContext context, Object attachedObject) { if (null == context) { throw new NullPointerException(); } if (null == attachedObject) { return null; } Object result; Class mapOrCollectionClass = attachedObject.getClass(); boolean newWillSucceed = true; // first, test for newability of the class. try { int modifiers = mapOrCollectionClass.getModifiers(); newWillSucceed = Modifier.isPublic(modifiers); if (newWillSucceed) { newWillSucceed = null != mapOrCollectionClass.getConstructor(); } } catch (Exception e) { newWillSucceed = false; } if (newWillSucceed && attachedObject instanceof Collection) { Collection attachedCollection = (Collection) attachedObject; List<StateHolderSaver> resultList = null; for (Object item : attachedCollection) { if (item != null) { if (item instanceof StateHolder && ((StateHolder) item).isTransient()) { continue; } if (resultList == null) { resultList = new ArrayList<StateHolderSaver>(attachedCollection.size() + 1); resultList.add(new StateHolderSaver(context, mapOrCollectionClass)); } resultList.add(new StateHolderSaver(context, item)); } } result = resultList; } else if (newWillSucceed && attachedObject instanceof Map) { Map<Object, Object> attachedMap = (Map<Object, Object>) attachedObject; List<StateHolderSaver> resultList = null; Object key, value; for (Map.Entry<Object, Object> entry : attachedMap.entrySet()) { key = entry.getKey(); if (key instanceof StateHolder && ((StateHolder)key).isTransient()) { continue; } value = entry.getValue(); if (value instanceof StateHolder && ((StateHolder)value).isTransient()) { continue; } if (resultList == null) { resultList = new ArrayList<StateHolderSaver>(attachedMap.size()*2 + 1); resultList.add(new StateHolderSaver(context, mapOrCollectionClass)); } resultList.add(new StateHolderSaver(context, key)); resultList.add(new StateHolderSaver(context, value)); } result = resultList; } else { result = new StateHolderSaver(context, attachedObject); } return result; } }
public class class_name { public static Object saveAttachedState(FacesContext context, Object attachedObject) { if (null == context) { throw new NullPointerException(); } if (null == attachedObject) { return null; // depends on control dependency: [if], data = [none] } Object result; Class mapOrCollectionClass = attachedObject.getClass(); boolean newWillSucceed = true; // first, test for newability of the class. try { int modifiers = mapOrCollectionClass.getModifiers(); newWillSucceed = Modifier.isPublic(modifiers); // depends on control dependency: [try], data = [none] if (newWillSucceed) { newWillSucceed = null != mapOrCollectionClass.getConstructor(); // depends on control dependency: [if], data = [none] } } catch (Exception e) { newWillSucceed = false; } // depends on control dependency: [catch], data = [none] if (newWillSucceed && attachedObject instanceof Collection) { Collection attachedCollection = (Collection) attachedObject; List<StateHolderSaver> resultList = null; for (Object item : attachedCollection) { if (item != null) { if (item instanceof StateHolder && ((StateHolder) item).isTransient()) { continue; } if (resultList == null) { resultList = new ArrayList<StateHolderSaver>(attachedCollection.size() + 1); // depends on control dependency: [if], data = [none] resultList.add(new StateHolderSaver(context, mapOrCollectionClass)); // depends on control dependency: [if], data = [none] } resultList.add(new StateHolderSaver(context, item)); // depends on control dependency: [if], data = [none] } } result = resultList; // depends on control dependency: [if], data = [none] } else if (newWillSucceed && attachedObject instanceof Map) { Map<Object, Object> attachedMap = (Map<Object, Object>) attachedObject; List<StateHolderSaver> resultList = null; Object key, value; for (Map.Entry<Object, Object> entry : attachedMap.entrySet()) { key = entry.getKey(); // depends on control dependency: [for], data = [entry] if (key instanceof StateHolder && ((StateHolder)key).isTransient()) { continue; } value = entry.getValue(); // depends on control dependency: [for], data = [entry] if (value instanceof StateHolder && ((StateHolder)value).isTransient()) { continue; } if (resultList == null) { resultList = new ArrayList<StateHolderSaver>(attachedMap.size()*2 + 1); // depends on control dependency: [if], data = [none] resultList.add(new StateHolderSaver(context, mapOrCollectionClass)); // depends on control dependency: [if], data = [none] } resultList.add(new StateHolderSaver(context, key)); // depends on control dependency: [for], data = [none] resultList.add(new StateHolderSaver(context, value)); // depends on control dependency: [for], data = [none] } result = resultList; // depends on control dependency: [if], data = [none] } else { result = new StateHolderSaver(context, attachedObject); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public Object getObject(String type, Package pkg) { if (type==null || type.equals(documentType)) { if (object==null && content!=null) { object = VariableTranslator.realToObject(pkg, documentType, content); } } else { if (content!=null) { documentType = type; object = VariableTranslator.realToObject(pkg, documentType, content); } else if (object!=null) { content = VariableTranslator.realToString(pkg, documentType, object); documentType = type; object = VariableTranslator.realToObject(pkg, documentType, content); } } return object; } }
public class class_name { public Object getObject(String type, Package pkg) { if (type==null || type.equals(documentType)) { if (object==null && content!=null) { object = VariableTranslator.realToObject(pkg, documentType, content); // depends on control dependency: [if], data = [none] } } else { if (content!=null) { documentType = type; // depends on control dependency: [if], data = [none] object = VariableTranslator.realToObject(pkg, documentType, content); // depends on control dependency: [if], data = [none] } else if (object!=null) { content = VariableTranslator.realToString(pkg, documentType, object); // depends on control dependency: [if], data = [none] documentType = type; // depends on control dependency: [if], data = [none] object = VariableTranslator.realToObject(pkg, documentType, content); // depends on control dependency: [if], data = [none] } } return object; } }
public class class_name { public UpdateTypedLinkFacetRequest withAttributeUpdates(TypedLinkFacetAttributeUpdate... attributeUpdates) { if (this.attributeUpdates == null) { setAttributeUpdates(new java.util.ArrayList<TypedLinkFacetAttributeUpdate>(attributeUpdates.length)); } for (TypedLinkFacetAttributeUpdate ele : attributeUpdates) { this.attributeUpdates.add(ele); } return this; } }
public class class_name { public UpdateTypedLinkFacetRequest withAttributeUpdates(TypedLinkFacetAttributeUpdate... attributeUpdates) { if (this.attributeUpdates == null) { setAttributeUpdates(new java.util.ArrayList<TypedLinkFacetAttributeUpdate>(attributeUpdates.length)); // depends on control dependency: [if], data = [none] } for (TypedLinkFacetAttributeUpdate ele : attributeUpdates) { this.attributeUpdates.add(ele); // depends on control dependency: [for], data = [ele] } return this; } }
public class class_name { @Override public Permission createPerm(String p) { String[] params = Split.split('|', p); if(params.length==3) { return new AAFPermission(params[0],params[1],params[2]); } else { return new LocalPermission(p); } } }
public class class_name { @Override public Permission createPerm(String p) { String[] params = Split.split('|', p); if(params.length==3) { return new AAFPermission(params[0],params[1],params[2]); // depends on control dependency: [if], data = [none] } else { return new LocalPermission(p); // depends on control dependency: [if], data = [none] } } }
public class class_name { protected String getQualified(String prefix, String localName) { /* * This mostly/only helps with empty/text-only elements... might make * sense to do 'real' caching... */ if (localName.equals(mLastLocalName) && prefix.endsWith(mLastPrefix)) { return mLastQName; } String qn = prefix + ':' + localName; mLastQName = qn; return qn; } }
public class class_name { protected String getQualified(String prefix, String localName) { /* * This mostly/only helps with empty/text-only elements... might make * sense to do 'real' caching... */ if (localName.equals(mLastLocalName) && prefix.endsWith(mLastPrefix)) { return mLastQName; // depends on control dependency: [if], data = [none] } String qn = prefix + ':' + localName; mLastQName = qn; return qn; } }
public class class_name { public static void swap(char[] charArray1, int array1Index, char[] charArray2, int array2Index) { if(charArray1[array1Index] != charArray2[array2Index]) { charArray1[array1Index] = (char) (charArray1[array1Index] ^ charArray2[array2Index]); charArray2[array2Index] = (char) (charArray1[array1Index] ^ charArray2[array2Index]); charArray1[array1Index] = (char) (charArray1[array1Index] ^ charArray2[array2Index]); } } }
public class class_name { public static void swap(char[] charArray1, int array1Index, char[] charArray2, int array2Index) { if(charArray1[array1Index] != charArray2[array2Index]) { charArray1[array1Index] = (char) (charArray1[array1Index] ^ charArray2[array2Index]); // depends on control dependency: [if], data = [(charArray1[array1Index]] charArray2[array2Index] = (char) (charArray1[array1Index] ^ charArray2[array2Index]); // depends on control dependency: [if], data = [(charArray1[array1Index]] charArray1[array1Index] = (char) (charArray1[array1Index] ^ charArray2[array2Index]); // depends on control dependency: [if], data = [(charArray1[array1Index]] } } }
public class class_name { public void insertRow(Object rowHandle, Object... fieldList) throws InterruptedException { int partitionId = 0; //Find partition to send this row to and put on correct PerPartitionTable. if (fieldList == null || fieldList.length <= 0) { String errMsg; if (rowHandle == null) errMsg = "Error: insertRow received empty fieldList"; else errMsg = "Error: insertRow received empty fieldList for row: " + rowHandle.toString(); generateError(rowHandle, fieldList, errMsg); return; } if (fieldList.length != m_columnCnt) { String errMsg; if (rowHandle == null) errMsg = "Error: insertRow received incorrect number of columns; " + fieldList.length + " found, " + m_columnCnt + " expected"; else errMsg = "Error: insertRow received incorrect number of columns; " + fieldList.length + " found, " + m_columnCnt + " expected for row: " + rowHandle.toString(); generateError(rowHandle, fieldList, errMsg); return; } VoltBulkLoaderRow newRow = new VoltBulkLoaderRow(this, rowHandle, fieldList); if (m_isMP) { m_partitionTable[m_firstPartitionTable].insertRowInTable(newRow); } else { try { partitionId = (int)m_clientImpl.getPartitionForParameter( m_partitionColumnType.getValue(), fieldList[m_partitionedColumnIndex]); m_partitionTable[partitionId].insertRowInTable(newRow); } catch (VoltTypeException e) { generateError(rowHandle, fieldList, e.getMessage()); return; } } m_outstandingRowCount.incrementAndGet(); } }
public class class_name { public void insertRow(Object rowHandle, Object... fieldList) throws InterruptedException { int partitionId = 0; //Find partition to send this row to and put on correct PerPartitionTable. if (fieldList == null || fieldList.length <= 0) { String errMsg; if (rowHandle == null) errMsg = "Error: insertRow received empty fieldList"; else errMsg = "Error: insertRow received empty fieldList for row: " + rowHandle.toString(); generateError(rowHandle, fieldList, errMsg); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if (fieldList.length != m_columnCnt) { String errMsg; if (rowHandle == null) errMsg = "Error: insertRow received incorrect number of columns; " + fieldList.length + " found, " + m_columnCnt + " expected"; // depends on control dependency: [if], data = [none] else errMsg = "Error: insertRow received incorrect number of columns; " + fieldList.length + " found, " + m_columnCnt + " expected for row: " + rowHandle.toString(); generateError(rowHandle, fieldList, errMsg); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } VoltBulkLoaderRow newRow = new VoltBulkLoaderRow(this, rowHandle, fieldList); if (m_isMP) { m_partitionTable[m_firstPartitionTable].insertRowInTable(newRow); // depends on control dependency: [if], data = [none] } else { try { partitionId = (int)m_clientImpl.getPartitionForParameter( m_partitionColumnType.getValue(), fieldList[m_partitionedColumnIndex]); // depends on control dependency: [try], data = [none] m_partitionTable[partitionId].insertRowInTable(newRow); // depends on control dependency: [try], data = [none] } catch (VoltTypeException e) { generateError(rowHandle, fieldList, e.getMessage()); return; } // depends on control dependency: [catch], data = [none] } m_outstandingRowCount.incrementAndGet(); } }
public class class_name { public void manage(Collection<SupervisedWorker> workers) { if(workers == null) return; this.workers.clear(); SupervisedWorker worker = null; for (Iterator<SupervisedWorker> i = workers.iterator(); i.hasNext();) { worker = i.next(); this.manage(worker); } } }
public class class_name { public void manage(Collection<SupervisedWorker> workers) { if(workers == null) return; this.workers.clear(); SupervisedWorker worker = null; for (Iterator<SupervisedWorker> i = workers.iterator(); i.hasNext();) { worker = i.next(); // depends on control dependency: [for], data = [i] this.manage(worker); // depends on control dependency: [for], data = [none] } } }
public class class_name { public void setPatternScheme(final boolean isByWeekDay, final boolean fireChange) { if (isByWeekDay ^ (null != m_model.getWeekDay())) { removeExceptionsOnChange(new Command() { public void execute() { if (isByWeekDay) { m_model.setWeekOfMonth(getPatternDefaultValues().getWeekOfMonth()); m_model.setWeekDay(getPatternDefaultValues().getWeekDay()); } else { m_model.clearWeekDays(); m_model.clearWeeksOfMonth(); m_model.setDayOfMonth(getPatternDefaultValues().getDayOfMonth()); } m_model.setInterval(getPatternDefaultValues().getInterval()); if (fireChange) { onValueChange(); } } }); } } }
public class class_name { public void setPatternScheme(final boolean isByWeekDay, final boolean fireChange) { if (isByWeekDay ^ (null != m_model.getWeekDay())) { removeExceptionsOnChange(new Command() { public void execute() { if (isByWeekDay) { m_model.setWeekOfMonth(getPatternDefaultValues().getWeekOfMonth()); // depends on control dependency: [if], data = [none] m_model.setWeekDay(getPatternDefaultValues().getWeekDay()); // depends on control dependency: [if], data = [none] } else { m_model.clearWeekDays(); // depends on control dependency: [if], data = [none] m_model.clearWeeksOfMonth(); // depends on control dependency: [if], data = [none] m_model.setDayOfMonth(getPatternDefaultValues().getDayOfMonth()); // depends on control dependency: [if], data = [none] } m_model.setInterval(getPatternDefaultValues().getInterval()); if (fireChange) { onValueChange(); // depends on control dependency: [if], data = [none] } } }); // depends on control dependency: [if], data = [none] } } }
public class class_name { private InputColumn<?> findOrderByColumn(final AnalysisJobBuilder jobBuilder) { final Table sourceTable = jobBuilder.getSourceTables().get(0); // preferred strategy: Use the primary key final List<Column> primaryKeys = sourceTable.getPrimaryKeys(); if (primaryKeys.size() == 1) { final Column primaryKey = primaryKeys.get(0); final InputColumn<?> sourceColumn = jobBuilder.getSourceColumnByName(primaryKey.getName()); if (sourceColumn == null) { jobBuilder.addSourceColumn(primaryKey); logger.info("Added PK source column for ORDER BY clause on slave jobs: {}", sourceColumn); return jobBuilder.getSourceColumnByName(primaryKey.getName()); } else { logger.info("Using existing PK source column for ORDER BY clause on slave jobs: {}", sourceColumn); return sourceColumn; } } else { if (logger.isDebugEnabled()) { logger.debug("Found {} primary keys, cannot select a single for ORDER BY clause on slave jobs: {}", primaryKeys.size(), primaryKeys.size()); } } // secondary strategy: See if there's a source column called something // like 'ID' or so, and use that. final List<MetaModelInputColumn> sourceColumns = jobBuilder.getSourceColumns(); final String tableName = sourceTable.getName().toLowerCase(); for (final MetaModelInputColumn sourceColumn : sourceColumns) { String name = sourceColumn.getName(); if (name != null) { name = StringUtils.replaceWhitespaces(name, ""); name = StringUtils.replaceAll(name, "_", ""); name = StringUtils.replaceAll(name, "-", ""); name = name.toLowerCase(); if ("id".equals(name) || (tableName + "id").equals(name) || (tableName + "number").equals(name) || ( tableName + "key").equals(name)) { logger.info("Using existing source column for ORDER BY clause on slave jobs: {}", sourceColumn); return sourceColumn; } } } // last resort: Pick any source column and sort on that (might not work // if the column contains a lot of repeated values) final MetaModelInputColumn sourceColumn = sourceColumns.get(0); logger.warn( "Couldn't pick a good source column for ORDER BY clause on slave jobs. Picking the first column: {}", sourceColumn); return sourceColumn; } }
public class class_name { private InputColumn<?> findOrderByColumn(final AnalysisJobBuilder jobBuilder) { final Table sourceTable = jobBuilder.getSourceTables().get(0); // preferred strategy: Use the primary key final List<Column> primaryKeys = sourceTable.getPrimaryKeys(); if (primaryKeys.size() == 1) { final Column primaryKey = primaryKeys.get(0); final InputColumn<?> sourceColumn = jobBuilder.getSourceColumnByName(primaryKey.getName()); if (sourceColumn == null) { jobBuilder.addSourceColumn(primaryKey); // depends on control dependency: [if], data = [none] logger.info("Added PK source column for ORDER BY clause on slave jobs: {}", sourceColumn); // depends on control dependency: [if], data = [none] return jobBuilder.getSourceColumnByName(primaryKey.getName()); // depends on control dependency: [if], data = [none] } else { logger.info("Using existing PK source column for ORDER BY clause on slave jobs: {}", sourceColumn); // depends on control dependency: [if], data = [none] return sourceColumn; // depends on control dependency: [if], data = [none] } } else { if (logger.isDebugEnabled()) { logger.debug("Found {} primary keys, cannot select a single for ORDER BY clause on slave jobs: {}", primaryKeys.size(), primaryKeys.size()); // depends on control dependency: [if], data = [none] } } // secondary strategy: See if there's a source column called something // like 'ID' or so, and use that. final List<MetaModelInputColumn> sourceColumns = jobBuilder.getSourceColumns(); final String tableName = sourceTable.getName().toLowerCase(); for (final MetaModelInputColumn sourceColumn : sourceColumns) { String name = sourceColumn.getName(); if (name != null) { name = StringUtils.replaceWhitespaces(name, ""); // depends on control dependency: [if], data = [(name] name = StringUtils.replaceAll(name, "_", ""); // depends on control dependency: [if], data = [(name] name = StringUtils.replaceAll(name, "-", ""); // depends on control dependency: [if], data = [(name] name = name.toLowerCase(); // depends on control dependency: [if], data = [none] if ("id".equals(name) || (tableName + "id").equals(name) || (tableName + "number").equals(name) || ( tableName + "key").equals(name)) { logger.info("Using existing source column for ORDER BY clause on slave jobs: {}", sourceColumn); // depends on control dependency: [if], data = [none] return sourceColumn; // depends on control dependency: [if], data = [none] } } } // last resort: Pick any source column and sort on that (might not work // if the column contains a lot of repeated values) final MetaModelInputColumn sourceColumn = sourceColumns.get(0); logger.warn( "Couldn't pick a good source column for ORDER BY clause on slave jobs. Picking the first column: {}", sourceColumn); return sourceColumn; } }
public class class_name { String wrap(String toWrap, int len, int start) { StringBuilder buff = new StringBuilder(); StringBuilder line = new StringBuilder(); char[] head = new char[start]; Arrays.fill(head, ' '); for (StringTokenizer tok = new StringTokenizer(toWrap, " "); tok.hasMoreTokens();) { String next = tok.nextToken(); final int x = line.length(); line.append(line.length() == 0 ? "" : " ").append(next); if (line.length() > len) { // The line is now too long. Backtrack: remove the last word, start a // new line containing just that word. line.setLength(x); buff.append(line).append(SEPARATOR).append(head); line.setLength(0); line.append(next); } } buff.append(line); return buff.toString(); } }
public class class_name { String wrap(String toWrap, int len, int start) { StringBuilder buff = new StringBuilder(); StringBuilder line = new StringBuilder(); char[] head = new char[start]; Arrays.fill(head, ' '); for (StringTokenizer tok = new StringTokenizer(toWrap, " "); tok.hasMoreTokens();) { String next = tok.nextToken(); final int x = line.length(); line.append(line.length() == 0 ? "" : " ").append(next); // depends on control dependency: [for], data = [none] if (line.length() > len) { // The line is now too long. Backtrack: remove the last word, start a // new line containing just that word. line.setLength(x); // depends on control dependency: [if], data = [none] buff.append(line).append(SEPARATOR).append(head); // depends on control dependency: [if], data = [none] line.setLength(0); // depends on control dependency: [if], data = [none] line.append(next); // depends on control dependency: [if], data = [none] } } buff.append(line); return buff.toString(); } }
public class class_name { public void marshall(CreateCertificateAuthorityRequest createCertificateAuthorityRequest, ProtocolMarshaller protocolMarshaller) { if (createCertificateAuthorityRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(createCertificateAuthorityRequest.getCertificateAuthorityConfiguration(), CERTIFICATEAUTHORITYCONFIGURATION_BINDING); protocolMarshaller.marshall(createCertificateAuthorityRequest.getRevocationConfiguration(), REVOCATIONCONFIGURATION_BINDING); protocolMarshaller.marshall(createCertificateAuthorityRequest.getCertificateAuthorityType(), CERTIFICATEAUTHORITYTYPE_BINDING); protocolMarshaller.marshall(createCertificateAuthorityRequest.getIdempotencyToken(), IDEMPOTENCYTOKEN_BINDING); protocolMarshaller.marshall(createCertificateAuthorityRequest.getTags(), TAGS_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(CreateCertificateAuthorityRequest createCertificateAuthorityRequest, ProtocolMarshaller protocolMarshaller) { if (createCertificateAuthorityRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(createCertificateAuthorityRequest.getCertificateAuthorityConfiguration(), CERTIFICATEAUTHORITYCONFIGURATION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createCertificateAuthorityRequest.getRevocationConfiguration(), REVOCATIONCONFIGURATION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createCertificateAuthorityRequest.getCertificateAuthorityType(), CERTIFICATEAUTHORITYTYPE_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createCertificateAuthorityRequest.getIdempotencyToken(), IDEMPOTENCYTOKEN_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(createCertificateAuthorityRequest.getTags(), TAGS_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public Optional<Object> getData(Object key) { if (key == null) { throw new IllegalArgumentException("key cannot be null"); } for (ContextData c = this; c != null; c = c.parent) { if (key.equals(c.key)) { return Optional.of(c.value); } } return Optional.empty(); } }
public class class_name { public Optional<Object> getData(Object key) { if (key == null) { throw new IllegalArgumentException("key cannot be null"); } for (ContextData c = this; c != null; c = c.parent) { if (key.equals(c.key)) { return Optional.of(c.value); // depends on control dependency: [if], data = [none] } } return Optional.empty(); } }
public class class_name { public AnnotationValueBuilder<T> member(String name, @Nullable Class<?>... types) { if (types != null) { AnnotationClassValue[] classValues = new AnnotationClassValue[types.length]; for (int i = 0; i < types.length; i++) { Class<?> type = types[i]; classValues[i] = new AnnotationClassValue<>(type); } values.put(name, classValues); } return this; } }
public class class_name { public AnnotationValueBuilder<T> member(String name, @Nullable Class<?>... types) { if (types != null) { AnnotationClassValue[] classValues = new AnnotationClassValue[types.length]; for (int i = 0; i < types.length; i++) { Class<?> type = types[i]; classValues[i] = new AnnotationClassValue<>(type); } values.put(name, classValues); } return this; // depends on control dependency: [if], data = [none] } }
public class class_name { private boolean validSubsetSizeRangeExplored(){ if(isIncreasing()){ return getCurrentSolution().getNumSelectedIDs() >= getProblem().getMaxSubsetSize(); } else { return getCurrentSolution().getNumSelectedIDs() <= getProblem().getMinSubsetSize(); } } }
public class class_name { private boolean validSubsetSizeRangeExplored(){ if(isIncreasing()){ return getCurrentSolution().getNumSelectedIDs() >= getProblem().getMaxSubsetSize(); // depends on control dependency: [if], data = [none] } else { return getCurrentSolution().getNumSelectedIDs() <= getProblem().getMinSubsetSize(); // depends on control dependency: [if], data = [none] } } }
public class class_name { private int collapsedGibbsSampling(Dataframe dataset) { AbstractModelParameters modelParameters = knowledgeBase.getModelParameters(); Map<Integer, CL> clusterMap = modelParameters.getClusterMap(); AbstractTrainingParameters trainingParameters = knowledgeBase.getTrainingParameters(); double alpha = trainingParameters.getAlpha(); //Initialize clusters, create a cluster for every xi Integer newClusterId = clusterMap.size(); //start counting the Ids based on clusters in the list if(trainingParameters.getInitializationMethod()==AbstractTrainingParameters.Initialization.ONE_CLUSTER_PER_RECORD) { for(Map.Entry<Integer, Record> e : dataset.entries()) { Integer rId = e.getKey(); Record r = e.getValue(); //generate a new cluster CL cluster = createNewCluster(newClusterId); cluster.add(r); clusterMap.put(newClusterId, cluster); //add the record in the new cluster r = new Record(r.getX(), r.getY(), newClusterId, r.getYPredictedProbabilities()); dataset._unsafe_set(rId, r); ++newClusterId; } } else { int numberOfNewClusters = (int)(Math.max(alpha, 1)*Math.log(dataset.size())); //a*log(n) clusters on average if(numberOfNewClusters<=0) { numberOfNewClusters=1; } //generate new clusters for(int i=0;i<numberOfNewClusters;++i) { //generate a new cluster CL cluster = createNewCluster(newClusterId); clusterMap.put(newClusterId, cluster); ++newClusterId; } int clusterMapSize = newClusterId; for(Map.Entry<Integer, Record> e : dataset.entries()) { Integer rId = e.getKey(); Record r = e.getValue(); Integer assignedClusterId = PHPMethods.mt_rand(0, clusterMapSize-1); r = new Record(r.getX(), r.getY(), assignedClusterId, r.getYPredictedProbabilities()); dataset._unsafe_set(rId, r); CL c = getFromClusterMap(assignedClusterId, clusterMap); c.add(r); clusterMap.put(assignedClusterId, c); } } int n = clusterMap.size(); int maxIterations = trainingParameters.getMaxIterations(); boolean noChangeMade=false; int iteration=0; while(iteration<maxIterations && noChangeMade==false) { logger.debug("Iteration {}", iteration); noChangeMade=true; for(Map.Entry<Integer, Record> e : dataset.entries()) { Integer rId = e.getKey(); Record r = e.getValue(); Integer pointClusterId = (Integer) r.getYPredicted(); CL ci = getFromClusterMap(pointClusterId, clusterMap); //remove the point from the cluster ci.remove(r); //if empty cluster remove it if(ci.size()==0) { clusterMap.remove(pointClusterId); } else { clusterMap.put(pointClusterId, ci); } AssociativeArray condProbCiGivenXiAndOtherCi = clusterProbabilities(r, n, clusterMap); //Calculate the probabilities of assigning the point to a new cluster //compute P*(X[i]) = P(X[i]|λ) CL cNew = createNewCluster(newClusterId); double priorLogPredictive = cNew.posteriorLogPdf(r); //compute P(z[i] = * | z[-i], Data) = α/(α+N-1) double probNewCluster = alpha/(alpha+n-1.0); condProbCiGivenXiAndOtherCi.put(newClusterId, priorLogPredictive+Math.log(probNewCluster)); //normalize probabilities P(z[i]) Descriptives.normalizeExp(condProbCiGivenXiAndOtherCi); Integer sampledClusterId = (Integer) SimpleRandomSampling.weightedSampling(condProbCiGivenXiAndOtherCi, 1, true).iterator().next(); //condProbCiGivenXiAndOtherCi=null; //Add Xi back to the sampled AbstractCluster if(Objects.equals(sampledClusterId, newClusterId)) { //if new cluster //add the record in the new cluster r = new Record(r.getX(), r.getY(), newClusterId, r.getYPredictedProbabilities()); dataset._unsafe_set(rId, r); cNew.add(r); clusterMap.put(newClusterId, cNew); noChangeMade=false; ++newClusterId; } else { if(!Objects.equals(pointClusterId, sampledClusterId)) { //if it assigned in a different cluster update the record r = new Record(r.getX(), r.getY(), sampledClusterId, r.getYPredictedProbabilities()); dataset._unsafe_set(rId, r); noChangeMade=false; } CL c = getFromClusterMap(sampledClusterId, clusterMap); c.add(r); //add it to the cluster (or just add it back) clusterMap.put(sampledClusterId, c); } } ++iteration; } return iteration; } }
public class class_name { private int collapsedGibbsSampling(Dataframe dataset) { AbstractModelParameters modelParameters = knowledgeBase.getModelParameters(); Map<Integer, CL> clusterMap = modelParameters.getClusterMap(); AbstractTrainingParameters trainingParameters = knowledgeBase.getTrainingParameters(); double alpha = trainingParameters.getAlpha(); //Initialize clusters, create a cluster for every xi Integer newClusterId = clusterMap.size(); //start counting the Ids based on clusters in the list if(trainingParameters.getInitializationMethod()==AbstractTrainingParameters.Initialization.ONE_CLUSTER_PER_RECORD) { for(Map.Entry<Integer, Record> e : dataset.entries()) { Integer rId = e.getKey(); Record r = e.getValue(); //generate a new cluster CL cluster = createNewCluster(newClusterId); cluster.add(r); // depends on control dependency: [for], data = [e] clusterMap.put(newClusterId, cluster); // depends on control dependency: [for], data = [e] //add the record in the new cluster r = new Record(r.getX(), r.getY(), newClusterId, r.getYPredictedProbabilities()); // depends on control dependency: [for], data = [e] dataset._unsafe_set(rId, r); // depends on control dependency: [for], data = [e] ++newClusterId; // depends on control dependency: [for], data = [e] } } else { int numberOfNewClusters = (int)(Math.max(alpha, 1)*Math.log(dataset.size())); //a*log(n) clusters on average if(numberOfNewClusters<=0) { numberOfNewClusters=1; // depends on control dependency: [if], data = [none] } //generate new clusters for(int i=0;i<numberOfNewClusters;++i) { //generate a new cluster CL cluster = createNewCluster(newClusterId); clusterMap.put(newClusterId, cluster); // depends on control dependency: [for], data = [none] ++newClusterId; // depends on control dependency: [for], data = [none] } int clusterMapSize = newClusterId; for(Map.Entry<Integer, Record> e : dataset.entries()) { Integer rId = e.getKey(); Record r = e.getValue(); Integer assignedClusterId = PHPMethods.mt_rand(0, clusterMapSize-1); r = new Record(r.getX(), r.getY(), assignedClusterId, r.getYPredictedProbabilities()); // depends on control dependency: [for], data = [e] dataset._unsafe_set(rId, r); // depends on control dependency: [for], data = [e] CL c = getFromClusterMap(assignedClusterId, clusterMap); c.add(r); // depends on control dependency: [for], data = [none] clusterMap.put(assignedClusterId, c); // depends on control dependency: [for], data = [e] } } int n = clusterMap.size(); int maxIterations = trainingParameters.getMaxIterations(); boolean noChangeMade=false; int iteration=0; while(iteration<maxIterations && noChangeMade==false) { logger.debug("Iteration {}", iteration); // depends on control dependency: [while], data = [none] noChangeMade=true; // depends on control dependency: [while], data = [none] for(Map.Entry<Integer, Record> e : dataset.entries()) { Integer rId = e.getKey(); Record r = e.getValue(); Integer pointClusterId = (Integer) r.getYPredicted(); CL ci = getFromClusterMap(pointClusterId, clusterMap); //remove the point from the cluster ci.remove(r); // depends on control dependency: [for], data = [e] //if empty cluster remove it if(ci.size()==0) { clusterMap.remove(pointClusterId); // depends on control dependency: [if], data = [none] } else { clusterMap.put(pointClusterId, ci); // depends on control dependency: [if], data = [none] } AssociativeArray condProbCiGivenXiAndOtherCi = clusterProbabilities(r, n, clusterMap); //Calculate the probabilities of assigning the point to a new cluster //compute P*(X[i]) = P(X[i]|λ) CL cNew = createNewCluster(newClusterId); double priorLogPredictive = cNew.posteriorLogPdf(r); //compute P(z[i] = * | z[-i], Data) = α/(α+N-1) double probNewCluster = alpha/(alpha+n-1.0); condProbCiGivenXiAndOtherCi.put(newClusterId, priorLogPredictive+Math.log(probNewCluster)); // depends on control dependency: [for], data = [e] //normalize probabilities P(z[i]) Descriptives.normalizeExp(condProbCiGivenXiAndOtherCi); // depends on control dependency: [for], data = [e] Integer sampledClusterId = (Integer) SimpleRandomSampling.weightedSampling(condProbCiGivenXiAndOtherCi, 1, true).iterator().next(); //condProbCiGivenXiAndOtherCi=null; //Add Xi back to the sampled AbstractCluster if(Objects.equals(sampledClusterId, newClusterId)) { //if new cluster //add the record in the new cluster r = new Record(r.getX(), r.getY(), newClusterId, r.getYPredictedProbabilities()); // depends on control dependency: [if], data = [none] dataset._unsafe_set(rId, r); // depends on control dependency: [if], data = [none] cNew.add(r); // depends on control dependency: [if], data = [none] clusterMap.put(newClusterId, cNew); // depends on control dependency: [if], data = [none] noChangeMade=false; // depends on control dependency: [if], data = [none] ++newClusterId; // depends on control dependency: [if], data = [none] } else { if(!Objects.equals(pointClusterId, sampledClusterId)) { //if it assigned in a different cluster update the record r = new Record(r.getX(), r.getY(), sampledClusterId, r.getYPredictedProbabilities()); // depends on control dependency: [if], data = [none] dataset._unsafe_set(rId, r); // depends on control dependency: [if], data = [none] noChangeMade=false; // depends on control dependency: [if], data = [none] } CL c = getFromClusterMap(sampledClusterId, clusterMap); c.add(r); //add it to the cluster (or just add it back) // depends on control dependency: [if], data = [none] clusterMap.put(sampledClusterId, c); // depends on control dependency: [if], data = [none] } } ++iteration; // depends on control dependency: [while], data = [none] } return iteration; } }
public class class_name { public static void sort(IRingSet ringSet) { List<IRing> ringList = new ArrayList<IRing>(); for (IAtomContainer atomContainer : ringSet.atomContainers()) { ringList.add((IRing) atomContainer); } Collections.sort(ringList, new RingSizeComparator(RingSizeComparator.SMALL_FIRST)); ringSet.removeAllAtomContainers(); for (IAtomContainer aRingList : ringList) ringSet.addAtomContainer(aRingList); } }
public class class_name { public static void sort(IRingSet ringSet) { List<IRing> ringList = new ArrayList<IRing>(); for (IAtomContainer atomContainer : ringSet.atomContainers()) { ringList.add((IRing) atomContainer); // depends on control dependency: [for], data = [atomContainer] } Collections.sort(ringList, new RingSizeComparator(RingSizeComparator.SMALL_FIRST)); ringSet.removeAllAtomContainers(); for (IAtomContainer aRingList : ringList) ringSet.addAtomContainer(aRingList); } }
public class class_name { @Override public IdentifyingToken validate(IdentifyingToken token, @Nullable ChargingStationId chargingStationId) { for (AuthorizationProvider provider : providers) { IdentifyingToken validatedToken = provider.validate(token, chargingStationId); if (validatedToken.isValid()) { return validatedToken; } } return token; } }
public class class_name { @Override public IdentifyingToken validate(IdentifyingToken token, @Nullable ChargingStationId chargingStationId) { for (AuthorizationProvider provider : providers) { IdentifyingToken validatedToken = provider.validate(token, chargingStationId); if (validatedToken.isValid()) { return validatedToken; // depends on control dependency: [if], data = [none] } } return token; } }
public class class_name { public static Object getFieldValue(final Object object, final Field field) { try { if (!field.isAccessible()) { field.setAccessible(true); } return field.get(object); } catch (final Exception e) { throw new IllegalArgumentException("Could not get field value: " + field, e); } } }
public class class_name { public static Object getFieldValue(final Object object, final Field field) { try { if (!field.isAccessible()) { field.setAccessible(true); // depends on control dependency: [if], data = [none] } return field.get(object); // depends on control dependency: [try], data = [none] } catch (final Exception e) { throw new IllegalArgumentException("Could not get field value: " + field, e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static <T> Class<?>[] getClasses(Type[] types) { if (types == null) return null; if (types.length == 0) return new Class<?>[0]; Class<?>[] classes = new Class[types.length]; // Resolve types by chasing down type variables. for (int i = 0; i < types.length; i++) { classes[i] = getClass(types[i]); } return classes; } }
public class class_name { public static <T> Class<?>[] getClasses(Type[] types) { if (types == null) return null; if (types.length == 0) return new Class<?>[0]; Class<?>[] classes = new Class[types.length]; // Resolve types by chasing down type variables. for (int i = 0; i < types.length; i++) { classes[i] = getClass(types[i]); // depends on control dependency: [for], data = [i] } return classes; } }
public class class_name { public String getSlotSharingGroup(Integer id) { if (virtualSideOutputNodes.containsKey(id)) { Integer mappedId = virtualSideOutputNodes.get(id).f0; return getSlotSharingGroup(mappedId); } else if (virtualSelectNodes.containsKey(id)) { Integer mappedId = virtualSelectNodes.get(id).f0; return getSlotSharingGroup(mappedId); } else if (virtualPartitionNodes.containsKey(id)) { Integer mappedId = virtualPartitionNodes.get(id).f0; return getSlotSharingGroup(mappedId); } else { StreamNode node = getStreamNode(id); return node.getSlotSharingGroup(); } } }
public class class_name { public String getSlotSharingGroup(Integer id) { if (virtualSideOutputNodes.containsKey(id)) { Integer mappedId = virtualSideOutputNodes.get(id).f0; return getSlotSharingGroup(mappedId); // depends on control dependency: [if], data = [none] } else if (virtualSelectNodes.containsKey(id)) { Integer mappedId = virtualSelectNodes.get(id).f0; return getSlotSharingGroup(mappedId); // depends on control dependency: [if], data = [none] } else if (virtualPartitionNodes.containsKey(id)) { Integer mappedId = virtualPartitionNodes.get(id).f0; return getSlotSharingGroup(mappedId); // depends on control dependency: [if], data = [none] } else { StreamNode node = getStreamNode(id); return node.getSlotSharingGroup(); // depends on control dependency: [if], data = [none] } } }
public class class_name { static String md5(String origin) { try { MessageDigest md = MessageDigest.getInstance("MD5"); md.update(origin.getBytes("UTF-8")); BigInteger bi = new BigInteger(1, md.digest()); StringBuilder hash = new StringBuilder(bi.toString(16)); while (hash.length() < 32) { hash.insert(0, "0"); } return hash.toString(); } catch (Exception e) { return getUuid(); } } }
public class class_name { static String md5(String origin) { try { MessageDigest md = MessageDigest.getInstance("MD5"); md.update(origin.getBytes("UTF-8")); // depends on control dependency: [try], data = [none] BigInteger bi = new BigInteger(1, md.digest()); StringBuilder hash = new StringBuilder(bi.toString(16)); while (hash.length() < 32) { hash.insert(0, "0"); // depends on control dependency: [while], data = [none] } return hash.toString(); // depends on control dependency: [try], data = [none] } catch (Exception e) { return getUuid(); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public TafResp validate(LifeForm reading, HttpServletRequest req, HttpServletResponse resp) { // Given a LifeForm Neutral, for HTTP, we need to discover true Life-Form Readings if(reading==LifeForm.LFN) { reading = tricorderScan(req); } TafResp tresp=null, firstTry = null; List<Redirectable> redirectables = null; for(HttpTaf taf : tafs) { tresp = taf.validate(reading, req, resp); switch(tresp.isAuthenticated()) { case TRY_ANOTHER_TAF: break; // and loop case TRY_AUTHENTICATING: if(tresp instanceof Redirectable) { if(redirectables==null) { redirectables = new ArrayList<Redirectable>(); } redirectables.add((Redirectable)tresp); } else if(firstTry==null) { firstTry = tresp; } break; case IS_AUTHENTICATED: tresp = trustChecker.mayTrust(tresp, req); return tresp; default: return tresp; } } // If No TAFs configured, at this point. It is safer at this point to be "not validated", // rather than "let it go" // Note: if exists, there will always be more than 0 entries, according to above code if(redirectables==null) { return firstTry!=null?firstTry:NullTafResp.singleton(); } // If there is one Tryable entry then return it if(redirectables.size()>1) { return LoginPageTafResp.create(access,locator,resp,redirectables); } else { return redirectables.get(0); } } }
public class class_name { public TafResp validate(LifeForm reading, HttpServletRequest req, HttpServletResponse resp) { // Given a LifeForm Neutral, for HTTP, we need to discover true Life-Form Readings if(reading==LifeForm.LFN) { reading = tricorderScan(req); // depends on control dependency: [if], data = [none] } TafResp tresp=null, firstTry = null; List<Redirectable> redirectables = null; for(HttpTaf taf : tafs) { tresp = taf.validate(reading, req, resp); // depends on control dependency: [for], data = [taf] switch(tresp.isAuthenticated()) { case TRY_ANOTHER_TAF: break; // and loop case TRY_AUTHENTICATING: if(tresp instanceof Redirectable) { if(redirectables==null) { redirectables = new ArrayList<Redirectable>(); // depends on control dependency: [if], data = [none] } redirectables.add((Redirectable)tresp); // depends on control dependency: [if], data = [none] } else if(firstTry==null) { firstTry = tresp; // depends on control dependency: [if], data = [none] } break; case IS_AUTHENTICATED: tresp = trustChecker.mayTrust(tresp, req); return tresp; // depends on control dependency: [for], data = [none] default: return tresp; } } // If No TAFs configured, at this point. It is safer at this point to be "not validated", // rather than "let it go" // Note: if exists, there will always be more than 0 entries, according to above code if(redirectables==null) { return firstTry!=null?firstTry:NullTafResp.singleton(); } // If there is one Tryable entry then return it if(redirectables.size()>1) { return LoginPageTafResp.create(access,locator,resp,redirectables); } else { return redirectables.get(0); } } }
public class class_name { public <Solution> Collection<Variable<Solution, ?>> createFromGettersAndConstructors( Class<Solution> solutionClass) { if (solutionClass.isInterface()) { throw new IsInterfaceException(solutionClass); } else { Map<String, Method> getters = new HashMap<>(); Map<Class<?>, String> types = new HashMap<>(); for (Method method : solutionClass.getMethods()) { Class<?> returnType = method.getReturnType(); if (method.getParameterTypes().length == 0 && returnType != null && !method.getName().equals("getClass") && method.getName().matches("get[^a-z].*")) { String name = method.getName().substring(3); getters.put(name, method); if (types.containsKey(returnType)) { throw new IllegalArgumentException( types.get(returnType) + " and " + name + " are both of type " + returnType + ", we cannot differentiate them"); } else { types.put(returnType, name); } } else { // not a getter, ignore it } } Collection<Variable<Solution, ?>> variables = new LinkedList<>(); for (Constructor<?> constructor : solutionClass.getConstructors()) { Class<?>[] constructorTypes = constructor.getParameterTypes(); Set<Class<?>> uniqueTypes = new HashSet<>( Arrays.asList(constructorTypes)); if (uniqueTypes.size() < constructorTypes.length) { throw new IllegalArgumentException( "Some constructor types are redundant, we cannot differentiate them: " + Arrays.asList(constructorTypes)); } else { for (Class<?> type : constructorTypes) { String name = types.remove(type); if (name == null) { // constructor value without getter or already done } else { Method getter = getters.get(name); variables.add(createVariableOn(solutionClass, getter, name, type)); } } } } return variables; } } }
public class class_name { public <Solution> Collection<Variable<Solution, ?>> createFromGettersAndConstructors( Class<Solution> solutionClass) { if (solutionClass.isInterface()) { throw new IsInterfaceException(solutionClass); } else { Map<String, Method> getters = new HashMap<>(); Map<Class<?>, String> types = new HashMap<>(); for (Method method : solutionClass.getMethods()) { Class<?> returnType = method.getReturnType(); if (method.getParameterTypes().length == 0 && returnType != null && !method.getName().equals("getClass") && method.getName().matches("get[^a-z].*")) { String name = method.getName().substring(3); getters.put(name, method); if (types.containsKey(returnType)) { throw new IllegalArgumentException( types.get(returnType) + " and " + name + " are both of type " + returnType + ", we cannot differentiate them"); } else { types.put(returnType, name); // depends on control dependency: [if], data = [none] } } else { // not a getter, ignore it } } Collection<Variable<Solution, ?>> variables = new LinkedList<>(); for (Constructor<?> constructor : solutionClass.getConstructors()) { Class<?>[] constructorTypes = constructor.getParameterTypes(); Set<Class<?>> uniqueTypes = new HashSet<>( Arrays.asList(constructorTypes)); if (uniqueTypes.size() < constructorTypes.length) { throw new IllegalArgumentException( "Some constructor types are redundant, we cannot differentiate them: " + Arrays.asList(constructorTypes)); } else { for (Class<?> type : constructorTypes) { String name = types.remove(type); if (name == null) { // constructor value without getter or already done } else { Method getter = getters.get(name); variables.add(createVariableOn(solutionClass, getter, name, type)); } } } } return variables; } } }
public class class_name { public long getFreeSpace() { try { StructStatVfs sb = Libcore.os.statvfs(path); return sb.f_bfree * sb.f_bsize; // free block count * block size in bytes. } catch (ErrnoException errnoException) { return 0; } } }
public class class_name { public long getFreeSpace() { try { StructStatVfs sb = Libcore.os.statvfs(path); return sb.f_bfree * sb.f_bsize; // free block count * block size in bytes. // depends on control dependency: [try], data = [none] } catch (ErrnoException errnoException) { return 0; } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void initListEvents() { this.listChangeListener = new ListChangeListener<SourceType>() { @Override public void onChanged( Change<? extends SourceType> listEvent) { // We have to stage delete events, because if we process them // separately, there will be unwanted ChangeEvents on the // targetList List<TargetType> deleteStaging = new ArrayList<>(); while (listEvent.next()) { if (listEvent.wasUpdated()) { processUpdateEvent(listEvent); } else if (listEvent.wasReplaced()) { processReplaceEvent(listEvent, deleteStaging); } else if (listEvent.wasAdded()) { processAddEvent(listEvent); } else if (listEvent.wasRemoved()) { processRemoveEvent(listEvent, deleteStaging); } } // Process the staged elements processStagingLists(deleteStaging); } }; modelListProperty().addListener( new WeakListChangeListener<>(listChangeListener)); } }
public class class_name { private void initListEvents() { this.listChangeListener = new ListChangeListener<SourceType>() { @Override public void onChanged( Change<? extends SourceType> listEvent) { // We have to stage delete events, because if we process them // separately, there will be unwanted ChangeEvents on the // targetList List<TargetType> deleteStaging = new ArrayList<>(); while (listEvent.next()) { if (listEvent.wasUpdated()) { processUpdateEvent(listEvent); // depends on control dependency: [if], data = [none] } else if (listEvent.wasReplaced()) { processReplaceEvent(listEvent, deleteStaging); // depends on control dependency: [if], data = [none] } else if (listEvent.wasAdded()) { processAddEvent(listEvent); // depends on control dependency: [if], data = [none] } else if (listEvent.wasRemoved()) { processRemoveEvent(listEvent, deleteStaging); // depends on control dependency: [if], data = [none] } } // Process the staged elements processStagingLists(deleteStaging); } }; modelListProperty().addListener( new WeakListChangeListener<>(listChangeListener)); } }
public class class_name { public Token getNextToken() { if (nextToken == null) { try { if (!lex()) { throw new NoSuchElementException(); } } catch (IOException e) { errorMessage = e.getMessage(); hasErrors_ = true; throw new NoSuchElementException(); } } return nextToken; } }
public class class_name { public Token getNextToken() { if (nextToken == null) { try { if (!lex()) { throw new NoSuchElementException(); } } catch (IOException e) { errorMessage = e.getMessage(); hasErrors_ = true; throw new NoSuchElementException(); } // depends on control dependency: [catch], data = [none] } return nextToken; } }
public class class_name { public static Parse parseParse(String parse) { StringBuffer text = new StringBuffer(); int offset = 0; Stack stack = new Stack(); List cons = new LinkedList(); for (int ci = 0, cl = parse.length(); ci < cl; ci++) { char c = parse.charAt(ci); if (c == '(') { String rest = parse.substring(ci + 1); String type = getType(rest); if (type == null) { System.err.println("null type for: " + rest); } String token = getToken(rest); stack.push(new Object[] { type, new Integer(offset)}); if (token != null && !type.equals("-NONE-")) { cons.add(new Object[] { ParserME.TOK_NODE, new Span(offset, offset + token.length())}); text.append(token).append(" "); offset += token.length() + 1; } } else if (c == ')') { Object[] parts = (Object[]) stack.pop(); String type = (String) parts[0]; if (!type.equals("-NONE-")) { int start = ((Integer) parts[1]).intValue(); cons.add(new Object[] { parts[0], new Span(start, offset - 1)}); } } } String txt = text.toString(); Parse p = new Parse(txt, new Span(0, txt.length()), ParserME.TOP_NODE, 1); /* for (int ti=0;ti < tokens.size();ti++) { Object[] parts = (Object[]) cons.get(ti); String type = (String) parts[0]; if (!type.equals(ParserME.TOP_NODE)) { Parse con = new Parse(txt, (Span) parts[1], type, 1); System.err.println("insert "+type+" "+con.toString()); p.insert(con); } } */ for (int ci=0;ci < cons.size();ci++) { Object[] parts = (Object[]) cons.get(ci); String type = (String) parts[0]; if (!type.equals(ParserME.TOP_NODE)) { Parse con = new Parse(txt, (Span) parts[1], type, 1); //System.err.println("insert["+ci+"] "+type+" "+con.toString()+" "+con.hashCode()); p.insert(con); //codeTree(p); } } return p; } }
public class class_name { public static Parse parseParse(String parse) { StringBuffer text = new StringBuffer(); int offset = 0; Stack stack = new Stack(); List cons = new LinkedList(); for (int ci = 0, cl = parse.length(); ci < cl; ci++) { char c = parse.charAt(ci); if (c == '(') { String rest = parse.substring(ci + 1); String type = getType(rest); if (type == null) { System.err.println("null type for: " + rest); // depends on control dependency: [if], data = [none] } String token = getToken(rest); stack.push(new Object[] { type, new Integer(offset)}); // depends on control dependency: [if], data = [none] if (token != null && !type.equals("-NONE-")) { cons.add(new Object[] { ParserME.TOK_NODE, new Span(offset, offset + token.length())}); // depends on control dependency: [if], data = [none] text.append(token).append(" "); // depends on control dependency: [if], data = [(token] offset += token.length() + 1; // depends on control dependency: [if], data = [none] } } else if (c == ')') { Object[] parts = (Object[]) stack.pop(); String type = (String) parts[0]; if (!type.equals("-NONE-")) { int start = ((Integer) parts[1]).intValue(); cons.add(new Object[] { parts[0], new Span(start, offset - 1)}); // depends on control dependency: [if], data = [none] } } } String txt = text.toString(); Parse p = new Parse(txt, new Span(0, txt.length()), ParserME.TOP_NODE, 1); /* for (int ti=0;ti < tokens.size();ti++) { Object[] parts = (Object[]) cons.get(ti); String type = (String) parts[0]; if (!type.equals(ParserME.TOP_NODE)) { Parse con = new Parse(txt, (Span) parts[1], type, 1); System.err.println("insert "+type+" "+con.toString()); p.insert(con); } } */ for (int ci=0;ci < cons.size();ci++) { Object[] parts = (Object[]) cons.get(ci); String type = (String) parts[0]; if (!type.equals(ParserME.TOP_NODE)) { Parse con = new Parse(txt, (Span) parts[1], type, 1); //System.err.println("insert["+ci+"] "+type+" "+con.toString()+" "+con.hashCode()); p.insert(con); // depends on control dependency: [if], data = [none] //codeTree(p); } } return p; } }
public class class_name { public static String readTextFile(File file) { try { return readTextFile(new FileReader(file)); } catch (FileNotFoundException e) { e.printStackTrace(); return null; } } }
public class class_name { public static String readTextFile(File file) { try { return readTextFile(new FileReader(file)); // depends on control dependency: [try], data = [none] } catch (FileNotFoundException e) { e.printStackTrace(); return null; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static Optional<GeneratorSet> reduce( SystemInputDef inputDef, GeneratorSet genDef, SystemTestDef baseDef, ReducerOptions options) { // Create a new set of generators to be updated. GeneratorSet genDefNew = genDef.cloneOf(); // Identify functions to reduce. String function = options.getFunction(); Stream<FunctionInputDef> functionInputDefs; if( function == null) { functionInputDefs = toStream( inputDef.getFunctionInputDefs()); } else if( inputDef.getFunctionInputDef( function) == null) { throw new RuntimeException( "Function=" + function + " is not defined"); } else { functionInputDefs = Stream.of( inputDef.getFunctionInputDef( function)); } // For each of the specified function(s), find a seed that generates minimum test cases Map<String,ITestCaseGenerator> generatorsNew = functionInputDefs .map( functionInputDef -> new SimpleEntry<String,ITestCaseGenerator>( functionInputDef.getName(), reduce( functionInputDef, genDefNew.getGenerator( functionInputDef.getName()), baseDef == null? null : baseDef.getFunctionTestDef( functionInputDef.getName()), options) .orElse( null))) .filter( e -> e.getValue() != null) .collect( toMap( SimpleEntry::getKey, SimpleEntry::getValue)); if( generatorsNew.isEmpty()) { logger_.info( "Generator definitions not changed"); return Optional.empty(); } else { generatorsNew.forEach( (f, g) -> genDefNew.setGenerator( f, g)); return Optional.of( genDefNew); } } }
public class class_name { public static Optional<GeneratorSet> reduce( SystemInputDef inputDef, GeneratorSet genDef, SystemTestDef baseDef, ReducerOptions options) { // Create a new set of generators to be updated. GeneratorSet genDefNew = genDef.cloneOf(); // Identify functions to reduce. String function = options.getFunction(); Stream<FunctionInputDef> functionInputDefs; if( function == null) { functionInputDefs = toStream( inputDef.getFunctionInputDefs()); // depends on control dependency: [if], data = [none] } else if( inputDef.getFunctionInputDef( function) == null) { throw new RuntimeException( "Function=" + function + " is not defined"); } else { functionInputDefs = Stream.of( inputDef.getFunctionInputDef( function)); // depends on control dependency: [if], data = [( inputDef.getFunctionInputDef( function)] } // For each of the specified function(s), find a seed that generates minimum test cases Map<String,ITestCaseGenerator> generatorsNew = functionInputDefs .map( functionInputDef -> new SimpleEntry<String,ITestCaseGenerator>( functionInputDef.getName(), reduce( functionInputDef, genDefNew.getGenerator( functionInputDef.getName()), baseDef == null? null : baseDef.getFunctionTestDef( functionInputDef.getName()), options) .orElse( null))) .filter( e -> e.getValue() != null) .collect( toMap( SimpleEntry::getKey, SimpleEntry::getValue)); if( generatorsNew.isEmpty()) { logger_.info( "Generator definitions not changed"); // depends on control dependency: [if], data = [none] return Optional.empty(); // depends on control dependency: [if], data = [none] } else { generatorsNew.forEach( (f, g) -> genDefNew.setGenerator( f, g)); // depends on control dependency: [if], data = [none] return Optional.of( genDefNew); // depends on control dependency: [if], data = [none] } } }
public class class_name { public synchronized void removeObserver(final ApptentiveNotificationObserver observer) { for (ApptentiveNotificationObserverList observers : observerListLookup.values()) { observers.removeObserver(observer); } } }
public class class_name { public synchronized void removeObserver(final ApptentiveNotificationObserver observer) { for (ApptentiveNotificationObserverList observers : observerListLookup.values()) { observers.removeObserver(observer); // depends on control dependency: [for], data = [observers] } } }
public class class_name { public ComputeNodeReimageHeaders withLastModified(DateTime lastModified) { if (lastModified == null) { this.lastModified = null; } else { this.lastModified = new DateTimeRfc1123(lastModified); } return this; } }
public class class_name { public ComputeNodeReimageHeaders withLastModified(DateTime lastModified) { if (lastModified == null) { this.lastModified = null; // depends on control dependency: [if], data = [none] } else { this.lastModified = new DateTimeRfc1123(lastModified); // depends on control dependency: [if], data = [(lastModified] } return this; } }
public class class_name { private void runScripts(DBConnection dbc, String dropRoot, String schemaName, boolean createSchemas) throws IOException { int fi = 0; boolean fnf = false; while (!fnf) { InputStream sqlStream = getClass().getResourceAsStream(dropRoot + fi + ".sql"); if (sqlStream == null) { fnf = true; break; } String script = IOUtils.toString(sqlStream); String[] sqlStatements = script.split(SQL_SCRIPT_DELIMITER); for (String sqlStatement : sqlStatements) { // MySQL allows schemas to by system or dba managed. If dba managed we must // skip the schema creation phase which is always the first sql script (0.sql) if (createSchemas == false && fi == 0) { continue; } if (schemaName != null) { sqlStatement = sqlStatement.replace(SCHEMA_NAME_PLACEHOLDER, schemaName); } try { databaseService.update(dbc, sqlStatement); } catch (SQLException e) { log.debug("Error running script, message:\n{}\n\n{}\n", new Object[] { e.getMessage(), sqlStatement }); //swallow since DROP statements might fail. } } fi++; } } }
public class class_name { private void runScripts(DBConnection dbc, String dropRoot, String schemaName, boolean createSchemas) throws IOException { int fi = 0; boolean fnf = false; while (!fnf) { InputStream sqlStream = getClass().getResourceAsStream(dropRoot + fi + ".sql"); if (sqlStream == null) { fnf = true; // depends on control dependency: [if], data = [none] break; } String script = IOUtils.toString(sqlStream); String[] sqlStatements = script.split(SQL_SCRIPT_DELIMITER); for (String sqlStatement : sqlStatements) { // MySQL allows schemas to by system or dba managed. If dba managed we must // skip the schema creation phase which is always the first sql script (0.sql) if (createSchemas == false && fi == 0) { continue; } if (schemaName != null) { sqlStatement = sqlStatement.replace(SCHEMA_NAME_PLACEHOLDER, schemaName); // depends on control dependency: [if], data = [none] } try { databaseService.update(dbc, sqlStatement); // depends on control dependency: [try], data = [none] } catch (SQLException e) { log.debug("Error running script, message:\n{}\n\n{}\n", new Object[] { e.getMessage(), sqlStatement }); //swallow since DROP statements might fail. } // depends on control dependency: [catch], data = [none] } fi++; } } }
public class class_name { @Override public final boolean isValid(final Object pvalue, final ConstraintValidatorContext pcontext) { final String valueAsString = Objects.toString(pvalue, null); if (StringUtils.isEmpty(valueAsString)) { // empty field is ok return true; } try { String countryCode = BeanUtils.getProperty(pvalue, fieldCountryCode); final String phoneNumber = BeanUtils.getProperty(pvalue, fieldPhoneNumber); if (StringUtils.isEmpty(phoneNumber)) { return true; } if (allowLowerCaseCountryCode) { countryCode = StringUtils.upperCase(countryCode); } final PathDefinitionInterface pathDefinition = GWT.create(PathDefinitionInterface.class); final String url = pathDefinition.getRestBasePath() + "/" + PhoneNumber.ROOT + "/" + PhoneNumber.VALIDATE // + "?" + Parameters.COUNTRY + "=" + countryCode // + "&" + Parameters.PHONE_NUMBER + "=" + urlEncode(phoneNumber) // + "&" + Parameters.DIN_5008 + "=" + PhoneNumberValueRestValidator.this.allowDin5008 // + "&" + Parameters.E123 + "=" + PhoneNumberValueRestValidator.this.allowE123 // + "&" + Parameters.URI + "=" + PhoneNumberValueRestValidator.this.allowUri // + "&" + Parameters.MS + "=" + PhoneNumberValueRestValidator.this.allowMs // + "&" + Parameters.COMMON + "=" + PhoneNumberValueRestValidator.this.allowCommon; final String restResult = CachedSyncHttpGetCall.syncRestCall(url); if (StringUtils.equalsIgnoreCase("TRUE", restResult)) { return true; } switchContext(pcontext); return false; } catch (final Exception ignore) { switchContext(pcontext); return false; } } }
public class class_name { @Override public final boolean isValid(final Object pvalue, final ConstraintValidatorContext pcontext) { final String valueAsString = Objects.toString(pvalue, null); if (StringUtils.isEmpty(valueAsString)) { // empty field is ok return true; // depends on control dependency: [if], data = [none] } try { String countryCode = BeanUtils.getProperty(pvalue, fieldCountryCode); final String phoneNumber = BeanUtils.getProperty(pvalue, fieldPhoneNumber); if (StringUtils.isEmpty(phoneNumber)) { return true; // depends on control dependency: [if], data = [none] } if (allowLowerCaseCountryCode) { countryCode = StringUtils.upperCase(countryCode); // depends on control dependency: [if], data = [none] } final PathDefinitionInterface pathDefinition = GWT.create(PathDefinitionInterface.class); final String url = pathDefinition.getRestBasePath() + "/" + PhoneNumber.ROOT + "/" + PhoneNumber.VALIDATE // + "?" + Parameters.COUNTRY + "=" + countryCode // + "&" + Parameters.PHONE_NUMBER + "=" + urlEncode(phoneNumber) // + "&" + Parameters.DIN_5008 + "=" + PhoneNumberValueRestValidator.this.allowDin5008 // + "&" + Parameters.E123 + "=" + PhoneNumberValueRestValidator.this.allowE123 // + "&" + Parameters.URI + "=" + PhoneNumberValueRestValidator.this.allowUri // + "&" + Parameters.MS + "=" + PhoneNumberValueRestValidator.this.allowMs // + "&" + Parameters.COMMON + "=" + PhoneNumberValueRestValidator.this.allowCommon; final String restResult = CachedSyncHttpGetCall.syncRestCall(url); if (StringUtils.equalsIgnoreCase("TRUE", restResult)) { return true; // depends on control dependency: [if], data = [none] } switchContext(pcontext); // depends on control dependency: [try], data = [none] return false; // depends on control dependency: [try], data = [none] } catch (final Exception ignore) { switchContext(pcontext); return false; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static CompressionMetadata create(String dataFilePath, FileSystem fs) { Descriptor desc = Descriptor.fromFilename(dataFilePath); try { return new CompressionMetadata(desc.filenameFor(Component.COMPRESSION_INFO), fs.getFileStatus(new Path(dataFilePath)).getLen(), fs); } catch (IOException e) { throw new RuntimeException(e); } } }
public class class_name { public static CompressionMetadata create(String dataFilePath, FileSystem fs) { Descriptor desc = Descriptor.fromFilename(dataFilePath); try { return new CompressionMetadata(desc.filenameFor(Component.COMPRESSION_INFO), fs.getFileStatus(new Path(dataFilePath)).getLen(), fs); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new RuntimeException(e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void onClick(MenuItemClickEvent event) { if (feature != null && feature.isSelected()) { FeatureTransaction ft = mapWidget.getMapModel().getFeatureEditor().startEditing( new Feature[] { feature.clone() }, new Feature[] { feature.clone() }); mapWidget.render(ft, RenderGroup.VECTOR, RenderStatus.ALL); VectorLayer vLayer = feature.getLayer(); if (vLayer.getLayerInfo().getLayerType() == LayerType.POINT) { controller.setController(new PointEditController(mapWidget, controller)); } else if (vLayer.getLayerInfo().getLayerType() == LayerType.MULTIPOINT) { mapWidget.getMapModel().getFeatureEditor().stopEditing(); mapWidget.render(ft, RenderGroup.VECTOR, RenderStatus.DELETE); SC.warn("Editing of MultiPoint layers is not supported yet...."); } else if (vLayer.getLayerInfo().getLayerType() == LayerType.LINESTRING) { controller.setController(new LineStringEditController(mapWidget, controller)); } else if (vLayer.getLayerInfo().getLayerType() == LayerType.MULTILINESTRING) { controller.setController(new MultiLineStringEditController(mapWidget, controller)); } else if (vLayer.getLayerInfo().getLayerType() == LayerType.POLYGON) { controller.setController(new PolygonEditController(mapWidget, controller)); } else if (vLayer.getLayerInfo().getLayerType() == LayerType.MULTIPOLYGON) { controller.setController(new MultiPolygonEditController(mapWidget, controller)); } controller.setEditMode(EditMode.DRAG_MODE); } } }
public class class_name { public void onClick(MenuItemClickEvent event) { if (feature != null && feature.isSelected()) { FeatureTransaction ft = mapWidget.getMapModel().getFeatureEditor().startEditing( new Feature[] { feature.clone() }, new Feature[] { feature.clone() }); mapWidget.render(ft, RenderGroup.VECTOR, RenderStatus.ALL); // depends on control dependency: [if], data = [none] VectorLayer vLayer = feature.getLayer(); if (vLayer.getLayerInfo().getLayerType() == LayerType.POINT) { controller.setController(new PointEditController(mapWidget, controller)); // depends on control dependency: [if], data = [none] } else if (vLayer.getLayerInfo().getLayerType() == LayerType.MULTIPOINT) { mapWidget.getMapModel().getFeatureEditor().stopEditing(); // depends on control dependency: [if], data = [none] mapWidget.render(ft, RenderGroup.VECTOR, RenderStatus.DELETE); // depends on control dependency: [if], data = [none] SC.warn("Editing of MultiPoint layers is not supported yet...."); // depends on control dependency: [if], data = [none] } else if (vLayer.getLayerInfo().getLayerType() == LayerType.LINESTRING) { controller.setController(new LineStringEditController(mapWidget, controller)); // depends on control dependency: [if], data = [none] } else if (vLayer.getLayerInfo().getLayerType() == LayerType.MULTILINESTRING) { controller.setController(new MultiLineStringEditController(mapWidget, controller)); // depends on control dependency: [if], data = [none] } else if (vLayer.getLayerInfo().getLayerType() == LayerType.POLYGON) { controller.setController(new PolygonEditController(mapWidget, controller)); // depends on control dependency: [if], data = [none] } else if (vLayer.getLayerInfo().getLayerType() == LayerType.MULTIPOLYGON) { controller.setController(new MultiPolygonEditController(mapWidget, controller)); // depends on control dependency: [if], data = [none] } controller.setEditMode(EditMode.DRAG_MODE); // depends on control dependency: [if], data = [none] } } }
public class class_name { public static void fullTraverse(byte[] msgData, int level, int startPos, int endPos, int levelToIndex, Queue<Integer> index) { try { if (msgData == null || msgData.length == 0) return; int pos = startPos; while (pos < endPos) { if (level == levelToIndex) index.add(pos); // It's a list with a payload more than 55 bytes // data[0] - 0xF7 = how many next bytes allocated // for the length of the list if ((msgData[pos] & 0xFF) >= OFFSET_LONG_LIST) { byte lengthOfLength = (byte) (msgData[pos] - OFFSET_LONG_LIST); int length = calcLength(lengthOfLength, msgData, pos); // now we can parse an item for data[1]..data[length] System.out.println("-- level: [" + level + "] Found big list length: " + length); fullTraverse(msgData, level + 1, pos + lengthOfLength + 1, pos + lengthOfLength + length, levelToIndex, index); pos += lengthOfLength + length + 1; continue; } // It's a list with a payload less than 55 bytes if ((msgData[pos] & 0xFF) >= OFFSET_SHORT_LIST && (msgData[pos] & 0xFF) < OFFSET_LONG_LIST) { byte length = (byte) ((msgData[pos] & 0xFF) - OFFSET_SHORT_LIST); System.out.println("-- level: [" + level + "] Found small list length: " + length); fullTraverse(msgData, level + 1, pos + 1, pos + length + 1, levelToIndex, index); pos += 1 + length; continue; } // It's an item with a payload more than 55 bytes // data[0] - 0xB7 = how much next bytes allocated for // the length of the string if ((msgData[pos] & 0xFF) >= OFFSET_LONG_ITEM && (msgData[pos] & 0xFF) < OFFSET_SHORT_LIST) { byte lengthOfLength = (byte) (msgData[pos] - OFFSET_LONG_ITEM); int length = calcLength(lengthOfLength, msgData, pos); // now we can parse an item for data[1]..data[length] System.out.println("-- level: [" + level + "] Found big item length: " + length); pos += lengthOfLength + length + 1; continue; } // It's an item less than 55 bytes long, // data[0] - 0x80 == length of the item if ((msgData[pos] & 0xFF) > OFFSET_SHORT_ITEM && (msgData[pos] & 0xFF) < OFFSET_LONG_ITEM) { byte length = (byte) ((msgData[pos] & 0xFF) - OFFSET_SHORT_ITEM); System.out.println("-- level: [" + level + "] Found small item length: " + length); pos += 1 + length; continue; } // null item if ((msgData[pos] & 0xFF) == OFFSET_SHORT_ITEM) { System.out.println("-- level: [" + level + "] Found null item: "); pos += 1; continue; } // single byte item if ((msgData[pos] & 0xFF) < OFFSET_SHORT_ITEM) { System.out.println("-- level: [" + level + "] Found single item: "); pos += 1; continue; } } } catch (Throwable th) { throw new RuntimeException("RLP wrong encoding", th.fillInStackTrace()); } } }
public class class_name { public static void fullTraverse(byte[] msgData, int level, int startPos, int endPos, int levelToIndex, Queue<Integer> index) { try { if (msgData == null || msgData.length == 0) return; int pos = startPos; while (pos < endPos) { if (level == levelToIndex) index.add(pos); // It's a list with a payload more than 55 bytes // data[0] - 0xF7 = how many next bytes allocated // for the length of the list if ((msgData[pos] & 0xFF) >= OFFSET_LONG_LIST) { byte lengthOfLength = (byte) (msgData[pos] - OFFSET_LONG_LIST); int length = calcLength(lengthOfLength, msgData, pos); // now we can parse an item for data[1]..data[length] System.out.println("-- level: [" + level + "] Found big list length: " + length); // depends on control dependency: [if], data = [none] fullTraverse(msgData, level + 1, pos + lengthOfLength + 1, pos + lengthOfLength + length, levelToIndex, index); // depends on control dependency: [if], data = [none] pos += lengthOfLength + length + 1; // depends on control dependency: [if], data = [none] continue; } // It's a list with a payload less than 55 bytes if ((msgData[pos] & 0xFF) >= OFFSET_SHORT_LIST && (msgData[pos] & 0xFF) < OFFSET_LONG_LIST) { byte length = (byte) ((msgData[pos] & 0xFF) - OFFSET_SHORT_LIST); System.out.println("-- level: [" + level + "] Found small list length: " + length); // depends on control dependency: [if], data = [none] fullTraverse(msgData, level + 1, pos + 1, pos + length + 1, levelToIndex, index); // depends on control dependency: [if], data = [none] pos += 1 + length; // depends on control dependency: [if], data = [none] continue; } // It's an item with a payload more than 55 bytes // data[0] - 0xB7 = how much next bytes allocated for // the length of the string if ((msgData[pos] & 0xFF) >= OFFSET_LONG_ITEM && (msgData[pos] & 0xFF) < OFFSET_SHORT_LIST) { byte lengthOfLength = (byte) (msgData[pos] - OFFSET_LONG_ITEM); int length = calcLength(lengthOfLength, msgData, pos); // now we can parse an item for data[1]..data[length] System.out.println("-- level: [" + level + "] Found big item length: " + length); // depends on control dependency: [if], data = [none] pos += lengthOfLength + length + 1; // depends on control dependency: [if], data = [none] continue; } // It's an item less than 55 bytes long, // data[0] - 0x80 == length of the item if ((msgData[pos] & 0xFF) > OFFSET_SHORT_ITEM && (msgData[pos] & 0xFF) < OFFSET_LONG_ITEM) { byte length = (byte) ((msgData[pos] & 0xFF) - OFFSET_SHORT_ITEM); System.out.println("-- level: [" + level + "] Found small item length: " + length); // depends on control dependency: [if], data = [none] pos += 1 + length; // depends on control dependency: [if], data = [none] continue; } // null item if ((msgData[pos] & 0xFF) == OFFSET_SHORT_ITEM) { System.out.println("-- level: [" + level + "] Found null item: "); // depends on control dependency: [if], data = [none] pos += 1; // depends on control dependency: [if], data = [none] continue; } // single byte item if ((msgData[pos] & 0xFF) < OFFSET_SHORT_ITEM) { System.out.println("-- level: [" + level + "] Found single item: "); // depends on control dependency: [if], data = [none] pos += 1; // depends on control dependency: [if], data = [none] continue; } } } catch (Throwable th) { throw new RuntimeException("RLP wrong encoding", th.fillInStackTrace()); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private static void encodeCopy(ByteBuf out, int offset, int length) { while (length >= 68) { encodeCopyWithOffset(out, offset, 64); length -= 64; } if (length > 64) { encodeCopyWithOffset(out, offset, 60); length -= 60; } encodeCopyWithOffset(out, offset, length); } }
public class class_name { private static void encodeCopy(ByteBuf out, int offset, int length) { while (length >= 68) { encodeCopyWithOffset(out, offset, 64); // depends on control dependency: [while], data = [none] length -= 64; // depends on control dependency: [while], data = [none] } if (length > 64) { encodeCopyWithOffset(out, offset, 60); // depends on control dependency: [if], data = [none] length -= 60; // depends on control dependency: [if], data = [none] } encodeCopyWithOffset(out, offset, length); } }
public class class_name { protected static <A> double[] computeNormalizedRanks(final NumberArrayAdapter<?, A> adapter, final A data, int len) { // Sort the objects: int[] s1 = sortedIndex(adapter, data, len); final double norm = .5 / (len - 1); double[] ret = new double[len]; for(int i = 0; i < len;) { final int start = i++; final double val = adapter.getDouble(data, s1[start]); while(i < len && adapter.getDouble(data, s1[i]) <= val) { i++; } final double score = (start + i - 1) * norm; for(int j = start; j < i; j++) { ret[s1[j]] = score; } } return ret; } }
public class class_name { protected static <A> double[] computeNormalizedRanks(final NumberArrayAdapter<?, A> adapter, final A data, int len) { // Sort the objects: int[] s1 = sortedIndex(adapter, data, len); final double norm = .5 / (len - 1); double[] ret = new double[len]; for(int i = 0; i < len;) { final int start = i++; final double val = adapter.getDouble(data, s1[start]); while(i < len && adapter.getDouble(data, s1[i]) <= val) { i++; // depends on control dependency: [while], data = [none] } final double score = (start + i - 1) * norm; for(int j = start; j < i; j++) { ret[s1[j]] = score; // depends on control dependency: [for], data = [j] } } return ret; } }
public class class_name { private void normalize(final Element vertex) { double nx = vertex.getDouble("nx"); double ny = vertex.getDouble("ny"); double nz = vertex.getDouble("nz"); double n = Math.sqrt(nx * nx + ny * ny + nz * nz); if (n < EPSILON) { vertex.setDouble("nx", 0); vertex.setDouble("ny", 0); vertex.setDouble("nz", 0); } vertex.setDouble("nx", nx / n); vertex.setDouble("ny", ny / n); vertex.setDouble("nz", nz / n); } }
public class class_name { private void normalize(final Element vertex) { double nx = vertex.getDouble("nx"); double ny = vertex.getDouble("ny"); double nz = vertex.getDouble("nz"); double n = Math.sqrt(nx * nx + ny * ny + nz * nz); if (n < EPSILON) { vertex.setDouble("nx", 0); // depends on control dependency: [if], data = [none] vertex.setDouble("ny", 0); // depends on control dependency: [if], data = [none] vertex.setDouble("nz", 0); // depends on control dependency: [if], data = [none] } vertex.setDouble("nx", nx / n); vertex.setDouble("ny", ny / n); vertex.setDouble("nz", nz / n); } }
public class class_name { private int getRealIndex(int index) { int idx = index; if(idx < 0) { if(Math.abs(idx) <= this.size()) { idx = this.size() + idx; } else { logger.error("negative number {} is too large: results size is {}, maximum admitted value is {}", index, this.size(), -1 * this.size() + 1); throw new IndexOutOfBoundsException("Negative index is too large"); } } return idx; } }
public class class_name { private int getRealIndex(int index) { int idx = index; if(idx < 0) { if(Math.abs(idx) <= this.size()) { idx = this.size() + idx; // depends on control dependency: [if], data = [none] } else { logger.error("negative number {} is too large: results size is {}, maximum admitted value is {}", index, this.size(), -1 * this.size() + 1); // depends on control dependency: [if], data = [none] throw new IndexOutOfBoundsException("Negative index is too large"); } } return idx; } }
public class class_name { @Nullable static String removeElExpressionBraces(@Nullable final String rule) { if(isEmptyWhitespace(rule)) { return null; } int startchar = 0; // inclusive int endchar = rule.length() - 1; // inclusive // garbage free trim() while(startchar < rule.length() && CharMatcher.WHITESPACE.matches(rule.charAt(startchar))) { ++startchar; } while(endchar > startchar && CharMatcher.WHITESPACE.matches(rule.charAt(endchar))) { --endchar; } if(rule.regionMatches(startchar, "${", 0, 2) && rule.charAt(endchar) == '}') { startchar += 2; // skip "${" --endchar; // skip '}' } // garbage free trim() while(startchar < rule.length() && CharMatcher.WHITESPACE.matches(rule.charAt(startchar))) { ++startchar; } while(endchar > startchar && CharMatcher.WHITESPACE.matches(rule.charAt(endchar))) { --endchar; } if(endchar < startchar ) { // null instead of empty string for consistency with 'isEmptyWhitespace' check at the beginning return null; } return rule.substring(startchar, endchar + 1); } }
public class class_name { @Nullable static String removeElExpressionBraces(@Nullable final String rule) { if(isEmptyWhitespace(rule)) { return null; // depends on control dependency: [if], data = [none] } int startchar = 0; // inclusive int endchar = rule.length() - 1; // inclusive // garbage free trim() while(startchar < rule.length() && CharMatcher.WHITESPACE.matches(rule.charAt(startchar))) { ++startchar; // depends on control dependency: [while], data = [none] } while(endchar > startchar && CharMatcher.WHITESPACE.matches(rule.charAt(endchar))) { --endchar; // depends on control dependency: [while], data = [none] } if(rule.regionMatches(startchar, "${", 0, 2) && rule.charAt(endchar) == '}') { startchar += 2; // skip "${" // depends on control dependency: [if], data = [none] --endchar; // skip '}' // depends on control dependency: [if], data = [none] } // garbage free trim() while(startchar < rule.length() && CharMatcher.WHITESPACE.matches(rule.charAt(startchar))) { ++startchar; // depends on control dependency: [while], data = [none] } while(endchar > startchar && CharMatcher.WHITESPACE.matches(rule.charAt(endchar))) { --endchar; // depends on control dependency: [while], data = [none] } if(endchar < startchar ) { // null instead of empty string for consistency with 'isEmptyWhitespace' check at the beginning return null; // depends on control dependency: [if], data = [none] } return rule.substring(startchar, endchar + 1); } }