code
stringlengths
130
281k
code_dependency
stringlengths
182
306k
public class class_name { public String getLocalName(int nodeHandle) { nodes.readSlot(nodeHandle, gotslot); short type = (short) (gotslot[0] & 0xFFFF); String name = ""; if ((type==ELEMENT_NODE) || (type==ATTRIBUTE_NODE)) { int i=gotslot[3]; name=m_localNames.indexToString(i & 0xFFFF); if(name==null) name=""; } return name; } }
public class class_name { public String getLocalName(int nodeHandle) { nodes.readSlot(nodeHandle, gotslot); short type = (short) (gotslot[0] & 0xFFFF); String name = ""; if ((type==ELEMENT_NODE) || (type==ATTRIBUTE_NODE)) { int i=gotslot[3]; name=m_localNames.indexToString(i & 0xFFFF); // depends on control dependency: [if], data = [none] if(name==null) name=""; } return name; } }
public class class_name { @Override public SafeSpaceIdentifier createSafeSpaceIdentifier(String owner, String identifier) { validateName(owner); SafeSpace ss = createSafeSpaceLogical(owner, identifier); for (File parent = ss.directory.getParentFile(); parent != null; parent = parent.getParentFile()) { if (rootDirectory.equals(parent)) { return new DefaultSafeSpaceIdentifier(ss); } } throw new IllegalArgumentException("Attempted to access file outside the persistence path"); } }
public class class_name { @Override public SafeSpaceIdentifier createSafeSpaceIdentifier(String owner, String identifier) { validateName(owner); SafeSpace ss = createSafeSpaceLogical(owner, identifier); for (File parent = ss.directory.getParentFile(); parent != null; parent = parent.getParentFile()) { if (rootDirectory.equals(parent)) { return new DefaultSafeSpaceIdentifier(ss); // depends on control dependency: [if], data = [none] } } throw new IllegalArgumentException("Attempted to access file outside the persistence path"); } }
public class class_name { private static <T> T[] removeNull(T[] items) { int count = 0; for (int i = 0; i != items.length; ++i) { if (items[i] == null) { count = count + 1; } } if (count == 0) { return items; } else { T[] rs = java.util.Arrays.copyOf(items, items.length - count); for (int i = 0, j = 0; i != items.length; ++i) { T item = items[i]; if (item != null) { rs[j++] = item; } } return rs; } } }
public class class_name { private static <T> T[] removeNull(T[] items) { int count = 0; for (int i = 0; i != items.length; ++i) { if (items[i] == null) { count = count + 1; // depends on control dependency: [if], data = [none] } } if (count == 0) { return items; // depends on control dependency: [if], data = [none] } else { T[] rs = java.util.Arrays.copyOf(items, items.length - count); for (int i = 0, j = 0; i != items.length; ++i) { T item = items[i]; if (item != null) { rs[j++] = item; // depends on control dependency: [if], data = [none] } } return rs; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void handleMasterResponse(Address masterAddress, Address callerAddress) { clusterServiceLock.lock(); try { if (logger.isFineEnabled()) { logger.fine(format("Handling master response %s from %s", masterAddress, callerAddress)); } if (clusterService.isJoined()) { if (logger.isFineEnabled()) { logger.fine(format("Ignoring master response %s from %s, this node is already joined", masterAddress, callerAddress)); } return; } if (node.getThisAddress().equals(masterAddress)) { logger.warning("Received my address as master address from " + callerAddress); return; } Address currentMaster = clusterService.getMasterAddress(); if (currentMaster == null || currentMaster.equals(masterAddress)) { setMasterAndJoin(masterAddress); return; } if (currentMaster.equals(callerAddress)) { logger.warning(format("Setting master to %s since %s says it is not master anymore", masterAddress, currentMaster)); setMasterAndJoin(masterAddress); return; } Connection conn = node.getEndpointManager(MEMBER).getConnection(currentMaster); if (conn != null && conn.isAlive()) { logger.info(format("Ignoring master response %s from %s since this node has an active master %s", masterAddress, callerAddress, currentMaster)); sendJoinRequest(currentMaster, true); } else { logger.warning(format("Ambiguous master response! Received master response %s from %s. " + "This node has a master %s, but does not have an active connection to it. " + "Master field will be unset now.", masterAddress, callerAddress, currentMaster)); clusterService.setMasterAddress(null); } } finally { clusterServiceLock.unlock(); } } }
public class class_name { public void handleMasterResponse(Address masterAddress, Address callerAddress) { clusterServiceLock.lock(); try { if (logger.isFineEnabled()) { logger.fine(format("Handling master response %s from %s", masterAddress, callerAddress)); // depends on control dependency: [if], data = [none] } if (clusterService.isJoined()) { if (logger.isFineEnabled()) { logger.fine(format("Ignoring master response %s from %s, this node is already joined", masterAddress, callerAddress)); // depends on control dependency: [if], data = [none] } return; // depends on control dependency: [if], data = [none] } if (node.getThisAddress().equals(masterAddress)) { logger.warning("Received my address as master address from " + callerAddress); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } Address currentMaster = clusterService.getMasterAddress(); if (currentMaster == null || currentMaster.equals(masterAddress)) { setMasterAndJoin(masterAddress); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } if (currentMaster.equals(callerAddress)) { logger.warning(format("Setting master to %s since %s says it is not master anymore", masterAddress, currentMaster)); // depends on control dependency: [if], data = [none] setMasterAndJoin(masterAddress); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } Connection conn = node.getEndpointManager(MEMBER).getConnection(currentMaster); if (conn != null && conn.isAlive()) { logger.info(format("Ignoring master response %s from %s since this node has an active master %s", masterAddress, callerAddress, currentMaster)); // depends on control dependency: [if], data = [none] sendJoinRequest(currentMaster, true); // depends on control dependency: [if], data = [none] } else { logger.warning(format("Ambiguous master response! Received master response %s from %s. " + "This node has a master %s, but does not have an active connection to it. " + "Master field will be unset now.", masterAddress, callerAddress, currentMaster)); // depends on control dependency: [if], data = [none] clusterService.setMasterAddress(null); // depends on control dependency: [if], data = [none] } } finally { clusterServiceLock.unlock(); } } }
public class class_name { public void marshall(DeleteVocabularyRequest deleteVocabularyRequest, ProtocolMarshaller protocolMarshaller) { if (deleteVocabularyRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteVocabularyRequest.getVocabularyName(), VOCABULARYNAME_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(DeleteVocabularyRequest deleteVocabularyRequest, ProtocolMarshaller protocolMarshaller) { if (deleteVocabularyRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(deleteVocabularyRequest.getVocabularyName(), VOCABULARYNAME_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static <K, V> EntryStream<K, V> of(Stream<? extends Entry<K, V>> stream) { if (stream instanceof AbstractStreamEx) { @SuppressWarnings("unchecked") AbstractStreamEx<Entry<K, V>, ?> ase = (AbstractStreamEx<Entry<K, V>, ?>) stream; if (ase.spliterator != null) return new EntryStream<>(ase.spliterator(), ase.context); return new EntryStream<>(ase.stream(), ase.context); } return new EntryStream<>(stream, StreamContext.of(stream)); } }
public class class_name { public static <K, V> EntryStream<K, V> of(Stream<? extends Entry<K, V>> stream) { if (stream instanceof AbstractStreamEx) { @SuppressWarnings("unchecked") AbstractStreamEx<Entry<K, V>, ?> ase = (AbstractStreamEx<Entry<K, V>, ?>) stream; if (ase.spliterator != null) return new EntryStream<>(ase.spliterator(), ase.context); return new EntryStream<>(ase.stream(), ase.context); // depends on control dependency: [if], data = [none] } return new EntryStream<>(stream, StreamContext.of(stream)); } }
public class class_name { static List<S3ObjectSummary> listObjectsLexicographically( AmazonS3 s3Client, S3ConfigBean s3ConfigBean, AntPathMatcher pathMatcher, S3Offset s3Offset, int fetchSize ) { // Incrementally scan objects after the marker (s3Offset). List<S3ObjectSummary> list = new ArrayList<>(fetchSize); ListObjectsRequest listObjectsRequest = new ListObjectsRequest(); listObjectsRequest.setBucketName(s3ConfigBean.s3Config.bucket); listObjectsRequest.setPrefix(s3ConfigBean.s3Config.commonPrefix); listObjectsRequest.setMaxKeys(BATCH_SIZE); if (s3Offset.getKey() != null) { listObjectsRequest.setMarker(s3Offset.getKey()); } ObjectListing objectListing = s3Client.listObjects(listObjectsRequest); while (true) { for (S3ObjectSummary s : objectListing.getObjectSummaries()) { String fullPrefix = s.getKey(); String remainingPrefix = fullPrefix.substring(s3ConfigBean.s3Config.commonPrefix.length(), fullPrefix.length()); if (!remainingPrefix.isEmpty()) { if (pathMatcher.match(s3ConfigBean.s3FileConfig.prefixPattern, remainingPrefix)) { list.add(s); } // We've got enough objects. if (list.size() == fetchSize) { return list; } } } // Listing is complete. No more objects to be listed. if (!objectListing.isTruncated()) { break; } objectListing = s3Client.listNextBatchOfObjects(objectListing); } return list; } }
public class class_name { static List<S3ObjectSummary> listObjectsLexicographically( AmazonS3 s3Client, S3ConfigBean s3ConfigBean, AntPathMatcher pathMatcher, S3Offset s3Offset, int fetchSize ) { // Incrementally scan objects after the marker (s3Offset). List<S3ObjectSummary> list = new ArrayList<>(fetchSize); ListObjectsRequest listObjectsRequest = new ListObjectsRequest(); listObjectsRequest.setBucketName(s3ConfigBean.s3Config.bucket); listObjectsRequest.setPrefix(s3ConfigBean.s3Config.commonPrefix); listObjectsRequest.setMaxKeys(BATCH_SIZE); if (s3Offset.getKey() != null) { listObjectsRequest.setMarker(s3Offset.getKey()); // depends on control dependency: [if], data = [(s3Offset.getKey()] } ObjectListing objectListing = s3Client.listObjects(listObjectsRequest); while (true) { for (S3ObjectSummary s : objectListing.getObjectSummaries()) { String fullPrefix = s.getKey(); String remainingPrefix = fullPrefix.substring(s3ConfigBean.s3Config.commonPrefix.length(), fullPrefix.length()); if (!remainingPrefix.isEmpty()) { if (pathMatcher.match(s3ConfigBean.s3FileConfig.prefixPattern, remainingPrefix)) { list.add(s); // depends on control dependency: [if], data = [none] } // We've got enough objects. if (list.size() == fetchSize) { return list; // depends on control dependency: [if], data = [none] } } } // Listing is complete. No more objects to be listed. if (!objectListing.isTruncated()) { break; } objectListing = s3Client.listNextBatchOfObjects(objectListing); // depends on control dependency: [while], data = [none] } return list; } }
public class class_name { @Override public void setEnterpriseBean(Object bean) { super.setEnterpriseBean(bean); if (bean instanceof SessionSynchronization) { sessionSync = (SessionSynchronization) bean; } } }
public class class_name { @Override public void setEnterpriseBean(Object bean) { super.setEnterpriseBean(bean); if (bean instanceof SessionSynchronization) { sessionSync = (SessionSynchronization) bean; // depends on control dependency: [if], data = [none] } } }
public class class_name { public long getInvocationCacheHitCount() { LruCache<Object,I> invocationCache = _invocationCache; if (invocationCache != null) { return invocationCache.getHitCount(); } else { return 0; } } }
public class class_name { public long getInvocationCacheHitCount() { LruCache<Object,I> invocationCache = _invocationCache; if (invocationCache != null) { return invocationCache.getHitCount(); // depends on control dependency: [if], data = [none] } else { return 0; // depends on control dependency: [if], data = [none] } } }
public class class_name { public TemplateModel parseStandalone( final ITemplateContext context, final String template, final Set<String> templateSelectors, final TemplateMode templateMode, final boolean useCache, final boolean failIfNotExists) { Validate.notNull(context, "Context cannot be null"); Validate.notNull(template, "Template cannot be null"); // templateSelectors CAN be null if we are going to render the entire template // templateMode CAN be null if we are going to use the mode specified by the template resolver // templateResolutionAttributes CAN be null final String ownerTemplate = context.getTemplateData().getTemplate(); final Map<String,Object> templateResolutionAttributes = context.getTemplateResolutionAttributes(); final Set<String> cleanTemplateSelectors; if (templateSelectors != null && !templateSelectors.isEmpty()) { Validate.containsNoEmpties( templateSelectors, "If specified, the Template Selector set cannot contain any nulls or empties"); if (templateSelectors.size() == 1) { cleanTemplateSelectors = Collections.singleton(templateSelectors.iterator().next()); } else { // We will be using a TreeSet because we want the selectors to be ORDERED, so that comparison at the // equals(...) method works alright cleanTemplateSelectors = Collections.unmodifiableSet(new TreeSet<String>(templateSelectors)); } } else { cleanTemplateSelectors = null; } final TemplateCacheKey cacheKey = useCache? new TemplateCacheKey( ownerTemplate, template, cleanTemplateSelectors, 0, 0, templateMode, templateResolutionAttributes) : null; /* * First look at the cache - it might be already cached */ if (useCache && this.templateCache != null) { final TemplateModel cached = this.templateCache.get(cacheKey); if (cached != null) { /* * Just at the end, and importantly AFTER CACHING, check if we need to apply any pre-processors * to this model before returning and letting the engine insert the model in any way it needs. */ return applyPreProcessorsIfNeeded(context, cached); } } /* * Resolve the template */ final TemplateResolution templateResolution = resolveTemplate(this.configuration, ownerTemplate, template, templateResolutionAttributes, failIfNotExists); /* * Once the template has been resolved (or tried to), and depending on the value of our 'failIfNotExists' * flag, we will check two conditions in which we will be returning null: * * 1. No template resolver has been able to resolve the template (this can happen if resolvers are * configured with the 'checkExistence' flag to true). * 2. If the template was resolved, its existence should be checked in order to avoid exceptions during * the reading phase. * * NOTE we will not cache this "null" result because the fact that a template is cacheable or not is * determined by template resolvers. And in this case there is no template resolver being applied * (actually, we are here because no resolver had success). */ if (!failIfNotExists) { if (templateResolution == null) { // No resolver could resolve this return null; } if (!templateResolution.isTemplateResourceExistenceVerified()) { final ITemplateResource resource = templateResolution.getTemplateResource(); if (resource == null || !resource.exists()) { // Calling resource.exists() each time is not great, but think this only happens if the resource // has not been cached (e.g. when it does not exist) return null; } } } /* * Build the TemplateData object */ final TemplateData templateData = buildTemplateData(templateResolution, template, cleanTemplateSelectors, templateMode, useCache); /* * Create the Template Handler that will be in charge of building the TemplateModel */ final ModelBuilderTemplateHandler builderHandler = new ModelBuilderTemplateHandler(this.configuration, templateData); /* * PROCESS THE TEMPLATE */ final ITemplateParser parser = getParserForTemplateMode(templateData.getTemplateMode()); parser.parseStandalone( this.configuration, ownerTemplate, template, cleanTemplateSelectors, templateData.getTemplateResource(), templateData.getTemplateMode(), templateResolution.getUseDecoupledLogic(), builderHandler); final TemplateModel templateModel = builderHandler.getModel(); /* * Cache the template if it is cacheable */ if (useCache && this.templateCache != null) { if (templateResolution.getValidity().isCacheable()) { this.templateCache.put(cacheKey, templateModel); } } /* * Last step: just at the end, and importantly AFTER CACHING, check if we need to apply any pre-processors * to this model before returning and letting the engine insert the model in any way it needs. */ return applyPreProcessorsIfNeeded(context, templateModel); } }
public class class_name { public TemplateModel parseStandalone( final ITemplateContext context, final String template, final Set<String> templateSelectors, final TemplateMode templateMode, final boolean useCache, final boolean failIfNotExists) { Validate.notNull(context, "Context cannot be null"); Validate.notNull(template, "Template cannot be null"); // templateSelectors CAN be null if we are going to render the entire template // templateMode CAN be null if we are going to use the mode specified by the template resolver // templateResolutionAttributes CAN be null final String ownerTemplate = context.getTemplateData().getTemplate(); final Map<String,Object> templateResolutionAttributes = context.getTemplateResolutionAttributes(); final Set<String> cleanTemplateSelectors; if (templateSelectors != null && !templateSelectors.isEmpty()) { Validate.containsNoEmpties( templateSelectors, "If specified, the Template Selector set cannot contain any nulls or empties"); // depends on control dependency: [if], data = [none] if (templateSelectors.size() == 1) { cleanTemplateSelectors = Collections.singleton(templateSelectors.iterator().next()); // depends on control dependency: [if], data = [none] } else { // We will be using a TreeSet because we want the selectors to be ORDERED, so that comparison at the // equals(...) method works alright cleanTemplateSelectors = Collections.unmodifiableSet(new TreeSet<String>(templateSelectors)); // depends on control dependency: [if], data = [none] } } else { cleanTemplateSelectors = null; // depends on control dependency: [if], data = [none] } final TemplateCacheKey cacheKey = useCache? new TemplateCacheKey( ownerTemplate, template, cleanTemplateSelectors, 0, 0, templateMode, templateResolutionAttributes) : null; /* * First look at the cache - it might be already cached */ if (useCache && this.templateCache != null) { final TemplateModel cached = this.templateCache.get(cacheKey); if (cached != null) { /* * Just at the end, and importantly AFTER CACHING, check if we need to apply any pre-processors * to this model before returning and letting the engine insert the model in any way it needs. */ return applyPreProcessorsIfNeeded(context, cached); // depends on control dependency: [if], data = [none] } } /* * Resolve the template */ final TemplateResolution templateResolution = resolveTemplate(this.configuration, ownerTemplate, template, templateResolutionAttributes, failIfNotExists); /* * Once the template has been resolved (or tried to), and depending on the value of our 'failIfNotExists' * flag, we will check two conditions in which we will be returning null: * * 1. No template resolver has been able to resolve the template (this can happen if resolvers are * configured with the 'checkExistence' flag to true). * 2. If the template was resolved, its existence should be checked in order to avoid exceptions during * the reading phase. * * NOTE we will not cache this "null" result because the fact that a template is cacheable or not is * determined by template resolvers. And in this case there is no template resolver being applied * (actually, we are here because no resolver had success). */ if (!failIfNotExists) { if (templateResolution == null) { // No resolver could resolve this return null; // depends on control dependency: [if], data = [none] } if (!templateResolution.isTemplateResourceExistenceVerified()) { final ITemplateResource resource = templateResolution.getTemplateResource(); if (resource == null || !resource.exists()) { // Calling resource.exists() each time is not great, but think this only happens if the resource // has not been cached (e.g. when it does not exist) return null; // depends on control dependency: [if], data = [none] } } } /* * Build the TemplateData object */ final TemplateData templateData = buildTemplateData(templateResolution, template, cleanTemplateSelectors, templateMode, useCache); /* * Create the Template Handler that will be in charge of building the TemplateModel */ final ModelBuilderTemplateHandler builderHandler = new ModelBuilderTemplateHandler(this.configuration, templateData); /* * PROCESS THE TEMPLATE */ final ITemplateParser parser = getParserForTemplateMode(templateData.getTemplateMode()); parser.parseStandalone( this.configuration, ownerTemplate, template, cleanTemplateSelectors, templateData.getTemplateResource(), templateData.getTemplateMode(), templateResolution.getUseDecoupledLogic(), builderHandler); final TemplateModel templateModel = builderHandler.getModel(); /* * Cache the template if it is cacheable */ if (useCache && this.templateCache != null) { if (templateResolution.getValidity().isCacheable()) { this.templateCache.put(cacheKey, templateModel); // depends on control dependency: [if], data = [none] } } /* * Last step: just at the end, and importantly AFTER CACHING, check if we need to apply any pre-processors * to this model before returning and letting the engine insert the model in any way it needs. */ return applyPreProcessorsIfNeeded(context, templateModel); } }
public class class_name { public EntryBuffer clear() { for (int i = 0; i < buffer.length; i++) { buffer[i] = null; } return this; } }
public class class_name { public EntryBuffer clear() { for (int i = 0; i < buffer.length; i++) { buffer[i] = null; // depends on control dependency: [for], data = [i] } return this; } }
public class class_name { private void applyChangeToModelPages(CmsSitemapChangeEvent changeEvent) { CmsSitemapChange change = changeEvent.getChange(); switch (change.getChangeType()) { case delete: CmsModelPageTreeItem deleteItem = m_modelPageTreeItems.get(change.getEntryId()); if (deleteItem != null) { deleteItem.removeFromParent(); } break; case undelete: case create: String typeName = m_controller.getGalleryType( new Integer(change.getNewResourceTypeId())).getResourceType(); if (typeName != null) { CmsModelPageEntry modelPage = new CmsModelPageEntry(); modelPage.setSitePath(change.getSitePath()); modelPage.setResourceType(typeName); modelPage.setStructureId(change.getEntryId()); modelPage.setOwnProperties(change.getOwnProperties()); CmsModelPageTreeItem folderItem = new CmsModelPageTreeItem(modelPage, false, false); CmsSitemapHoverbar.installOn(m_controller, folderItem, modelPage.getStructureId()); m_modelPageRoot.addChild(folderItem); m_modelPageTreeItems.put(modelPage.getStructureId(), folderItem); } break; case modify: CmsModelPageTreeItem changeItem = m_modelPageTreeItems.get(change.getEntryId()); if (changeItem != null) { CmsListItemWidget widget = changeItem.getListItemWidget(); for (CmsPropertyModification mod : change.getPropertyChanges()) { if (mod.getName().equals(CmsClientProperty.PROPERTY_TITLE)) { widget.setTitleLabel(mod.getValue()); } } String oldPath = changeItem.getSitePath(); if ((change.getName() != null) && !oldPath.endsWith("/" + change.getName())) { String newPath = CmsResource.getParentFolder(oldPath) + change.getName() + "/"; changeItem.updateSitePath(newPath); } } break; case bumpDetailPage: case clipboardOnly: case remove: default: // nothing to do } } }
public class class_name { private void applyChangeToModelPages(CmsSitemapChangeEvent changeEvent) { CmsSitemapChange change = changeEvent.getChange(); switch (change.getChangeType()) { case delete: CmsModelPageTreeItem deleteItem = m_modelPageTreeItems.get(change.getEntryId()); if (deleteItem != null) { deleteItem.removeFromParent(); // depends on control dependency: [if], data = [none] } break; case undelete: case create: String typeName = m_controller.getGalleryType( new Integer(change.getNewResourceTypeId())).getResourceType(); if (typeName != null) { CmsModelPageEntry modelPage = new CmsModelPageEntry(); modelPage.setSitePath(change.getSitePath()); // depends on control dependency: [if], data = [none] modelPage.setResourceType(typeName); // depends on control dependency: [if], data = [(typeName] modelPage.setStructureId(change.getEntryId()); // depends on control dependency: [if], data = [none] modelPage.setOwnProperties(change.getOwnProperties()); // depends on control dependency: [if], data = [none] CmsModelPageTreeItem folderItem = new CmsModelPageTreeItem(modelPage, false, false); CmsSitemapHoverbar.installOn(m_controller, folderItem, modelPage.getStructureId()); // depends on control dependency: [if], data = [none] m_modelPageRoot.addChild(folderItem); // depends on control dependency: [if], data = [none] m_modelPageTreeItems.put(modelPage.getStructureId(), folderItem); // depends on control dependency: [if], data = [none] } break; case modify: CmsModelPageTreeItem changeItem = m_modelPageTreeItems.get(change.getEntryId()); if (changeItem != null) { CmsListItemWidget widget = changeItem.getListItemWidget(); for (CmsPropertyModification mod : change.getPropertyChanges()) { if (mod.getName().equals(CmsClientProperty.PROPERTY_TITLE)) { widget.setTitleLabel(mod.getValue()); // depends on control dependency: [if], data = [none] } } String oldPath = changeItem.getSitePath(); if ((change.getName() != null) && !oldPath.endsWith("/" + change.getName())) { String newPath = CmsResource.getParentFolder(oldPath) + change.getName() + "/"; changeItem.updateSitePath(newPath); // depends on control dependency: [if], data = [none] } } break; case bumpDetailPage: case clipboardOnly: case remove: default: // nothing to do } } }
public class class_name { private boolean pathMatches(Path path, SquigglyContext context) { List<SquigglyNode> nodes = context.getNodes(); Set<String> viewStack = null; SquigglyNode viewNode = null; int pathSize = path.getElements().size(); int lastIdx = pathSize - 1; for (int i = 0; i < pathSize; i++) { PathElement element = path.getElements().get(i); if (viewNode != null && !viewNode.isSquiggly()) { Class beanClass = element.getBeanClass(); if (beanClass != null && !Map.class.isAssignableFrom(beanClass)) { Set<String> propertyNames = getPropertyNamesFromViewStack(element, viewStack); if (!propertyNames.contains(element.getName())) { return false; } } } else if (nodes.isEmpty()) { return false; } else { SquigglyNode match = findBestSimpleNode(element, nodes); if (match == null) { match = findBestViewNode(element, nodes); if (match != null) { viewNode = match; viewStack = addToViewStack(viewStack, viewNode); } } else if (match.isAnyShallow()) { viewNode = match; } else if (match.isAnyDeep()) { return true; } if (match == null) { if (isJsonUnwrapped(element)) { continue; } return false; } if (match.isNegated()) { return false; } nodes = match.getChildren(); if (i < lastIdx && nodes.isEmpty() && !match.isEmptyNested() && SquigglyConfig.isFilterImplicitlyIncludeBaseFields()) { nodes = BASE_VIEW_NODES; } } } return true; } }
public class class_name { private boolean pathMatches(Path path, SquigglyContext context) { List<SquigglyNode> nodes = context.getNodes(); Set<String> viewStack = null; SquigglyNode viewNode = null; int pathSize = path.getElements().size(); int lastIdx = pathSize - 1; for (int i = 0; i < pathSize; i++) { PathElement element = path.getElements().get(i); if (viewNode != null && !viewNode.isSquiggly()) { Class beanClass = element.getBeanClass(); if (beanClass != null && !Map.class.isAssignableFrom(beanClass)) { Set<String> propertyNames = getPropertyNamesFromViewStack(element, viewStack); if (!propertyNames.contains(element.getName())) { return false; // depends on control dependency: [if], data = [none] } } } else if (nodes.isEmpty()) { return false; // depends on control dependency: [if], data = [none] } else { SquigglyNode match = findBestSimpleNode(element, nodes); if (match == null) { match = findBestViewNode(element, nodes); // depends on control dependency: [if], data = [none] if (match != null) { viewNode = match; // depends on control dependency: [if], data = [none] viewStack = addToViewStack(viewStack, viewNode); // depends on control dependency: [if], data = [none] } } else if (match.isAnyShallow()) { viewNode = match; // depends on control dependency: [if], data = [none] } else if (match.isAnyDeep()) { return true; // depends on control dependency: [if], data = [none] } if (match == null) { if (isJsonUnwrapped(element)) { continue; } return false; // depends on control dependency: [if], data = [none] } if (match.isNegated()) { return false; // depends on control dependency: [if], data = [none] } nodes = match.getChildren(); // depends on control dependency: [if], data = [none] if (i < lastIdx && nodes.isEmpty() && !match.isEmptyNested() && SquigglyConfig.isFilterImplicitlyIncludeBaseFields()) { nodes = BASE_VIEW_NODES; // depends on control dependency: [if], data = [none] } } } return true; } }
public class class_name { protected void clearReference (DObject reffer, String field, int reffedOid) { // look up the reference vector for the referenced object Reference[] refs = _refs.get(reffedOid); Reference ref = null; if (refs != null) { for (int ii = 0; ii < refs.length; ii++) { if (refs[ii].equals(reffer.getOid(), field)) { ref = refs[ii]; refs[ii] = null; break; } } } // if a referred object and referring object are both destroyed without allowing the // referred object destruction to process the ObjectRemoved event which is auto-generated, // the subsequent destruction of the referring object will attempt to clear the reference // to the referred object which no longer exists; so we don't complain about non- existent // references if the referree is already destroyed if (ref == null && _objects.containsKey(reffedOid)) { log.warning("Requested to clear out non-existent reference", "refferOid", reffer.getOid(), "field", field, "reffedOid", reffedOid); // } else { // log.info("Cleared out reference " + ref + "."); } } }
public class class_name { protected void clearReference (DObject reffer, String field, int reffedOid) { // look up the reference vector for the referenced object Reference[] refs = _refs.get(reffedOid); Reference ref = null; if (refs != null) { for (int ii = 0; ii < refs.length; ii++) { if (refs[ii].equals(reffer.getOid(), field)) { ref = refs[ii]; // depends on control dependency: [if], data = [none] refs[ii] = null; // depends on control dependency: [if], data = [none] break; } } } // if a referred object and referring object are both destroyed without allowing the // referred object destruction to process the ObjectRemoved event which is auto-generated, // the subsequent destruction of the referring object will attempt to clear the reference // to the referred object which no longer exists; so we don't complain about non- existent // references if the referree is already destroyed if (ref == null && _objects.containsKey(reffedOid)) { log.warning("Requested to clear out non-existent reference", "refferOid", reffer.getOid(), "field", field, "reffedOid", reffedOid); // depends on control dependency: [if], data = [none] // } else { // log.info("Cleared out reference " + ref + "."); } } }
public class class_name { private MatchedPair matchSessionForNode( Session session, LocalityLevel level) { if (level == LocalityLevel.NODE || level == LocalityLevel.ANY) { Set<Map.Entry<String, NodeContainer>> hostNodesSet = nodeSnapshot.runnableHosts(); for (Map.Entry<String, NodeContainer> hostNodes : hostNodesSet) { Iterator<ClusterNode> clusterNodeIt = hostNodes.getValue().iterator(); while (clusterNodeIt.hasNext()) { ClusterNode node = clusterNodeIt.next(); if (!nodeManager.hasEnoughResource(node)) { continue; } ResourceRequestInfo req = null; if (level == LocalityLevel.NODE) { req = session.getPendingRequestOnHost(node.getHost(), type); } else { req = session.getPendingRequestForAny(node.getHost(), type); } if (req != null) { return new MatchedPair(node, req); } } } } else if (level == LocalityLevel.RACK) { Set<Map.Entry<Node, NodeContainer>> rackNodesSet = nodeSnapshot.runnableRacks(); for (Map.Entry<Node, NodeContainer> rackNodes: rackNodesSet) { Node rack = rackNodes.getKey(); NodeContainer nodes = rackNodes.getValue(); Iterator<ClusterNode> clusterNodeIt = nodes.iterator(); while (clusterNodeIt.hasNext()) { ClusterNode node = clusterNodeIt.next(); if (!nodeManager.hasEnoughResource(node)) { continue; } ResourceRequestInfo req = session.getPendingRequestOnRack( node.getHost(), rack, type); if (req != null) { return new MatchedPair(node, req); } } } } return null; } }
public class class_name { private MatchedPair matchSessionForNode( Session session, LocalityLevel level) { if (level == LocalityLevel.NODE || level == LocalityLevel.ANY) { Set<Map.Entry<String, NodeContainer>> hostNodesSet = nodeSnapshot.runnableHosts(); for (Map.Entry<String, NodeContainer> hostNodes : hostNodesSet) { Iterator<ClusterNode> clusterNodeIt = hostNodes.getValue().iterator(); while (clusterNodeIt.hasNext()) { ClusterNode node = clusterNodeIt.next(); if (!nodeManager.hasEnoughResource(node)) { continue; } ResourceRequestInfo req = null; if (level == LocalityLevel.NODE) { req = session.getPendingRequestOnHost(node.getHost(), type); // depends on control dependency: [if], data = [none] } else { req = session.getPendingRequestForAny(node.getHost(), type); // depends on control dependency: [if], data = [none] } if (req != null) { return new MatchedPair(node, req); // depends on control dependency: [if], data = [none] } } } } else if (level == LocalityLevel.RACK) { Set<Map.Entry<Node, NodeContainer>> rackNodesSet = nodeSnapshot.runnableRacks(); for (Map.Entry<Node, NodeContainer> rackNodes: rackNodesSet) { Node rack = rackNodes.getKey(); NodeContainer nodes = rackNodes.getValue(); Iterator<ClusterNode> clusterNodeIt = nodes.iterator(); while (clusterNodeIt.hasNext()) { ClusterNode node = clusterNodeIt.next(); if (!nodeManager.hasEnoughResource(node)) { continue; } ResourceRequestInfo req = session.getPendingRequestOnRack( node.getHost(), rack, type); if (req != null) { return new MatchedPair(node, req); // depends on control dependency: [if], data = [none] } } } } return null; } }
public class class_name { private void setVisibilityOnControlledObject(Object controlledObject, boolean authorized) { try { Method method = controlledObject.getClass().getMethod( "setVisible", new Class[] { boolean.class } ); method.invoke( controlledObject, new Object[] { new Boolean( authorized ) } ); } catch( NoSuchMethodException ignored ) { System.out.println( "NO setVisible method on object: " + controlledObject ); // No method to call, so nothing to do } catch( IllegalAccessException ignored ) { logger.error( "Could not call setVisible", ignored ); } catch( InvocationTargetException ignored ) { logger.error( "Could not call setVisible", ignored ); } } }
public class class_name { private void setVisibilityOnControlledObject(Object controlledObject, boolean authorized) { try { Method method = controlledObject.getClass().getMethod( "setVisible", new Class[] { boolean.class } ); method.invoke( controlledObject, new Object[] { new Boolean( authorized ) } ); // depends on control dependency: [try], data = [none] } catch( NoSuchMethodException ignored ) { System.out.println( "NO setVisible method on object: " + controlledObject ); // No method to call, so nothing to do } catch( IllegalAccessException ignored ) { // depends on control dependency: [catch], data = [none] logger.error( "Could not call setVisible", ignored ); } catch( InvocationTargetException ignored ) { // depends on control dependency: [catch], data = [none] logger.error( "Could not call setVisible", ignored ); } // depends on control dependency: [catch], data = [none] } }
public class class_name { private void validateFilters() { Set<String> ids = new HashSet<String>(); for (Filter filter : filters) { ids.add(filter.getId()); } for(Metric metric : metrics) { if (metric.getFilter() != null && !metric.getFilter().isEmpty() && !ids.contains(metric.getFilter())) { throw new IllegalArgumentException( String.format("unrecognized filter id %s in metric %s", metric.getFilter(), metric.getId())); } } } }
public class class_name { private void validateFilters() { Set<String> ids = new HashSet<String>(); for (Filter filter : filters) { ids.add(filter.getId()); // depends on control dependency: [for], data = [filter] } for(Metric metric : metrics) { if (metric.getFilter() != null && !metric.getFilter().isEmpty() && !ids.contains(metric.getFilter())) { throw new IllegalArgumentException( String.format("unrecognized filter id %s in metric %s", metric.getFilter(), metric.getId())); } } } }
public class class_name { public Result run(Database database, Relation<O> relation) { // Get a distance and kNN query instance. DistanceQuery<O> distQuery = database.getDistanceQuery(relation, getDistanceFunction()); // Approximate query: KNNQuery<O> knnQuery = database.getKNNQuery(distQuery, k, DatabaseQuery.HINT_OPTIMIZED_ONLY); if(knnQuery == null || knnQuery instanceof LinearScanQuery) { throw new AbortException("Expected an accelerated query, but got a linear scan -- index is not used."); } // Exact query: KNNQuery<O> truekNNQuery; if(forcelinear) { truekNNQuery = QueryUtil.getLinearScanKNNQuery(distQuery); } else { truekNNQuery = database.getKNNQuery(distQuery, k, DatabaseQuery.HINT_EXACT); } if(knnQuery.getClass().equals(truekNNQuery.getClass())) { LOG.warning("Query classes are the same. This experiment may be invalid!"); } // No query set - use original database. if(queries == null || pattern != null) { // Relation to filter on Relation<String> lrel = (pattern != null) ? DatabaseUtil.guessLabelRepresentation(database) : null; final DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampling, random); FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null; MeanVariance mv = new MeanVariance(), mvrec = new MeanVariance(); MeanVariance mvdist = new MeanVariance(), mvdaerr = new MeanVariance(), mvdrerr = new MeanVariance(); int misses = 0; for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) { if(pattern == null || pattern.matcher(lrel.get(iditer)).find()) { // Query index: KNNList knns = knnQuery.getKNNForDBID(iditer, k); // Query reference: KNNList trueknns = truekNNQuery.getKNNForDBID(iditer, k); // Put adjusted knn size: mv.put(knns.size() * k / (double) trueknns.size()); // Put recall: mvrec.put(DBIDUtil.intersectionSize(knns, trueknns) / (double) trueknns.size()); if(knns.size() >= k) { double kdist = knns.getKNNDistance(); final double tdist = trueknns.getKNNDistance(); if(tdist > 0.0) { mvdist.put(kdist); mvdaerr.put(kdist - tdist); mvdrerr.put(kdist / tdist); } } else { // Less than k objects. misses++; } } LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); if(LOG.isStatistics()) { LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev()); LOG.statistics("Recall of true results: " + mvrec.getMean() + " +- " + mvrec.getNaiveStddev()); if(mvdist.getCount() > 0) { LOG.statistics("Mean k-distance: " + mvdist.getMean() + " +- " + mvdist.getNaiveStddev()); LOG.statistics("Mean absolute k-error: " + mvdaerr.getMean() + " +- " + mvdaerr.getNaiveStddev()); LOG.statistics("Mean relative k-error: " + mvdrerr.getMean() + " +- " + mvdrerr.getNaiveStddev()); } if(misses > 0) { LOG.statistics(String.format("Number of queries that returned less than k=%d objects: %d (%.2f%%)", k, misses, misses * 100. / mv.getCount())); } } } else { // Separate query set. TypeInformation res = getDistanceFunction().getInputTypeRestriction(); MultipleObjectsBundle bundle = queries.loadData(); int col = -1; for(int i = 0; i < bundle.metaLength(); i++) { if(res.isAssignableFromType(bundle.meta(i))) { col = i; break; } } if(col < 0) { throw new AbortException("No compatible data type in query input was found. Expected: " + res.toString()); } // Random sampling is a bit of hack, sorry. // But currently, we don't (yet) have an "integer random sample" function. DBIDRange sids = DBIDUtil.generateStaticDBIDRange(bundle.dataLength()); final DBIDs sample = DBIDUtil.randomSample(sids, sampling, random); FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null; MeanVariance mv = new MeanVariance(), mvrec = new MeanVariance(); MeanVariance mvdist = new MeanVariance(), mvdaerr = new MeanVariance(), mvdrerr = new MeanVariance(); int misses = 0; for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) { int off = sids.binarySearch(iditer); assert (off >= 0); @SuppressWarnings("unchecked") O o = (O) bundle.data(off, col); // Query index: KNNList knns = knnQuery.getKNNForObject(o, k); // Query reference: KNNList trueknns = truekNNQuery.getKNNForObject(o, k); // Put adjusted knn size: mv.put(knns.size() * k / (double) trueknns.size()); // Put recall: mvrec.put(DBIDUtil.intersectionSize(knns, trueknns) / (double) trueknns.size()); if(knns.size() >= k) { double kdist = knns.getKNNDistance(); final double tdist = trueknns.getKNNDistance(); if(tdist > 0.0) { mvdist.put(kdist); mvdaerr.put(kdist - tdist); mvdrerr.put(kdist / tdist); } } else { // Less than k objects. misses++; } LOG.incrementProcessed(prog); } LOG.ensureCompleted(prog); if(LOG.isStatistics()) { LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev()); LOG.statistics("Recall of true results: " + mvrec.getMean() + " +- " + mvrec.getNaiveStddev()); if(mvdist.getCount() > 0) { LOG.statistics("Mean absolute k-error: " + mvdaerr.getMean() + " +- " + mvdaerr.getNaiveStddev()); LOG.statistics("Mean relative k-error: " + mvdrerr.getMean() + " +- " + mvdrerr.getNaiveStddev()); } if(misses > 0) { LOG.statistics(String.format("Number of queries that returned less than k=%d objects: %d (%.2f%%)", k, misses, misses * 100. / mv.getCount())); } } } return null; } }
public class class_name { public Result run(Database database, Relation<O> relation) { // Get a distance and kNN query instance. DistanceQuery<O> distQuery = database.getDistanceQuery(relation, getDistanceFunction()); // Approximate query: KNNQuery<O> knnQuery = database.getKNNQuery(distQuery, k, DatabaseQuery.HINT_OPTIMIZED_ONLY); if(knnQuery == null || knnQuery instanceof LinearScanQuery) { throw new AbortException("Expected an accelerated query, but got a linear scan -- index is not used."); } // Exact query: KNNQuery<O> truekNNQuery; if(forcelinear) { truekNNQuery = QueryUtil.getLinearScanKNNQuery(distQuery); // depends on control dependency: [if], data = [none] } else { truekNNQuery = database.getKNNQuery(distQuery, k, DatabaseQuery.HINT_EXACT); // depends on control dependency: [if], data = [none] } if(knnQuery.getClass().equals(truekNNQuery.getClass())) { LOG.warning("Query classes are the same. This experiment may be invalid!"); // depends on control dependency: [if], data = [none] } // No query set - use original database. if(queries == null || pattern != null) { // Relation to filter on Relation<String> lrel = (pattern != null) ? DatabaseUtil.guessLabelRepresentation(database) : null; final DBIDs sample = DBIDUtil.randomSample(relation.getDBIDs(), sampling, random); FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null; MeanVariance mv = new MeanVariance(), mvrec = new MeanVariance(); MeanVariance mvdist = new MeanVariance(), mvdaerr = new MeanVariance(), mvdrerr = new MeanVariance(); int misses = 0; for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) { if(pattern == null || pattern.matcher(lrel.get(iditer)).find()) { // Query index: KNNList knns = knnQuery.getKNNForDBID(iditer, k); // Query reference: KNNList trueknns = truekNNQuery.getKNNForDBID(iditer, k); // Put adjusted knn size: mv.put(knns.size() * k / (double) trueknns.size()); // depends on control dependency: [if], data = [none] // Put recall: mvrec.put(DBIDUtil.intersectionSize(knns, trueknns) / (double) trueknns.size()); // depends on control dependency: [if], data = [none] if(knns.size() >= k) { double kdist = knns.getKNNDistance(); final double tdist = trueknns.getKNNDistance(); if(tdist > 0.0) { mvdist.put(kdist); // depends on control dependency: [if], data = [none] mvdaerr.put(kdist - tdist); // depends on control dependency: [if], data = [none] mvdrerr.put(kdist / tdist); // depends on control dependency: [if], data = [none] } } else { // Less than k objects. misses++; // depends on control dependency: [if], data = [none] } } LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none] } LOG.ensureCompleted(prog); // depends on control dependency: [if], data = [none] if(LOG.isStatistics()) { LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev()); // depends on control dependency: [if], data = [none] LOG.statistics("Recall of true results: " + mvrec.getMean() + " +- " + mvrec.getNaiveStddev()); // depends on control dependency: [if], data = [none] if(mvdist.getCount() > 0) { LOG.statistics("Mean k-distance: " + mvdist.getMean() + " +- " + mvdist.getNaiveStddev()); // depends on control dependency: [if], data = [none] LOG.statistics("Mean absolute k-error: " + mvdaerr.getMean() + " +- " + mvdaerr.getNaiveStddev()); // depends on control dependency: [if], data = [none] LOG.statistics("Mean relative k-error: " + mvdrerr.getMean() + " +- " + mvdrerr.getNaiveStddev()); // depends on control dependency: [if], data = [none] } if(misses > 0) { LOG.statistics(String.format("Number of queries that returned less than k=%d objects: %d (%.2f%%)", k, misses, misses * 100. / mv.getCount())); // depends on control dependency: [if], data = [none] } } } else { // Separate query set. TypeInformation res = getDistanceFunction().getInputTypeRestriction(); MultipleObjectsBundle bundle = queries.loadData(); int col = -1; for(int i = 0; i < bundle.metaLength(); i++) { if(res.isAssignableFromType(bundle.meta(i))) { col = i; // depends on control dependency: [if], data = [none] break; } } if(col < 0) { throw new AbortException("No compatible data type in query input was found. Expected: " + res.toString()); } // Random sampling is a bit of hack, sorry. // But currently, we don't (yet) have an "integer random sample" function. DBIDRange sids = DBIDUtil.generateStaticDBIDRange(bundle.dataLength()); final DBIDs sample = DBIDUtil.randomSample(sids, sampling, random); FiniteProgress prog = LOG.isVeryVerbose() ? new FiniteProgress("kNN queries", sample.size(), LOG) : null; MeanVariance mv = new MeanVariance(), mvrec = new MeanVariance(); MeanVariance mvdist = new MeanVariance(), mvdaerr = new MeanVariance(), mvdrerr = new MeanVariance(); int misses = 0; for(DBIDIter iditer = sample.iter(); iditer.valid(); iditer.advance()) { int off = sids.binarySearch(iditer); assert (off >= 0); // depends on control dependency: [for], data = [none] @SuppressWarnings("unchecked") O o = (O) bundle.data(off, col); // Query index: KNNList knns = knnQuery.getKNNForObject(o, k); // Query reference: KNNList trueknns = truekNNQuery.getKNNForObject(o, k); // Put adjusted knn size: mv.put(knns.size() * k / (double) trueknns.size()); // depends on control dependency: [for], data = [none] // Put recall: mvrec.put(DBIDUtil.intersectionSize(knns, trueknns) / (double) trueknns.size()); // depends on control dependency: [for], data = [none] if(knns.size() >= k) { double kdist = knns.getKNNDistance(); final double tdist = trueknns.getKNNDistance(); if(tdist > 0.0) { mvdist.put(kdist); // depends on control dependency: [if], data = [none] mvdaerr.put(kdist - tdist); // depends on control dependency: [if], data = [none] mvdrerr.put(kdist / tdist); // depends on control dependency: [if], data = [none] } } else { // Less than k objects. misses++; // depends on control dependency: [if], data = [none] } LOG.incrementProcessed(prog); // depends on control dependency: [for], data = [none] } LOG.ensureCompleted(prog); // depends on control dependency: [if], data = [none] if(LOG.isStatistics()) { LOG.statistics("Mean number of results: " + mv.getMean() + " +- " + mv.getNaiveStddev()); // depends on control dependency: [if], data = [none] LOG.statistics("Recall of true results: " + mvrec.getMean() + " +- " + mvrec.getNaiveStddev()); // depends on control dependency: [if], data = [none] if(mvdist.getCount() > 0) { LOG.statistics("Mean absolute k-error: " + mvdaerr.getMean() + " +- " + mvdaerr.getNaiveStddev()); // depends on control dependency: [if], data = [none] LOG.statistics("Mean relative k-error: " + mvdrerr.getMean() + " +- " + mvdrerr.getNaiveStddev()); // depends on control dependency: [if], data = [none] } if(misses > 0) { LOG.statistics(String.format("Number of queries that returned less than k=%d objects: %d (%.2f%%)", k, misses, misses * 100. / mv.getCount())); // depends on control dependency: [if], data = [none] } } } return null; } }
public class class_name { public EClass getIfcMechanicalSteelMaterialProperties() { if (ifcMechanicalSteelMaterialPropertiesEClass == null) { ifcMechanicalSteelMaterialPropertiesEClass = (EClass) EPackage.Registry.INSTANCE .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(319); } return ifcMechanicalSteelMaterialPropertiesEClass; } }
public class class_name { public EClass getIfcMechanicalSteelMaterialProperties() { if (ifcMechanicalSteelMaterialPropertiesEClass == null) { ifcMechanicalSteelMaterialPropertiesEClass = (EClass) EPackage.Registry.INSTANCE .getEPackage(Ifc2x3tc1Package.eNS_URI).getEClassifiers().get(319); // depends on control dependency: [if], data = [none] } return ifcMechanicalSteelMaterialPropertiesEClass; } }
public class class_name { private List<String> getResultRows(Result<?> result, boolean logResult) { List<String> rows = new ArrayList<>(); if (logResult) { for (Map<String, Object> columns : result.getRows()) { StringBuilder row = new StringBuilder(); for (Map.Entry<String, Object> entry : columns.entrySet()) { if (row.length() > 0) { row.append(", "); } row.append(entry.getKey()); row.append('='); String stringValue = getLabel(entry.getValue()); row.append(stringValue); } rows.add(" " + row.toString()); } } return rows; } }
public class class_name { private List<String> getResultRows(Result<?> result, boolean logResult) { List<String> rows = new ArrayList<>(); if (logResult) { for (Map<String, Object> columns : result.getRows()) { StringBuilder row = new StringBuilder(); for (Map.Entry<String, Object> entry : columns.entrySet()) { if (row.length() > 0) { row.append(", "); // depends on control dependency: [if], data = [none] } row.append(entry.getKey()); // depends on control dependency: [for], data = [entry] row.append('='); // depends on control dependency: [for], data = [none] String stringValue = getLabel(entry.getValue()); row.append(stringValue); // depends on control dependency: [for], data = [none] } rows.add(" " + row.toString()); // depends on control dependency: [for], data = [none] } } return rows; } }
public class class_name { public <E extends T> List<E> leastOf(Iterator<E> elements, int k) { checkNotNull(elements); checkNonnegative(k, "k"); if (k == 0 || !elements.hasNext()) { return ImmutableList.of(); } else if (k >= Integer.MAX_VALUE / 2) { // k is really large; just do a straightforward sorted-copy-and-sublist ArrayList<E> list = Lists.newArrayList(elements); Collections.sort(list, this); if (list.size() > k) { list.subList(k, list.size()).clear(); } list.trimToSize(); return Collections.unmodifiableList(list); } /* * Our goal is an O(n) algorithm using only one pass and O(k) additional * memory. * * We use the following algorithm: maintain a buffer of size 2*k. Every time * the buffer gets full, find the median and partition around it, keeping * only the lowest k elements. This requires n/k find-median-and-partition * steps, each of which take O(k) time with a traditional quickselect. * * After sorting the output, the whole algorithm is O(n + k log k). It * degrades gracefully for worst-case input (descending order), performs * competitively or wins outright for randomly ordered input, and doesn't * require the whole collection to fit into memory. */ int bufferCap = k * 2; @SuppressWarnings("unchecked") // we'll only put E's in E[] buffer = (E[]) new Object[bufferCap]; E threshold = elements.next(); buffer[0] = threshold; int bufferSize = 1; // threshold is the kth smallest element seen so far. Once bufferSize >= k, // anything larger than threshold can be ignored immediately. while (bufferSize < k && elements.hasNext()) { E e = elements.next(); buffer[bufferSize++] = e; threshold = max(threshold, e); } while (elements.hasNext()) { E e = elements.next(); if (compare(e, threshold) >= 0) { continue; } buffer[bufferSize++] = e; if (bufferSize == bufferCap) { // We apply the quickselect algorithm to partition about the median, // and then ignore the last k elements. int left = 0; int right = bufferCap - 1; int minThresholdPosition = 0; // The leftmost position at which the greatest of the k lower elements // -- the new value of threshold -- might be found. while (left < right) { int pivotIndex = (left + right + 1) >>> 1; int pivotNewIndex = partition(buffer, left, right, pivotIndex); if (pivotNewIndex > k) { right = pivotNewIndex - 1; } else if (pivotNewIndex < k) { left = Math.max(pivotNewIndex, left + 1); minThresholdPosition = pivotNewIndex; } else { break; } } bufferSize = k; threshold = buffer[minThresholdPosition]; for (int i = minThresholdPosition + 1; i < bufferSize; i++) { threshold = max(threshold, buffer[i]); } } } Arrays.sort(buffer, 0, bufferSize, this); bufferSize = Math.min(bufferSize, k); return Collections.unmodifiableList( Arrays.asList(ObjectArrays.arraysCopyOf(buffer, bufferSize))); // We can't use ImmutableList; we have to be null-friendly! } }
public class class_name { public <E extends T> List<E> leastOf(Iterator<E> elements, int k) { checkNotNull(elements); checkNonnegative(k, "k"); if (k == 0 || !elements.hasNext()) { return ImmutableList.of(); // depends on control dependency: [if], data = [none] } else if (k >= Integer.MAX_VALUE / 2) { // k is really large; just do a straightforward sorted-copy-and-sublist ArrayList<E> list = Lists.newArrayList(elements); Collections.sort(list, this); // depends on control dependency: [if], data = [none] if (list.size() > k) { list.subList(k, list.size()).clear(); // depends on control dependency: [if], data = [none] } list.trimToSize(); // depends on control dependency: [if], data = [none] return Collections.unmodifiableList(list); // depends on control dependency: [if], data = [none] } /* * Our goal is an O(n) algorithm using only one pass and O(k) additional * memory. * * We use the following algorithm: maintain a buffer of size 2*k. Every time * the buffer gets full, find the median and partition around it, keeping * only the lowest k elements. This requires n/k find-median-and-partition * steps, each of which take O(k) time with a traditional quickselect. * * After sorting the output, the whole algorithm is O(n + k log k). It * degrades gracefully for worst-case input (descending order), performs * competitively or wins outright for randomly ordered input, and doesn't * require the whole collection to fit into memory. */ int bufferCap = k * 2; @SuppressWarnings("unchecked") // we'll only put E's in E[] buffer = (E[]) new Object[bufferCap]; E threshold = elements.next(); buffer[0] = threshold; int bufferSize = 1; // threshold is the kth smallest element seen so far. Once bufferSize >= k, // anything larger than threshold can be ignored immediately. while (bufferSize < k && elements.hasNext()) { E e = elements.next(); buffer[bufferSize++] = e; // depends on control dependency: [while], data = [none] threshold = max(threshold, e); // depends on control dependency: [while], data = [none] } while (elements.hasNext()) { E e = elements.next(); if (compare(e, threshold) >= 0) { continue; } buffer[bufferSize++] = e; // depends on control dependency: [while], data = [none] if (bufferSize == bufferCap) { // We apply the quickselect algorithm to partition about the median, // and then ignore the last k elements. int left = 0; int right = bufferCap - 1; int minThresholdPosition = 0; // The leftmost position at which the greatest of the k lower elements // -- the new value of threshold -- might be found. while (left < right) { int pivotIndex = (left + right + 1) >>> 1; int pivotNewIndex = partition(buffer, left, right, pivotIndex); if (pivotNewIndex > k) { right = pivotNewIndex - 1; // depends on control dependency: [if], data = [none] } else if (pivotNewIndex < k) { left = Math.max(pivotNewIndex, left + 1); // depends on control dependency: [if], data = [(pivotNewIndex] minThresholdPosition = pivotNewIndex; // depends on control dependency: [if], data = [none] } else { break; } } bufferSize = k; // depends on control dependency: [if], data = [none] threshold = buffer[minThresholdPosition]; // depends on control dependency: [if], data = [none] for (int i = minThresholdPosition + 1; i < bufferSize; i++) { threshold = max(threshold, buffer[i]); // depends on control dependency: [for], data = [i] } } } Arrays.sort(buffer, 0, bufferSize, this); bufferSize = Math.min(bufferSize, k); return Collections.unmodifiableList( Arrays.asList(ObjectArrays.arraysCopyOf(buffer, bufferSize))); // We can't use ImmutableList; we have to be null-friendly! } }
public class class_name { public void finish() throws IOException { //Map<String,Set<TypeLibInfo>> byPackage = new HashMap<String,Set<TypeLibInfo>>(); //for( TypeLibInfo tli : generatedTypeLibs ) { // Set<TypeLibInfo> s = byPackage.get(tli.packageName); // if(s==null) // byPackage.put(tli.packageName,s=new HashSet<TypeLibInfo>()); // s.add(tli); //} // for( Map.Entry<String,Set<TypeLibInfo>> e : byPackage.entrySet() ) { for( Package pkg : packages.values() ) { LibBinder lib1 = pkg.typeLibs.iterator().next(); if(referenceResolver.suppress(lib1.lib)) continue; // generate ClassFactory IndentingWriter o = pkg.createWriter(lib1,"ClassFactory.java"); lib1.generateHeader(o); o.printJavadoc("Defines methods to create COM objects"); o.println("public abstract class ClassFactory {"); o.in(); o.println("private ClassFactory() {} // instanciation is not allowed"); o.println(); for( LibBinder lib : pkg.typeLibs ) { int len = lib.lib.count(); for( int i=0; i<len; i++ ) { ICoClassDecl t = lib.lib.getType(i).queryInterface(ICoClassDecl.class); if(t==null) continue; if(!t.isCreatable()) continue; declareFactoryMethod(o, t); t.dispose(); } } o.out(); o.println("}"); o.close(); } } }
public class class_name { public void finish() throws IOException { //Map<String,Set<TypeLibInfo>> byPackage = new HashMap<String,Set<TypeLibInfo>>(); //for( TypeLibInfo tli : generatedTypeLibs ) { // Set<TypeLibInfo> s = byPackage.get(tli.packageName); // if(s==null) // byPackage.put(tli.packageName,s=new HashSet<TypeLibInfo>()); // s.add(tli); //} // for( Map.Entry<String,Set<TypeLibInfo>> e : byPackage.entrySet() ) { for( Package pkg : packages.values() ) { LibBinder lib1 = pkg.typeLibs.iterator().next(); if(referenceResolver.suppress(lib1.lib)) continue; // generate ClassFactory IndentingWriter o = pkg.createWriter(lib1,"ClassFactory.java"); lib1.generateHeader(o); o.printJavadoc("Defines methods to create COM objects"); o.println("public abstract class ClassFactory {"); o.in(); o.println("private ClassFactory() {} // instanciation is not allowed"); o.println(); for( LibBinder lib : pkg.typeLibs ) { int len = lib.lib.count(); for( int i=0; i<len; i++ ) { ICoClassDecl t = lib.lib.getType(i).queryInterface(ICoClassDecl.class); if(t==null) continue; if(!t.isCreatable()) continue; declareFactoryMethod(o, t); // depends on control dependency: [for], data = [none] t.dispose(); // depends on control dependency: [for], data = [none] } } o.out(); o.println("}"); o.close(); } } }
public class class_name { public List<SpecTopic> getAllSpecTopics() { final ArrayList<SpecTopic> specTopics = new ArrayList<SpecTopic>(); for (final Entry<Integer, List<ITopicNode>> topicEntry : topics.entrySet()) { for (final ITopicNode topic : topicEntry.getValue()) { if (topic instanceof SpecTopic) { specTopics.add((SpecTopic) topic); } } } return specTopics; } }
public class class_name { public List<SpecTopic> getAllSpecTopics() { final ArrayList<SpecTopic> specTopics = new ArrayList<SpecTopic>(); for (final Entry<Integer, List<ITopicNode>> topicEntry : topics.entrySet()) { for (final ITopicNode topic : topicEntry.getValue()) { if (topic instanceof SpecTopic) { specTopics.add((SpecTopic) topic); // depends on control dependency: [if], data = [none] } } } return specTopics; } }
public class class_name { public static PJsonObject parseSpec(final String spec) { final JSONObject jsonSpec; try { jsonSpec = new JSONObject(spec); } catch (JSONException e) { throw new RuntimeException("Cannot parse the spec file: " + spec, e); } return new PJsonObject(jsonSpec, "spec"); } }
public class class_name { public static PJsonObject parseSpec(final String spec) { final JSONObject jsonSpec; try { jsonSpec = new JSONObject(spec); // depends on control dependency: [try], data = [none] } catch (JSONException e) { throw new RuntimeException("Cannot parse the spec file: " + spec, e); } // depends on control dependency: [catch], data = [none] return new PJsonObject(jsonSpec, "spec"); } }
public class class_name { public void queueNotification( final Collection<ClientInterfaceHandleManager> connections, final Supplier<DeferredSerialization> notification, final Predicate<ClientInterfaceHandleManager> wantsNotificationPredicate) { m_submissionQueue.offer(new Runnable() { @Override public void run() { for (ClientInterfaceHandleManager cihm : connections) { if (!wantsNotificationPredicate.apply(cihm)) continue; final Connection c = cihm.connection; /* * To avoid extra allocations and promotion we initially store a single event * as just the event. Once we have two or more events we create a linked list * and walk the list to dedupe events by identity */ Object pendingNotifications = m_clientsPendingNotification.get(c); try { if (pendingNotifications == null) { m_clientsPendingNotification.put(c, notification); } else if (pendingNotifications instanceof Supplier) { //Identity duplicate check if (pendingNotifications == notification) return; //Convert to a two node linked list @SuppressWarnings("unchecked") Node n1 = new Node((Supplier<DeferredSerialization>)pendingNotifications, null); n1 = m_cachedNodes.get(n1, n1); Node n2 = new Node(notification, n1); n2 = m_cachedNodes.get(n2, n2); m_clientsPendingNotification.put(c, n2); } else { //Walk the list and check if the notification is a duplicate Node head = (Node)pendingNotifications; boolean dup = false; while (head != null) { if (head.notification == notification) { dup = true; break; } head = head.next; } //If it's a dupe, no new work if (dup) continue; //Otherwise replace the head of the list which is the value in the map Node replacement = new Node(notification, (Node)pendingNotifications); replacement = m_cachedNodes.get(replacement, replacement); m_clientsPendingNotification.put(c, replacement); } } catch (ExecutionException e) { VoltDB.crashLocalVoltDB( "Unexpected exception pushing client notifications", true, Throwables.getRootCause(e)); } } } }); } }
public class class_name { public void queueNotification( final Collection<ClientInterfaceHandleManager> connections, final Supplier<DeferredSerialization> notification, final Predicate<ClientInterfaceHandleManager> wantsNotificationPredicate) { m_submissionQueue.offer(new Runnable() { @Override public void run() { for (ClientInterfaceHandleManager cihm : connections) { if (!wantsNotificationPredicate.apply(cihm)) continue; final Connection c = cihm.connection; /* * To avoid extra allocations and promotion we initially store a single event * as just the event. Once we have two or more events we create a linked list * and walk the list to dedupe events by identity */ Object pendingNotifications = m_clientsPendingNotification.get(c); try { if (pendingNotifications == null) { m_clientsPendingNotification.put(c, notification); // depends on control dependency: [if], data = [none] } else if (pendingNotifications instanceof Supplier) { //Identity duplicate check if (pendingNotifications == notification) return; //Convert to a two node linked list @SuppressWarnings("unchecked") Node n1 = new Node((Supplier<DeferredSerialization>)pendingNotifications, null); n1 = m_cachedNodes.get(n1, n1); // depends on control dependency: [if], data = [none] Node n2 = new Node(notification, n1); n2 = m_cachedNodes.get(n2, n2); // depends on control dependency: [if], data = [none] m_clientsPendingNotification.put(c, n2); // depends on control dependency: [if], data = [none] } else { //Walk the list and check if the notification is a duplicate Node head = (Node)pendingNotifications; boolean dup = false; while (head != null) { if (head.notification == notification) { dup = true; // depends on control dependency: [if], data = [none] break; } head = head.next; // depends on control dependency: [while], data = [none] } //If it's a dupe, no new work if (dup) continue; //Otherwise replace the head of the list which is the value in the map Node replacement = new Node(notification, (Node)pendingNotifications); replacement = m_cachedNodes.get(replacement, replacement); // depends on control dependency: [if], data = [none] m_clientsPendingNotification.put(c, replacement); // depends on control dependency: [if], data = [none] } } catch (ExecutionException e) { VoltDB.crashLocalVoltDB( "Unexpected exception pushing client notifications", true, Throwables.getRootCause(e)); } // depends on control dependency: [catch], data = [none] } } }); } }
public class class_name { public static ObjectResult newErrorResult(String errMsg, String objID) { ObjectResult result = new ObjectResult(); result.setStatus(Status.ERROR); result.setErrorMessage(errMsg); if (!Utils.isEmpty(objID)) { result.setObjectID(objID); } return result; } }
public class class_name { public static ObjectResult newErrorResult(String errMsg, String objID) { ObjectResult result = new ObjectResult(); result.setStatus(Status.ERROR); result.setErrorMessage(errMsg); if (!Utils.isEmpty(objID)) { result.setObjectID(objID); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public Map<String, Iterable<WorkUnitState>> getPreviousWorkUnitStatesByDatasetUrns() { Map<String, Iterable<WorkUnitState>> previousWorkUnitStatesByDatasetUrns = Maps.newHashMap(); if (this.workUnitAndDatasetStateFunctional != null) { materializeWorkUnitAndDatasetStates(null); } for (WorkUnitState workUnitState : this.previousWorkUnitStates) { String datasetUrn = workUnitState.getProp(ConfigurationKeys.DATASET_URN_KEY, ConfigurationKeys.DEFAULT_DATASET_URN); if (!previousWorkUnitStatesByDatasetUrns.containsKey(datasetUrn)) { previousWorkUnitStatesByDatasetUrns.put(datasetUrn, Lists.newArrayList()); } ((List<WorkUnitState>) previousWorkUnitStatesByDatasetUrns.get(datasetUrn)).add(workUnitState); } return ImmutableMap.copyOf(previousWorkUnitStatesByDatasetUrns); } }
public class class_name { public Map<String, Iterable<WorkUnitState>> getPreviousWorkUnitStatesByDatasetUrns() { Map<String, Iterable<WorkUnitState>> previousWorkUnitStatesByDatasetUrns = Maps.newHashMap(); if (this.workUnitAndDatasetStateFunctional != null) { materializeWorkUnitAndDatasetStates(null); // depends on control dependency: [if], data = [null)] } for (WorkUnitState workUnitState : this.previousWorkUnitStates) { String datasetUrn = workUnitState.getProp(ConfigurationKeys.DATASET_URN_KEY, ConfigurationKeys.DEFAULT_DATASET_URN); if (!previousWorkUnitStatesByDatasetUrns.containsKey(datasetUrn)) { previousWorkUnitStatesByDatasetUrns.put(datasetUrn, Lists.newArrayList()); // depends on control dependency: [if], data = [none] } ((List<WorkUnitState>) previousWorkUnitStatesByDatasetUrns.get(datasetUrn)).add(workUnitState); // depends on control dependency: [for], data = [workUnitState] } return ImmutableMap.copyOf(previousWorkUnitStatesByDatasetUrns); } }
public class class_name { private static String localize(Logger logger, String message) { ResourceBundle bundle = logger.getResourceBundle(); try { return bundle != null ? bundle.getString(message) : message; } catch (MissingResourceException ex) { //string not in the bundle return message; } } }
public class class_name { private static String localize(Logger logger, String message) { ResourceBundle bundle = logger.getResourceBundle(); try { return bundle != null ? bundle.getString(message) : message; // depends on control dependency: [try], data = [none] } catch (MissingResourceException ex) { //string not in the bundle return message; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public double[] map( Map<String, Double> row, double data[] ) { String[] colNames = getNames(); for( int i=0; i<colNames.length-1; i++ ) { Double d = row.get(colNames[i]); data[i] = d==null ? Double.NaN : d; } return data; } }
public class class_name { public double[] map( Map<String, Double> row, double data[] ) { String[] colNames = getNames(); for( int i=0; i<colNames.length-1; i++ ) { Double d = row.get(colNames[i]); data[i] = d==null ? Double.NaN : d; // depends on control dependency: [for], data = [i] } return data; } }
public class class_name { public Ticket store(final J2EContext webContext, final BaseClient client) { val properties = buildTicketProperties(webContext); val originalService = argumentExtractor.extractService(webContext.getRequest()); val service = authenticationRequestServiceSelectionStrategies.resolveService(originalService); properties.put(CasProtocolConstants.PARAMETER_SERVICE, originalService); properties.put(CasProtocolConstants.PARAMETER_TARGET_SERVICE, service); val transientFactory = (TransientSessionTicketFactory) this.ticketFactory.get(TransientSessionTicket.class); val ticket = transientFactory.create(originalService, properties); val ticketId = ticket.getId(); LOGGER.debug("Storing delegated authentication request ticket [{}] for service [{}] with properties [{}]", ticketId, ticket.getService(), ticket.getProperties()); this.ticketRegistry.addTicket(ticket); webContext.setRequestAttribute(PARAMETER_CLIENT_ID, ticketId); val sessionStore = webContext.getSessionStore(); if (client instanceof SAML2Client) { sessionStore.set(webContext, SAML2StateGenerator.SAML_RELAY_STATE_ATTRIBUTE, ticketId); } if (client instanceof OAuth20Client) { val oauthClient = (OAuth20Client) client; val config = oauthClient.getConfiguration(); config.setWithState(true); config.setStateGenerator(new StaticOrRandomStateGenerator(ticketId)); } if (client instanceof OidcClient) { val oidcClient = (OidcClient) client; val config = oidcClient.getConfiguration(); config.addCustomParam(PARAMETER_CLIENT_ID, ticketId); config.setWithState(true); config.setStateGenerator(new StaticOrRandomStateGenerator(ticketId)); } if (client instanceof CasClient) { val casClient = (CasClient) client; val config = casClient.getConfiguration(); config.addCustomParam(DelegatedClientWebflowManager.PARAMETER_CLIENT_ID, ticketId); } if (client instanceof OAuth10Client) { sessionStore.set(webContext, OAUTH10_CLIENT_ID_SESSION_KEY, ticket.getId()); } return ticket; } }
public class class_name { public Ticket store(final J2EContext webContext, final BaseClient client) { val properties = buildTicketProperties(webContext); val originalService = argumentExtractor.extractService(webContext.getRequest()); val service = authenticationRequestServiceSelectionStrategies.resolveService(originalService); properties.put(CasProtocolConstants.PARAMETER_SERVICE, originalService); properties.put(CasProtocolConstants.PARAMETER_TARGET_SERVICE, service); val transientFactory = (TransientSessionTicketFactory) this.ticketFactory.get(TransientSessionTicket.class); val ticket = transientFactory.create(originalService, properties); val ticketId = ticket.getId(); LOGGER.debug("Storing delegated authentication request ticket [{}] for service [{}] with properties [{}]", ticketId, ticket.getService(), ticket.getProperties()); this.ticketRegistry.addTicket(ticket); webContext.setRequestAttribute(PARAMETER_CLIENT_ID, ticketId); val sessionStore = webContext.getSessionStore(); if (client instanceof SAML2Client) { sessionStore.set(webContext, SAML2StateGenerator.SAML_RELAY_STATE_ATTRIBUTE, ticketId); // depends on control dependency: [if], data = [none] } if (client instanceof OAuth20Client) { val oauthClient = (OAuth20Client) client; val config = oauthClient.getConfiguration(); config.setWithState(true); // depends on control dependency: [if], data = [none] config.setStateGenerator(new StaticOrRandomStateGenerator(ticketId)); // depends on control dependency: [if], data = [none] } if (client instanceof OidcClient) { val oidcClient = (OidcClient) client; val config = oidcClient.getConfiguration(); config.addCustomParam(PARAMETER_CLIENT_ID, ticketId); // depends on control dependency: [if], data = [none] config.setWithState(true); // depends on control dependency: [if], data = [none] config.setStateGenerator(new StaticOrRandomStateGenerator(ticketId)); // depends on control dependency: [if], data = [none] } if (client instanceof CasClient) { val casClient = (CasClient) client; val config = casClient.getConfiguration(); config.addCustomParam(DelegatedClientWebflowManager.PARAMETER_CLIENT_ID, ticketId); // depends on control dependency: [if], data = [none] } if (client instanceof OAuth10Client) { sessionStore.set(webContext, OAUTH10_CLIENT_ID_SESSION_KEY, ticket.getId()); // depends on control dependency: [if], data = [none] } return ticket; } }
public class class_name { protected String generateCreateIndexSql(final CreateSpatialIndexStatement statement, final Database database) { final StringBuilder sql = new StringBuilder(); sql.append("CREATE INDEX "); final String schemaName = statement.getTableSchemaName(); final String catalogName = statement.getTableCatalogName(); final String indexName = statement.getIndexName(); sql.append(database.escapeIndexName(catalogName, schemaName, indexName)); sql.append(" ON "); final String tableName = statement.getTableName(); sql.append(database.escapeTableName(catalogName, schemaName, tableName)).append(" ("); final Iterator<String> iterator = Arrays.asList(statement.getColumns()).iterator(); final String column = iterator.next(); sql.append(database.escapeColumnName(catalogName, statement.getTableSchemaName(), tableName, column)); sql.append(") INDEXTYPE IS mdsys.spatial_index"); // Generate and add the optional parameters. final Collection<String> parameters = getParameters(statement); if (parameters != null && !parameters.isEmpty()) { sql.append(" PARAMETERS ('"); sql.append(StringUtils.join(parameters, " ")); sql.append("')"); } return sql.toString(); } }
public class class_name { protected String generateCreateIndexSql(final CreateSpatialIndexStatement statement, final Database database) { final StringBuilder sql = new StringBuilder(); sql.append("CREATE INDEX "); final String schemaName = statement.getTableSchemaName(); final String catalogName = statement.getTableCatalogName(); final String indexName = statement.getIndexName(); sql.append(database.escapeIndexName(catalogName, schemaName, indexName)); sql.append(" ON "); final String tableName = statement.getTableName(); sql.append(database.escapeTableName(catalogName, schemaName, tableName)).append(" ("); final Iterator<String> iterator = Arrays.asList(statement.getColumns()).iterator(); final String column = iterator.next(); sql.append(database.escapeColumnName(catalogName, statement.getTableSchemaName(), tableName, column)); sql.append(") INDEXTYPE IS mdsys.spatial_index"); // Generate and add the optional parameters. final Collection<String> parameters = getParameters(statement); if (parameters != null && !parameters.isEmpty()) { sql.append(" PARAMETERS ('"); sql.append(StringUtils.join(parameters, " ")); sql.append("')"); // depends on control dependency: [if], data = [none] } return sql.toString(); } }
public class class_name { static DiyFp asNormalizedDiyFp(long d64) { long f = significand(d64); int e = exponent(d64); assert(f != 0); // The current double could be a denormal. while ((f & kHiddenBit) == 0) { f <<= 1; e--; } // Do the final shifts in one go. Don't forget the hidden bit (the '-1'). f <<= DiyFp.kSignificandSize - kSignificandSize - 1; e -= DiyFp.kSignificandSize - kSignificandSize - 1; return new DiyFp(f, e); } }
public class class_name { static DiyFp asNormalizedDiyFp(long d64) { long f = significand(d64); int e = exponent(d64); assert(f != 0); // The current double could be a denormal. while ((f & kHiddenBit) == 0) { f <<= 1; // depends on control dependency: [while], data = [none] e--; // depends on control dependency: [while], data = [none] } // Do the final shifts in one go. Don't forget the hidden bit (the '-1'). f <<= DiyFp.kSignificandSize - kSignificandSize - 1; e -= DiyFp.kSignificandSize - kSignificandSize - 1; return new DiyFp(f, e); } }
public class class_name { protected List<EquPart> multiplize(final List<EquPart> oldTokens) { final EquPart[] equParts = oldTokens.toArray(new EquPart[0]); final EquPart[] fixed = new EquPart[equParts.length * 2]; /* * )(, operand (, operand operand, operand function, ) operand, ) * function */ fixed[0] = equParts[0]; EquPart m; int left = 0; for (int right = 1; right < equParts.length; right++) { if (fixed[left].multiplize(equParts[right])) { m = new OpMultiply(fixed[left]); left++; fixed[left] = m; } left++; fixed[left] = equParts[right]; } final List<EquPart> tokens = new ArrayList<>(); for (int i = 0; i < fixed.length; i++) if (fixed[i] != null) tokens.add(fixed[i]); return tokens; } }
public class class_name { protected List<EquPart> multiplize(final List<EquPart> oldTokens) { final EquPart[] equParts = oldTokens.toArray(new EquPart[0]); final EquPart[] fixed = new EquPart[equParts.length * 2]; /* * )(, operand (, operand operand, operand function, ) operand, ) * function */ fixed[0] = equParts[0]; EquPart m; int left = 0; for (int right = 1; right < equParts.length; right++) { if (fixed[left].multiplize(equParts[right])) { m = new OpMultiply(fixed[left]); // depends on control dependency: [if], data = [none] left++; // depends on control dependency: [if], data = [none] fixed[left] = m; // depends on control dependency: [if], data = [none] } left++; // depends on control dependency: [for], data = [none] fixed[left] = equParts[right]; // depends on control dependency: [for], data = [right] } final List<EquPart> tokens = new ArrayList<>(); for (int i = 0; i < fixed.length; i++) if (fixed[i] != null) tokens.add(fixed[i]); return tokens; } }
public class class_name { public void displayError(int iErrorCode) { if (iErrorCode < -2) { // Error codes < -2 are usually user defined (and available at 'LastError') Task task = this.getTask();//getAppletScreen().getScreenFieldView().getControl(); if (task != null) { String strError = task.getLastError(iErrorCode); if ((strError != null) && (strError.length() > 0)) { this.displayError(strError); return; } } } DBException e = new DBException(iErrorCode); this.displayError(e); } }
public class class_name { public void displayError(int iErrorCode) { if (iErrorCode < -2) { // Error codes < -2 are usually user defined (and available at 'LastError') Task task = this.getTask();//getAppletScreen().getScreenFieldView().getControl(); if (task != null) { String strError = task.getLastError(iErrorCode); if ((strError != null) && (strError.length() > 0)) { this.displayError(strError); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } } } DBException e = new DBException(iErrorCode); this.displayError(e); } }
public class class_name { public synchronized Object get(Object key) { ConcurrentHashMap<Object, Object> tableRef = primaryTable; Entry curEntry = (Entry) primaryTable.get(key); // Not found in primary if (curEntry == null) { tableRef = secondaryTable; curEntry = (Entry) secondaryTable.get(key); // Not found in primary or secondary if (curEntry == null) { tableRef = tertiaryTable; curEntry = (Entry) tertiaryTable.get(key); } // Not found in primary, secondary, or tertiary if (curEntry == null) { tableRef = null; } } // If found in secondary or tertiary, move entry to primary if ((tableRef != null) && (tableRef != primaryTable)) { primaryTable.put(key, curEntry); tableRef.remove(key); } // If not present even in any table, add an empty entry // that can be found faster for update if (tableRef == null) { curEntry = (Entry) primaryTable.get(key); if (curEntry == null) { curEntry = new Entry(); Entry prevEntry = (Entry) primaryTable.putIfAbsent(key, curEntry); if (prevEntry != null) curEntry = prevEntry; // We lost the race, so use the entry from the other thread } } return curEntry.value; } }
public class class_name { public synchronized Object get(Object key) { ConcurrentHashMap<Object, Object> tableRef = primaryTable; Entry curEntry = (Entry) primaryTable.get(key); // Not found in primary if (curEntry == null) { tableRef = secondaryTable; // depends on control dependency: [if], data = [none] curEntry = (Entry) secondaryTable.get(key); // depends on control dependency: [if], data = [none] // Not found in primary or secondary if (curEntry == null) { tableRef = tertiaryTable; // depends on control dependency: [if], data = [none] curEntry = (Entry) tertiaryTable.get(key); // depends on control dependency: [if], data = [none] } // Not found in primary, secondary, or tertiary if (curEntry == null) { tableRef = null; // depends on control dependency: [if], data = [none] } } // If found in secondary or tertiary, move entry to primary if ((tableRef != null) && (tableRef != primaryTable)) { primaryTable.put(key, curEntry); // depends on control dependency: [if], data = [none] tableRef.remove(key); // depends on control dependency: [if], data = [none] } // If not present even in any table, add an empty entry // that can be found faster for update if (tableRef == null) { curEntry = (Entry) primaryTable.get(key); // depends on control dependency: [if], data = [none] if (curEntry == null) { curEntry = new Entry(); // depends on control dependency: [if], data = [none] Entry prevEntry = (Entry) primaryTable.putIfAbsent(key, curEntry); if (prevEntry != null) curEntry = prevEntry; // We lost the race, so use the entry from the other thread } } return curEntry.value; } }
public class class_name { public Options addOptionGroup(OptionGroup group) { if (group.isRequired()) { requiredOpts.add(group); } for (Option option : group.getOptions()) { // an Option cannot be required if it is in an // OptionGroup, either the group is required or // nothing is required option.setRequired(false); addOption(option); optionGroups.put(option.getKey(), group); } return this; } }
public class class_name { public Options addOptionGroup(OptionGroup group) { if (group.isRequired()) { requiredOpts.add(group); // depends on control dependency: [if], data = [none] } for (Option option : group.getOptions()) { // an Option cannot be required if it is in an // OptionGroup, either the group is required or // nothing is required option.setRequired(false); // depends on control dependency: [for], data = [option] addOption(option); // depends on control dependency: [for], data = [option] optionGroups.put(option.getKey(), group); // depends on control dependency: [for], data = [option] } return this; } }
public class class_name { public static Set<ModificationFeature>[] getChangedModifications(PhysicalEntity before, PhysicalEntity after) { Set<Modification> set1 = collectFeatures(before); Set<Modification> set2 = collectFeatures(after); Set<Modification> temp = new HashSet<Modification>(set1); set1.removeAll(set2); set2.removeAll(temp); // Remove common features that can be deemed semantically equivalent Set<Modification> furtherRemove = new HashSet<Modification>(); for (Modification m1 : set1) { for (Modification m2 : set2) { if (furtherRemove.contains(m2)) continue; if (m1.getKey().equals(m2.getKey())) { furtherRemove.add(m1); furtherRemove.add(m2); break; } } } set1.removeAll(furtherRemove); set2.removeAll(furtherRemove); return new Set[]{collectFeatures(set2), collectFeatures(set1)}; } }
public class class_name { public static Set<ModificationFeature>[] getChangedModifications(PhysicalEntity before, PhysicalEntity after) { Set<Modification> set1 = collectFeatures(before); Set<Modification> set2 = collectFeatures(after); Set<Modification> temp = new HashSet<Modification>(set1); set1.removeAll(set2); set2.removeAll(temp); // Remove common features that can be deemed semantically equivalent Set<Modification> furtherRemove = new HashSet<Modification>(); for (Modification m1 : set1) { for (Modification m2 : set2) { if (furtherRemove.contains(m2)) continue; if (m1.getKey().equals(m2.getKey())) { furtherRemove.add(m1); // depends on control dependency: [if], data = [none] furtherRemove.add(m2); // depends on control dependency: [if], data = [none] break; } } } set1.removeAll(furtherRemove); set2.removeAll(furtherRemove); return new Set[]{collectFeatures(set2), collectFeatures(set1)}; } }
public class class_name { public void claimName(String name) { checkName(name); if (names.add(name, 1) != 0) { names.remove(name); // give a slightly better error message in this case if (reserved.contains(name)) { throw new IllegalArgumentException("Tried to claim a reserved name: " + name); } throw new IllegalArgumentException("Name: " + name + " was already claimed!"); } } }
public class class_name { public void claimName(String name) { checkName(name); if (names.add(name, 1) != 0) { names.remove(name); // depends on control dependency: [if], data = [none] // give a slightly better error message in this case if (reserved.contains(name)) { throw new IllegalArgumentException("Tried to claim a reserved name: " + name); } throw new IllegalArgumentException("Name: " + name + " was already claimed!"); } } }
public class class_name { public static Basic2DMatrix from1DArray(int rows, int columns, double[] array) { double[][] array2D = new double[rows][columns]; for (int i = 0; i < rows; i++) { System.arraycopy(array, i * columns, array2D[i], 0, columns); } return new Basic2DMatrix(array2D); } }
public class class_name { public static Basic2DMatrix from1DArray(int rows, int columns, double[] array) { double[][] array2D = new double[rows][columns]; for (int i = 0; i < rows; i++) { System.arraycopy(array, i * columns, array2D[i], 0, columns); // depends on control dependency: [for], data = [i] } return new Basic2DMatrix(array2D); } }
public class class_name { LoggingOptions getLoggingOptions() { if (loggingOptions == null) { if (Strings.isNullOrEmpty(credentialsFile)) { loggingOptions = LoggingOptions.getDefaultInstance(); } else { try { loggingOptions = LoggingOptions.newBuilder() .setCredentials( GoogleCredentials.fromStream(new FileInputStream(credentialsFile))) .build(); } catch (IOException e) { throw new RuntimeException( String.format( "Could not read credentials file %s. Please verify that the file exists and is a valid Google credentials file.", credentialsFile), e); } } } return loggingOptions; } }
public class class_name { LoggingOptions getLoggingOptions() { if (loggingOptions == null) { if (Strings.isNullOrEmpty(credentialsFile)) { loggingOptions = LoggingOptions.getDefaultInstance(); // depends on control dependency: [if], data = [none] } else { try { loggingOptions = LoggingOptions.newBuilder() .setCredentials( GoogleCredentials.fromStream(new FileInputStream(credentialsFile))) .build(); // depends on control dependency: [try], data = [none] } catch (IOException e) { throw new RuntimeException( String.format( "Could not read credentials file %s. Please verify that the file exists and is a valid Google credentials file.", credentialsFile), e); } // depends on control dependency: [catch], data = [none] } } return loggingOptions; } }
public class class_name { @Override public List<String> listHosts() { try { // TODO (dano): only return hosts whose agents completed registration (i.e. has id nodes) return provider.get("listHosts").getChildren(Paths.configHosts()); } catch (KeeperException.NoNodeException e) { return emptyList(); } catch (KeeperException e) { throw new HeliosRuntimeException("listing hosts failed", e); } } }
public class class_name { @Override public List<String> listHosts() { try { // TODO (dano): only return hosts whose agents completed registration (i.e. has id nodes) return provider.get("listHosts").getChildren(Paths.configHosts()); // depends on control dependency: [try], data = [none] } catch (KeeperException.NoNodeException e) { return emptyList(); } catch (KeeperException e) { // depends on control dependency: [catch], data = [none] throw new HeliosRuntimeException("listing hosts failed", e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public synchronized void endLog() { if (!finalized) { finishedTime = Calendar.getInstance(); finalized = true; logWriter.writeEndLog(); //copy backup chain log file in into Backupset files itself for portability (e.g. on another server) try { InputStream in = PrivilegedFileHelper.fileInputStream(log); File dest = new File(config.getBackupDir() + File.separator + log.getName()); if (!PrivilegedFileHelper.exists(dest)) { OutputStream out = PrivilegedFileHelper.fileOutputStream(dest); byte[] buf = new byte[(int) (PrivilegedFileHelper.length(log))]; in.read(buf); String sConfig = new String(buf, Constants.DEFAULT_ENCODING); sConfig = sConfig.replaceAll("<backup-dir>.+</backup-dir>", "<backup-dir>.</backup-dir>"); out.write(sConfig.getBytes(Constants.DEFAULT_ENCODING)); in.close(); out.close(); } } catch (PatternSyntaxException e) { logger.error("Can't write log", e); } catch (FileNotFoundException e) { logger.error("Can't write log", e); } catch (IOException e) { logger.error("Can't write log", e); } } } }
public class class_name { public synchronized void endLog() { if (!finalized) { finishedTime = Calendar.getInstance(); // depends on control dependency: [if], data = [none] finalized = true; // depends on control dependency: [if], data = [none] logWriter.writeEndLog(); // depends on control dependency: [if], data = [none] //copy backup chain log file in into Backupset files itself for portability (e.g. on another server) try { InputStream in = PrivilegedFileHelper.fileInputStream(log); File dest = new File(config.getBackupDir() + File.separator + log.getName()); if (!PrivilegedFileHelper.exists(dest)) { OutputStream out = PrivilegedFileHelper.fileOutputStream(dest); byte[] buf = new byte[(int) (PrivilegedFileHelper.length(log))]; in.read(buf); // depends on control dependency: [if], data = [none] String sConfig = new String(buf, Constants.DEFAULT_ENCODING); sConfig = sConfig.replaceAll("<backup-dir>.+</backup-dir>", "<backup-dir>.</backup-dir>"); out.write(sConfig.getBytes(Constants.DEFAULT_ENCODING)); // depends on control dependency: [if], data = [none] in.close(); // depends on control dependency: [if], data = [none] out.close(); // depends on control dependency: [if], data = [none] } } catch (PatternSyntaxException e) { logger.error("Can't write log", e); } catch (FileNotFoundException e) { logger.error("Can't write log", e); } // depends on control dependency: [catch], data = [none] catch (IOException e) { logger.error("Can't write log", e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { @Override public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) { if (readError == null) { try { processHighlightings(lineBuilder); } catch (RangeOffsetConverterException e) { readError = new ReadError(HIGHLIGHTING, lineBuilder.getLine()); LOG.debug(format("Inconsistency detected in Highlighting data. Highlighting will be ignored for file '%s'", file.getDbKey()), e); } } return Optional.ofNullable(readError); } }
public class class_name { @Override public Optional<ReadError> read(DbFileSources.Line.Builder lineBuilder) { if (readError == null) { try { processHighlightings(lineBuilder); // depends on control dependency: [try], data = [none] } catch (RangeOffsetConverterException e) { readError = new ReadError(HIGHLIGHTING, lineBuilder.getLine()); LOG.debug(format("Inconsistency detected in Highlighting data. Highlighting will be ignored for file '%s'", file.getDbKey()), e); } // depends on control dependency: [catch], data = [none] } return Optional.ofNullable(readError); } }
public class class_name { private void setItem(int index, int value) { if (index == HOUR_INDEX) { setValueForItem(HOUR_INDEX, value); int hourDegrees = (value % 12) * HOUR_VALUE_TO_DEGREES_STEP_SIZE; mHourRadialSelectorView.setSelection(hourDegrees, isHourInnerCircle(value), false); mHourRadialSelectorView.invalidate(); } else if (index == MINUTE_INDEX) { setValueForItem(MINUTE_INDEX, value); int minuteDegrees = value * MINUTE_VALUE_TO_DEGREES_STEP_SIZE; mMinuteRadialSelectorView.setSelection(minuteDegrees, false, false); mMinuteRadialSelectorView.invalidate(); } } }
public class class_name { private void setItem(int index, int value) { if (index == HOUR_INDEX) { setValueForItem(HOUR_INDEX, value); // depends on control dependency: [if], data = [none] int hourDegrees = (value % 12) * HOUR_VALUE_TO_DEGREES_STEP_SIZE; mHourRadialSelectorView.setSelection(hourDegrees, isHourInnerCircle(value), false); // depends on control dependency: [if], data = [none] mHourRadialSelectorView.invalidate(); // depends on control dependency: [if], data = [none] } else if (index == MINUTE_INDEX) { setValueForItem(MINUTE_INDEX, value); // depends on control dependency: [if], data = [none] int minuteDegrees = value * MINUTE_VALUE_TO_DEGREES_STEP_SIZE; mMinuteRadialSelectorView.setSelection(minuteDegrees, false, false); // depends on control dependency: [if], data = [none] mMinuteRadialSelectorView.invalidate(); // depends on control dependency: [if], data = [none] } } }
public class class_name { private String getTargetHost() { String host = getVirtualHost(); if (null == host) { host = (isIncoming()) ? getServiceContext().getLocalAddr().getCanonicalHostName() : getServiceContext().getRemoteAddr().getCanonicalHostName(); } return host; } }
public class class_name { private String getTargetHost() { String host = getVirtualHost(); if (null == host) { host = (isIncoming()) ? getServiceContext().getLocalAddr().getCanonicalHostName() : getServiceContext().getRemoteAddr().getCanonicalHostName(); // depends on control dependency: [if], data = [none] } return host; } }
public class class_name { public static void initializeMessages(final String bundleName, final Class clazz) { if (System.getSecurityManager() == null) { load(bundleName, clazz); return; } AccessController.doPrivileged(new PrivilegedAction() { public Object run() { load(bundleName, clazz); return null; } }); } }
public class class_name { public static void initializeMessages(final String bundleName, final Class clazz) { if (System.getSecurityManager() == null) { load(bundleName, clazz); // depends on control dependency: [if], data = [none] return; // depends on control dependency: [if], data = [none] } AccessController.doPrivileged(new PrivilegedAction() { public Object run() { load(bundleName, clazz); return null; } }); } }
public class class_name { void recomposeVariables(ElemVariable elemVar) { // Don't overide higher priority variable if (getVariableOrParamComposed(elemVar.getName()) == null) { elemVar.setIsTopLevel(true); // Mark as a top-level variable or param elemVar.setIndex(m_variables.size()); m_variables.addElement(elemVar); } } }
public class class_name { void recomposeVariables(ElemVariable elemVar) { // Don't overide higher priority variable if (getVariableOrParamComposed(elemVar.getName()) == null) { elemVar.setIsTopLevel(true); // Mark as a top-level variable or param // depends on control dependency: [if], data = [none] elemVar.setIndex(m_variables.size()); // depends on control dependency: [if], data = [none] m_variables.addElement(elemVar); // depends on control dependency: [if], data = [none] } } }
public class class_name { public double norm1() { double f = 0; for (int j = 0; j < n; j++) { double s = 0; for (int i = 0; i < m; i++) { s += Math.abs(A[i][j]); } f = Math.max(f, s); } return f; } }
public class class_name { public double norm1() { double f = 0; for (int j = 0; j < n; j++) { double s = 0; for (int i = 0; i < m; i++) { s += Math.abs(A[i][j]); // depends on control dependency: [for], data = [i] } f = Math.max(f, s); // depends on control dependency: [for], data = [none] } return f; } }
public class class_name { public void marshall(GetDevEndpointsRequest getDevEndpointsRequest, ProtocolMarshaller protocolMarshaller) { if (getDevEndpointsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getDevEndpointsRequest.getMaxResults(), MAXRESULTS_BINDING); protocolMarshaller.marshall(getDevEndpointsRequest.getNextToken(), NEXTTOKEN_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(GetDevEndpointsRequest getDevEndpointsRequest, ProtocolMarshaller protocolMarshaller) { if (getDevEndpointsRequest == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(getDevEndpointsRequest.getMaxResults(), MAXRESULTS_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(getDevEndpointsRequest.getNextToken(), NEXTTOKEN_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public Map<byte[], byte[]> hgetAll(final byte[] key) { checkIsInMultiOrPipeline(); client.hgetAll(key); final List<byte[]> flatHash = client.getBinaryMultiBulkReply(); final Map<byte[], byte[]> hash = new JedisByteHashMap(); final Iterator<byte[]> iterator = flatHash.iterator(); while (iterator.hasNext()) { hash.put(iterator.next(), iterator.next()); } return hash; } }
public class class_name { @Override public Map<byte[], byte[]> hgetAll(final byte[] key) { checkIsInMultiOrPipeline(); client.hgetAll(key); final List<byte[]> flatHash = client.getBinaryMultiBulkReply(); final Map<byte[], byte[]> hash = new JedisByteHashMap(); final Iterator<byte[]> iterator = flatHash.iterator(); while (iterator.hasNext()) { hash.put(iterator.next(), iterator.next()); // depends on control dependency: [while], data = [none] } return hash; } }
public class class_name { public boolean appendValue(StringBuilder builder, Class fieldClazz, Object value, boolean isPresent, boolean useToken) { if (List.class.isAssignableFrom(fieldClazz)) { isPresent = appendList(builder, value != null ? value : new ArrayList()); } else if (Set.class.isAssignableFrom(fieldClazz)) { isPresent = appendSet(builder, value != null ? value : new HashSet()); } else if (Map.class.isAssignableFrom(fieldClazz)) { isPresent = appendMap(builder, value != null ? value : new HashMap()); } else { isPresent = true; appendValue(builder, fieldClazz, value, useToken); } return isPresent; } }
public class class_name { public boolean appendValue(StringBuilder builder, Class fieldClazz, Object value, boolean isPresent, boolean useToken) { if (List.class.isAssignableFrom(fieldClazz)) { isPresent = appendList(builder, value != null ? value : new ArrayList()); // depends on control dependency: [if], data = [none] } else if (Set.class.isAssignableFrom(fieldClazz)) { isPresent = appendSet(builder, value != null ? value : new HashSet()); // depends on control dependency: [if], data = [none] } else if (Map.class.isAssignableFrom(fieldClazz)) { isPresent = appendMap(builder, value != null ? value : new HashMap()); // depends on control dependency: [if], data = [none] } else { isPresent = true; // depends on control dependency: [if], data = [none] appendValue(builder, fieldClazz, value, useToken); // depends on control dependency: [if], data = [none] } return isPresent; } }
public class class_name { public static int loadNewVersionOfType(ClassLoader classLoader, String dottedClassname, byte[] newbytes) { try { // Obtain the type registry of interest TypeRegistry typeRegistry = TypeRegistry.getTypeRegistryFor(classLoader); if (typeRegistry == null) { return 1; } // Find the reloadable type ReloadableType reloadableType = typeRegistry.getReloadableType(dottedClassname.replace('.', '/')); if (reloadableType == null) { return 2; } // Create a unique version tag for this reload attempt String tag = Utils.encode(System.currentTimeMillis()); boolean reloaded = reloadableType.loadNewVersion(tag, newbytes); return reloaded ? 0 : 3; } catch (Exception e) { e.printStackTrace(); return 4; } } }
public class class_name { public static int loadNewVersionOfType(ClassLoader classLoader, String dottedClassname, byte[] newbytes) { try { // Obtain the type registry of interest TypeRegistry typeRegistry = TypeRegistry.getTypeRegistryFor(classLoader); if (typeRegistry == null) { return 1; // depends on control dependency: [if], data = [none] } // Find the reloadable type ReloadableType reloadableType = typeRegistry.getReloadableType(dottedClassname.replace('.', '/')); if (reloadableType == null) { return 2; // depends on control dependency: [if], data = [none] } // Create a unique version tag for this reload attempt String tag = Utils.encode(System.currentTimeMillis()); boolean reloaded = reloadableType.loadNewVersion(tag, newbytes); return reloaded ? 0 : 3; // depends on control dependency: [try], data = [none] } catch (Exception e) { e.printStackTrace(); return 4; } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static ByteBuffer blockingRead(SocketChannel so, long timeout, byte[] bytes) throws IOException { ByteBuffer b = ByteBuffer.wrap(bytes); if (bytes.length == 0) return b; final long timeoutTime = (timeout > 0) ? (System.currentTimeMillis() + timeout) : (Long.MAX_VALUE); while (b.remaining() != 0 && System.currentTimeMillis() < timeoutTime) { if (!so.isConnected()) throw new IOException("Socket closed during read operation!"); so.read(b); if (b.remaining() != 0) { // sleep for a short time try { Thread.sleep(20); } catch (InterruptedException e) { } } } if (System.currentTimeMillis() >= timeoutTime) { return null; } b.rewind(); // make it easy for the caller to read from the buffer (if they're interested) return b; } }
public class class_name { public static ByteBuffer blockingRead(SocketChannel so, long timeout, byte[] bytes) throws IOException { ByteBuffer b = ByteBuffer.wrap(bytes); if (bytes.length == 0) return b; final long timeoutTime = (timeout > 0) ? (System.currentTimeMillis() + timeout) : (Long.MAX_VALUE); while (b.remaining() != 0 && System.currentTimeMillis() < timeoutTime) { if (!so.isConnected()) throw new IOException("Socket closed during read operation!"); so.read(b); if (b.remaining() != 0) { // sleep for a short time try { Thread.sleep(20); // depends on control dependency: [try], data = [none] } catch (InterruptedException e) { } // depends on control dependency: [catch], data = [none] } } if (System.currentTimeMillis() >= timeoutTime) { return null; } b.rewind(); // make it easy for the caller to read from the buffer (if they're interested) return b; } }
public class class_name { public double getRatio() { double ratio = 1; if ((getTargetWidth() == -1) || (getTargetHeight() == -1)) { ratio = (double)getOrgWidth() / getOrgHeight(); } else { ratio = (double)getTargetWidth() / getTargetHeight(); } return ratio; } }
public class class_name { public double getRatio() { double ratio = 1; if ((getTargetWidth() == -1) || (getTargetHeight() == -1)) { ratio = (double)getOrgWidth() / getOrgHeight(); // depends on control dependency: [if], data = [none] } else { ratio = (double)getTargetWidth() / getTargetHeight(); // depends on control dependency: [if], data = [none] } return ratio; } }
public class class_name { protected void keepTopFeatures(Map<Object, Double> featureScores, int maxFeatures) { logger.debug("keepTopFeatures()"); logger.debug("Estimating the minPermittedScore"); Double minPermittedScore = SelectKth.largest(featureScores.values().iterator(), maxFeatures); //remove any entry with score less than the minimum permitted one logger.debug("Removing features with scores less than threshold"); Iterator<Map.Entry<Object, Double>> it = featureScores.entrySet().iterator(); while(it.hasNext()) { Map.Entry<Object, Double> entry = it.next(); if(entry.getValue()<minPermittedScore) { it.remove(); } } //if some extra features still exist (due to ties on the scores) remove some of those extra features int numOfExtraFeatures = featureScores.size()-maxFeatures; if(numOfExtraFeatures>0) { logger.debug("Removing extra features caused by ties"); it = featureScores.entrySet().iterator(); while(it.hasNext() && numOfExtraFeatures>0) { Map.Entry<Object, Double> entry = it.next(); if(entry.getValue()-minPermittedScore<=0.0) { //DO NOT COMPARE THEM DIRECTLY USE SUBTRACTION! it.remove(); --numOfExtraFeatures; } } } } }
public class class_name { protected void keepTopFeatures(Map<Object, Double> featureScores, int maxFeatures) { logger.debug("keepTopFeatures()"); logger.debug("Estimating the minPermittedScore"); Double minPermittedScore = SelectKth.largest(featureScores.values().iterator(), maxFeatures); //remove any entry with score less than the minimum permitted one logger.debug("Removing features with scores less than threshold"); Iterator<Map.Entry<Object, Double>> it = featureScores.entrySet().iterator(); while(it.hasNext()) { Map.Entry<Object, Double> entry = it.next(); if(entry.getValue()<minPermittedScore) { it.remove(); // depends on control dependency: [if], data = [none] } } //if some extra features still exist (due to ties on the scores) remove some of those extra features int numOfExtraFeatures = featureScores.size()-maxFeatures; if(numOfExtraFeatures>0) { logger.debug("Removing extra features caused by ties"); // depends on control dependency: [if], data = [none] it = featureScores.entrySet().iterator(); // depends on control dependency: [if], data = [none] while(it.hasNext() && numOfExtraFeatures>0) { Map.Entry<Object, Double> entry = it.next(); if(entry.getValue()-minPermittedScore<=0.0) { //DO NOT COMPARE THEM DIRECTLY USE SUBTRACTION! it.remove(); // depends on control dependency: [if], data = [none] --numOfExtraFeatures; // depends on control dependency: [if], data = [none] } } } } }
public class class_name { private void runEKBPostCommitHooks(EKBCommit commit) throws EKBException { for (EKBPostCommitHook hook : postCommitHooks) { try { hook.onPostCommit(commit); } catch (Exception e) { LOGGER.warn("An exception is thrown in a EKB post commit hook.", e); } } } }
public class class_name { private void runEKBPostCommitHooks(EKBCommit commit) throws EKBException { for (EKBPostCommitHook hook : postCommitHooks) { try { hook.onPostCommit(commit); // depends on control dependency: [try], data = [none] } catch (Exception e) { LOGGER.warn("An exception is thrown in a EKB post commit hook.", e); } // depends on control dependency: [catch], data = [none] } } }
public class class_name { Mono<Void> delayAsync() { Mono<Void> result = Mono.empty(); if (delayInMilliseconds > 0) { result = result.delaySubscription(Duration.ofMillis(delayInMilliseconds)); } return result; } }
public class class_name { Mono<Void> delayAsync() { Mono<Void> result = Mono.empty(); if (delayInMilliseconds > 0) { result = result.delaySubscription(Duration.ofMillis(delayInMilliseconds)); // depends on control dependency: [if], data = [(delayInMilliseconds] } return result; } }
public class class_name { private Object iteratorColumns(EntityMetadata metadata, MetamodelImpl metamodel, EntityType entityType, Map<String, Object> relationalValues, Object entity, Row row, Iterator<Definition> columnDefIter) { while (columnDefIter.hasNext()) { Definition columnDef = columnDefIter.next(); final String columnName = columnDef.getName(); // column name DataType dataType = columnDef.getType(); // data type if (metadata.getRelationNames() != null && metadata.getRelationNames().contains(columnName) && !columnName.equals(((AbstractAttribute) metadata.getIdAttribute()).getJPAColumnName())) { Object relationalValue = DSClientUtilities.assign(row, null, metadata, dataType.getName(), entityType, columnName, null, metamodel); relationalValues.put(columnName, relationalValue); } else { String fieldName = columnName.equals(((AbstractAttribute) metadata.getIdAttribute()).getJPAColumnName()) ? metadata.getIdAttribute().getName() : metadata.getFieldName(columnName); Attribute attribute = fieldName != null ? entityType.getAttribute(fieldName) : null; if (attribute != null) { if (!attribute.isAssociation()) { entity = DSClientUtilities.assign(row, entity, metadata, dataType.getName(), entityType, columnName, null, metamodel); } } else if (metamodel.isEmbeddable(metadata.getIdAttribute().getBindableJavaType())) { entity = populateCompositeId(metadata, entity, columnName, row, metamodel, metadata.getIdAttribute(), metadata.getEntityClazz(), dataType); } else { entity = DSClientUtilities.assign(row, entity, metadata, dataType.getName(), entityType, columnName, null, metamodel); } } } return entity; } }
public class class_name { private Object iteratorColumns(EntityMetadata metadata, MetamodelImpl metamodel, EntityType entityType, Map<String, Object> relationalValues, Object entity, Row row, Iterator<Definition> columnDefIter) { while (columnDefIter.hasNext()) { Definition columnDef = columnDefIter.next(); final String columnName = columnDef.getName(); // column name DataType dataType = columnDef.getType(); // data type if (metadata.getRelationNames() != null && metadata.getRelationNames().contains(columnName) && !columnName.equals(((AbstractAttribute) metadata.getIdAttribute()).getJPAColumnName())) { Object relationalValue = DSClientUtilities.assign(row, null, metadata, dataType.getName(), entityType, columnName, null, metamodel); relationalValues.put(columnName, relationalValue); // depends on control dependency: [if], data = [none] } else { String fieldName = columnName.equals(((AbstractAttribute) metadata.getIdAttribute()).getJPAColumnName()) ? metadata.getIdAttribute().getName() : metadata.getFieldName(columnName); Attribute attribute = fieldName != null ? entityType.getAttribute(fieldName) : null; if (attribute != null) { if (!attribute.isAssociation()) { entity = DSClientUtilities.assign(row, entity, metadata, dataType.getName(), entityType, columnName, null, metamodel); // depends on control dependency: [if], data = [none] } } else if (metamodel.isEmbeddable(metadata.getIdAttribute().getBindableJavaType())) { entity = populateCompositeId(metadata, entity, columnName, row, metamodel, metadata.getIdAttribute(), metadata.getEntityClazz(), dataType); // depends on control dependency: [if], data = [none] } else { entity = DSClientUtilities.assign(row, entity, metadata, dataType.getName(), entityType, columnName, null, metamodel); // depends on control dependency: [if], data = [none] } } } return entity; } }
public class class_name { @Override public void renderHead(IHeaderResponse response) { super.renderHead(response); List<FeedbackMessage> msgs = model.getObject(); if(msgs.size()>0){ for(Component component: messages.keySet()){ StringBuffer sb = new StringBuffer(); for(FeedbackMessage msg : messages.get(component)){ sb.append(msg.getMessage()+"\n"); msg.markRendered(); } String script = "$(\"#"+component.getMarkupId()+"\")" + ".popover({ 'trigger': 'focus', " + "'placement': 'top', " + "'content': \""+StringEscapeUtils.escapeEcmaScript(sb.toString())+"\", " + "'template': '<div class=\"popover feedback-popover\"><div class=\"arrow\"></div><div class=\"popover-inner\"><h3 class=\"popover-title\"></h3><div class=\"popover-content\"><p></p></div></div></div>'" + "});"; script += "$(\"#"+component.getMarkupId()+"\").keypress(function(){ $(\"#"+this.getMarkupId()+"\").removeClass('has-error'); $(this).popover('destroy'); });"; response.render(OnDomReadyHeaderItem.forScript(script)); } } } }
public class class_name { @Override public void renderHead(IHeaderResponse response) { super.renderHead(response); List<FeedbackMessage> msgs = model.getObject(); if(msgs.size()>0){ for(Component component: messages.keySet()){ StringBuffer sb = new StringBuffer(); for(FeedbackMessage msg : messages.get(component)){ sb.append(msg.getMessage()+"\n"); // depends on control dependency: [for], data = [msg] msg.markRendered(); // depends on control dependency: [for], data = [msg] } String script = "$(\"#"+component.getMarkupId()+"\")" + ".popover({ 'trigger': 'focus', " + "'placement': 'top', " + "'content': \""+StringEscapeUtils.escapeEcmaScript(sb.toString())+"\", " + "'template': '<div class=\"popover feedback-popover\"><div class=\"arrow\"></div><div class=\"popover-inner\"><h3 class=\"popover-title\"></h3><div class=\"popover-content\"><p></p></div></div></div>'" + "});"; script += "$(\"#"+component.getMarkupId()+"\").keypress(function(){ $(\"#"+this.getMarkupId()+"\").removeClass('has-error'); $(this).popover('destroy'); });"; response.render(OnDomReadyHeaderItem.forScript(script)); // depends on control dependency: [if], data = [none] } } } }
public class class_name { public static int divide( int dividend , int divisor ) { int length_end = length(dividend); int length_sor = length(divisor); if( length_end < length_sor ) return 0; int result = 0; for( int i = length_end-length_sor; i >= 0; i-- ) { if((dividend & (1 << i + length_sor - 1)) != 0 ) { dividend ^= divisor << i; result |= 1 << i; } } return result; } }
public class class_name { public static int divide( int dividend , int divisor ) { int length_end = length(dividend); int length_sor = length(divisor); if( length_end < length_sor ) return 0; int result = 0; for( int i = length_end-length_sor; i >= 0; i-- ) { if((dividend & (1 << i + length_sor - 1)) != 0 ) { dividend ^= divisor << i; // depends on control dependency: [if], data = [none] result |= 1 << i; // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { private ServiceInfo getRandomService(List<ServiceInfo> serviceList) { if (serviceList == null || serviceList.size() == 0) { return null; } return serviceList.get(RandomUtils.nextInt(0, serviceList.size())); } }
public class class_name { private ServiceInfo getRandomService(List<ServiceInfo> serviceList) { if (serviceList == null || serviceList.size() == 0) { return null; // depends on control dependency: [if], data = [none] } return serviceList.get(RandomUtils.nextInt(0, serviceList.size())); } }
public class class_name { private void insert(int position, int c) { for (int i = 143; i > position; i--) { set[i] = set[i - 1]; character[i] = character[i - 1]; } character[position] = c; } }
public class class_name { private void insert(int position, int c) { for (int i = 143; i > position; i--) { set[i] = set[i - 1]; // depends on control dependency: [for], data = [i] character[i] = character[i - 1]; // depends on control dependency: [for], data = [i] } character[position] = c; } }
public class class_name { public static void main(String[] args) { /* * The ProfileCredentialsProvider will return your [default] * credential profile by reading from the credentials file located at * (~/.aws/credentials). */ AWSCredentials credentials = null; try { credentials = new ProfileCredentialsProvider().getCredentials(); } catch (Exception e) { throw new AmazonClientException( "Cannot load the credentials from the credential profiles file. " + "Please make sure that your credentials file is at the correct " + "location (~/.aws/credentials), and is in valid format.", e); } // Create the AmazonEC2Client object so we can call various APIs. AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard() .withCredentials(new AWSStaticCredentialsProvider(credentials)) .withRegion("us-west-2") .build(); // Create a new security group. try { CreateSecurityGroupRequest securityGroupRequest = new CreateSecurityGroupRequest( "GettingStartedGroup", "Getting Started Security Group"); CreateSecurityGroupResult result = ec2 .createSecurityGroup(securityGroupRequest); System.out.println(String.format("Security group created: [%s]", result.getGroupId())); } catch (AmazonServiceException ase) { // Likely this means that the group is already created, so ignore. System.out.println(ase.getMessage()); } String ipAddr = "0.0.0.0/0"; // Get the IP of the current host, so that we can limit the Security Group // by default to the ip range associated with your subnet. try { InetAddress addr = InetAddress.getLocalHost(); // Get IP Address ipAddr = addr.getHostAddress()+"/10"; } catch (UnknownHostException e) { } // Create a range that you would like to populate. List<String> ipRanges = Collections.singletonList(ipAddr); // Open up port 23 for TCP traffic to the associated IP from above (e.g. ssh traffic). IpPermission ipPermission = new IpPermission() .withIpProtocol("tcp") .withFromPort(new Integer(22)) .withToPort(new Integer(22)) .withIpRanges(ipRanges); List<IpPermission> ipPermissions = Collections.singletonList(ipPermission); try { // Authorize the ports to the used. AuthorizeSecurityGroupIngressRequest ingressRequest = new AuthorizeSecurityGroupIngressRequest( "GettingStartedGroup", ipPermissions); ec2.authorizeSecurityGroupIngress(ingressRequest); System.out.println(String.format("Ingress port authroized: [%s]", ipPermissions.toString())); } catch (AmazonServiceException ase) { // Ignore because this likely means the zone has already been authorized. System.out.println(ase.getMessage()); } } }
public class class_name { public static void main(String[] args) { /* * The ProfileCredentialsProvider will return your [default] * credential profile by reading from the credentials file located at * (~/.aws/credentials). */ AWSCredentials credentials = null; try { credentials = new ProfileCredentialsProvider().getCredentials(); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new AmazonClientException( "Cannot load the credentials from the credential profiles file. " + "Please make sure that your credentials file is at the correct " + "location (~/.aws/credentials), and is in valid format.", e); } // depends on control dependency: [catch], data = [none] // Create the AmazonEC2Client object so we can call various APIs. AmazonEC2 ec2 = AmazonEC2ClientBuilder.standard() .withCredentials(new AWSStaticCredentialsProvider(credentials)) .withRegion("us-west-2") .build(); // Create a new security group. try { CreateSecurityGroupRequest securityGroupRequest = new CreateSecurityGroupRequest( "GettingStartedGroup", "Getting Started Security Group"); CreateSecurityGroupResult result = ec2 .createSecurityGroup(securityGroupRequest); System.out.println(String.format("Security group created: [%s]", result.getGroupId())); // depends on control dependency: [try], data = [none] } catch (AmazonServiceException ase) { // Likely this means that the group is already created, so ignore. System.out.println(ase.getMessage()); } // depends on control dependency: [catch], data = [none] String ipAddr = "0.0.0.0/0"; // Get the IP of the current host, so that we can limit the Security Group // by default to the ip range associated with your subnet. try { InetAddress addr = InetAddress.getLocalHost(); // Get IP Address ipAddr = addr.getHostAddress()+"/10"; // depends on control dependency: [try], data = [none] } catch (UnknownHostException e) { } // depends on control dependency: [catch], data = [none] // Create a range that you would like to populate. List<String> ipRanges = Collections.singletonList(ipAddr); // Open up port 23 for TCP traffic to the associated IP from above (e.g. ssh traffic). IpPermission ipPermission = new IpPermission() .withIpProtocol("tcp") .withFromPort(new Integer(22)) .withToPort(new Integer(22)) .withIpRanges(ipRanges); List<IpPermission> ipPermissions = Collections.singletonList(ipPermission); try { // Authorize the ports to the used. AuthorizeSecurityGroupIngressRequest ingressRequest = new AuthorizeSecurityGroupIngressRequest( "GettingStartedGroup", ipPermissions); ec2.authorizeSecurityGroupIngress(ingressRequest); // depends on control dependency: [try], data = [none] System.out.println(String.format("Ingress port authroized: [%s]", ipPermissions.toString())); // depends on control dependency: [try], data = [none] } catch (AmazonServiceException ase) { // Ignore because this likely means the zone has already been authorized. System.out.println(ase.getMessage()); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public String getBeaconIdAsString() { if (mBeaconIdAsString == null) { mBeaconIdAsString = Bytes.toHexString(getBeaconId(), true); } return mBeaconIdAsString; } }
public class class_name { public String getBeaconIdAsString() { if (mBeaconIdAsString == null) { mBeaconIdAsString = Bytes.toHexString(getBeaconId(), true); // depends on control dependency: [if], data = [none] } return mBeaconIdAsString; } }
public class class_name { public static Object[] addAll(Object[] array1, Object[] array2) { if (array1 == null) { return clone(array2); } else if (array2 == null) { return clone(array1); } Object[] joinedArray = (Object[]) Array.newInstance(array1.getClass().getComponentType(), array1.length + array2.length); System.arraycopy(array1, 0, joinedArray, 0, array1.length); try { System.arraycopy(array2, 0, joinedArray, array1.length, array2.length); } catch (ArrayStoreException ase) { // Check if problem was due to incompatible types /* * We do this here, rather than before the copy because: * - it would be a wasted check most of the time * - safer, in case check turns out to be too strict */ final Class type1 = array1.getClass().getComponentType(); final Class type2 = array2.getClass().getComponentType(); if (!type1.isAssignableFrom(type2)){ throw new IllegalArgumentException("Cannot store "+type2.getName()+" in an array of "+type1.getName()); } throw ase; // No, so rethrow original } return joinedArray; } }
public class class_name { public static Object[] addAll(Object[] array1, Object[] array2) { if (array1 == null) { return clone(array2); // depends on control dependency: [if], data = [none] } else if (array2 == null) { return clone(array1); // depends on control dependency: [if], data = [none] } Object[] joinedArray = (Object[]) Array.newInstance(array1.getClass().getComponentType(), array1.length + array2.length); System.arraycopy(array1, 0, joinedArray, 0, array1.length); try { System.arraycopy(array2, 0, joinedArray, array1.length, array2.length); // depends on control dependency: [try], data = [none] } catch (ArrayStoreException ase) { // Check if problem was due to incompatible types /* * We do this here, rather than before the copy because: * - it would be a wasted check most of the time * - safer, in case check turns out to be too strict */ final Class type1 = array1.getClass().getComponentType(); final Class type2 = array2.getClass().getComponentType(); if (!type1.isAssignableFrom(type2)){ throw new IllegalArgumentException("Cannot store "+type2.getName()+" in an array of "+type1.getName()); } throw ase; // No, so rethrow original } // depends on control dependency: [catch], data = [none] return joinedArray; } }
public class class_name { public static void doMetaUpdateVersionsOnStores(AdminClient adminClient, List<StoreDefinition> oldStoreDefs, List<StoreDefinition> newStoreDefs) { Set<String> storeNamesUnion = new HashSet<String>(); Map<String, StoreDefinition> oldStoreDefinitionMap = new HashMap<String, StoreDefinition>(); Map<String, StoreDefinition> newStoreDefinitionMap = new HashMap<String, StoreDefinition>(); List<String> storesChanged = new ArrayList<String>(); for(StoreDefinition storeDef: oldStoreDefs) { String storeName = storeDef.getName(); storeNamesUnion.add(storeName); oldStoreDefinitionMap.put(storeName, storeDef); } for(StoreDefinition storeDef: newStoreDefs) { String storeName = storeDef.getName(); storeNamesUnion.add(storeName); newStoreDefinitionMap.put(storeName, storeDef); } for(String storeName: storeNamesUnion) { StoreDefinition oldStoreDef = oldStoreDefinitionMap.get(storeName); StoreDefinition newStoreDef = newStoreDefinitionMap.get(storeName); if(oldStoreDef == null && newStoreDef != null || oldStoreDef != null && newStoreDef == null || oldStoreDef != null && newStoreDef != null && !oldStoreDef.equals(newStoreDef)) { storesChanged.add(storeName); } } System.out.println("Updating metadata version for the following stores: " + storesChanged); try { adminClient.metadataMgmtOps.updateMetadataversion(adminClient.getAdminClientCluster() .getNodeIds(), storesChanged); } catch(Exception e) { System.err.println("Error while updating metadata version for the specified store."); } } }
public class class_name { public static void doMetaUpdateVersionsOnStores(AdminClient adminClient, List<StoreDefinition> oldStoreDefs, List<StoreDefinition> newStoreDefs) { Set<String> storeNamesUnion = new HashSet<String>(); Map<String, StoreDefinition> oldStoreDefinitionMap = new HashMap<String, StoreDefinition>(); Map<String, StoreDefinition> newStoreDefinitionMap = new HashMap<String, StoreDefinition>(); List<String> storesChanged = new ArrayList<String>(); for(StoreDefinition storeDef: oldStoreDefs) { String storeName = storeDef.getName(); storeNamesUnion.add(storeName); // depends on control dependency: [for], data = [none] oldStoreDefinitionMap.put(storeName, storeDef); // depends on control dependency: [for], data = [storeDef] } for(StoreDefinition storeDef: newStoreDefs) { String storeName = storeDef.getName(); storeNamesUnion.add(storeName); // depends on control dependency: [for], data = [none] newStoreDefinitionMap.put(storeName, storeDef); // depends on control dependency: [for], data = [storeDef] } for(String storeName: storeNamesUnion) { StoreDefinition oldStoreDef = oldStoreDefinitionMap.get(storeName); StoreDefinition newStoreDef = newStoreDefinitionMap.get(storeName); if(oldStoreDef == null && newStoreDef != null || oldStoreDef != null && newStoreDef == null || oldStoreDef != null && newStoreDef != null && !oldStoreDef.equals(newStoreDef)) { storesChanged.add(storeName); // depends on control dependency: [if], data = [none] } } System.out.println("Updating metadata version for the following stores: " + storesChanged); try { adminClient.metadataMgmtOps.updateMetadataversion(adminClient.getAdminClientCluster() .getNodeIds(), storesChanged); // depends on control dependency: [try], data = [none] } catch(Exception e) { System.err.println("Error while updating metadata version for the specified store."); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public AIRequestedTick updateRequestToValue( long tick, AIMessageItem msgItem, boolean valueDelivered, SendDispatcher sendDispatcher) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry( tc, "updateRequestToValue", new Object[] { Long.valueOf(tick), msgItem, Boolean.valueOf(valueDelivered)}); AIRequestedTick rt = null; _targetStream.setCursor(tick); TickRange tickRange = _targetStream.getNext(); if (tickRange.type == TickRange.Requested) { rt = (AIRequestedTick) tickRange.value; RemoteDispatchableKey ck = rt.getRemoteDispatchableKey(); // The tick keeps the in-memory Java reference of the message object if valueDelivered=false, // the constructor takes the reference to extract reliability and priority // Re-enter the ck as part of the V/U's state, just in case ordered // delivery applies to consumer cardinality other than one AIValueTick valueTick = new AIValueTick( tick, msgItem, valueDelivered, ck, rt.getOriginalTimeout(), rt.getIssueTime(), msgItem.getMessage().getRedeliveredCount().intValue()); TickRange valueRange = new TickRange(TickRange.Value, tick, tick); valueRange.value = valueTick; valueRange.valuestamp = tick; _targetStream.writeRange(valueRange); if (rt.getTimeout() > 0L || rt.getTimeout() == _mp.getCustomProperties().get_infinite_timeout()) { if (rt.isSlowed()) { _slowedGetTOM.removeTimeoutEntry(rt); } else { _eagerGetTOM.removeTimeoutEntry(rt); } } } else if (tickRange.type == TickRange.Accepted) { sendDispatcher.sendAccept(tick); } else if (tickRange.type == TickRange.Rejected) { AIRejectedRange rr = (AIRejectedRange)tickRange.value; sendDispatcher.sendReject(tick, tick, rr.unlockCount, rr.recovery); } else if (tickRange.type == TickRange.Completed) { sendDispatcher.sendCompleted(tick, tick); } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "updateRequestToValue", rt); return rt; } }
public class class_name { public AIRequestedTick updateRequestToValue( long tick, AIMessageItem msgItem, boolean valueDelivered, SendDispatcher sendDispatcher) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.entry( tc, "updateRequestToValue", new Object[] { Long.valueOf(tick), msgItem, Boolean.valueOf(valueDelivered)}); AIRequestedTick rt = null; _targetStream.setCursor(tick); TickRange tickRange = _targetStream.getNext(); if (tickRange.type == TickRange.Requested) { rt = (AIRequestedTick) tickRange.value; // depends on control dependency: [if], data = [none] RemoteDispatchableKey ck = rt.getRemoteDispatchableKey(); // The tick keeps the in-memory Java reference of the message object if valueDelivered=false, // the constructor takes the reference to extract reliability and priority // Re-enter the ck as part of the V/U's state, just in case ordered // delivery applies to consumer cardinality other than one AIValueTick valueTick = new AIValueTick( tick, msgItem, valueDelivered, ck, rt.getOriginalTimeout(), rt.getIssueTime(), msgItem.getMessage().getRedeliveredCount().intValue()); TickRange valueRange = new TickRange(TickRange.Value, tick, tick); valueRange.value = valueTick; // depends on control dependency: [if], data = [none] valueRange.valuestamp = tick; // depends on control dependency: [if], data = [none] _targetStream.writeRange(valueRange); // depends on control dependency: [if], data = [none] if (rt.getTimeout() > 0L || rt.getTimeout() == _mp.getCustomProperties().get_infinite_timeout()) { if (rt.isSlowed()) { _slowedGetTOM.removeTimeoutEntry(rt); // depends on control dependency: [if], data = [none] } else { _eagerGetTOM.removeTimeoutEntry(rt); // depends on control dependency: [if], data = [none] } } } else if (tickRange.type == TickRange.Accepted) { sendDispatcher.sendAccept(tick); // depends on control dependency: [if], data = [none] } else if (tickRange.type == TickRange.Rejected) { AIRejectedRange rr = (AIRejectedRange)tickRange.value; sendDispatcher.sendReject(tick, tick, rr.unlockCount, rr.recovery); // depends on control dependency: [if], data = [none] } else if (tickRange.type == TickRange.Completed) { sendDispatcher.sendCompleted(tick, tick); // depends on control dependency: [if], data = [none] } if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) SibTr.exit(tc, "updateRequestToValue", rt); return rt; } }
public class class_name { protected String getDynamicLoaderExtensionJavaScript(HttpServletRequest request) { final String sourceMethod = "getDynamicLoaderExtensionJavaScript"; //$NON-NLS-1$ boolean isTraceLogging = log.isLoggable(Level.FINER); if (isTraceLogging) { log.entering(sourceClass, sourceMethod); } StringBuffer sb = new StringBuffer(); for (String contribution : getExtensionContributions()) { sb.append(contribution).append("\r\n"); //$NON-NLS-1$ } String cacheBust = AggregatorUtil.getCacheBust(getAggregator()); if (cacheBust != null && cacheBust.length() > 0) { sb.append("if (!require.combo.cacheBust){require.combo.cacheBust = '") //$NON-NLS-1$ .append(cacheBust).append("';}\r\n"); //$NON-NLS-1$ } contributeBootLayerDeps(sb, request); if (moduleIdListHash != null) { sb.append("require.combo.reg(null, ["); //$NON-NLS-1$ for (int i = 0; i < moduleIdListHash.length; i++) { sb.append(i == 0 ? "" : ", ").append(((int)moduleIdListHash[i])&0xFF); //$NON-NLS-1$ //$NON-NLS-2$ } sb.append("]);\r\n"); //$NON-NLS-1$ } sb.append(clientRegisterSyntheticModules()); if (isTraceLogging) { log.exiting(sourceClass, sourceMethod, sb.toString()); } return sb.toString(); } }
public class class_name { protected String getDynamicLoaderExtensionJavaScript(HttpServletRequest request) { final String sourceMethod = "getDynamicLoaderExtensionJavaScript"; //$NON-NLS-1$ boolean isTraceLogging = log.isLoggable(Level.FINER); if (isTraceLogging) { log.entering(sourceClass, sourceMethod); // depends on control dependency: [if], data = [none] } StringBuffer sb = new StringBuffer(); for (String contribution : getExtensionContributions()) { sb.append(contribution).append("\r\n"); //$NON-NLS-1$ // depends on control dependency: [for], data = [contribution] } String cacheBust = AggregatorUtil.getCacheBust(getAggregator()); if (cacheBust != null && cacheBust.length() > 0) { sb.append("if (!require.combo.cacheBust){require.combo.cacheBust = '") //$NON-NLS-1$ .append(cacheBust).append("';}\r\n"); //$NON-NLS-1$ // depends on control dependency: [if], data = [none] } contributeBootLayerDeps(sb, request); if (moduleIdListHash != null) { sb.append("require.combo.reg(null, ["); //$NON-NLS-1$ for (int i = 0; i < moduleIdListHash.length; i++) { sb.append(i == 0 ? "" : ", ").append(((int)moduleIdListHash[i])&0xFF); //$NON-NLS-1$ //$NON-NLS-2$ } sb.append("]);\r\n"); //$NON-NLS-1$ } sb.append(clientRegisterSyntheticModules()); if (isTraceLogging) { log.exiting(sourceClass, sourceMethod, sb.toString()); } return sb.toString(); } }
public class class_name { public static void setClientCacheAggressively(HttpHeaders headers) { if (!headers.contains(CACHE_CONTROL)) { headers.set(CACHE_CONTROL, "public, " + MAX_AGE + "=" + SECS_IN_A_YEAR); } // CORS: // http://sockjs.github.com/sockjs-protocol/sockjs-protocol-0.3.3.html#section-7 if (!headers.contains(ACCESS_CONTROL_MAX_AGE)) { headers.set(ACCESS_CONTROL_MAX_AGE, SECS_IN_A_YEAR); } // Note that SECS_IN_A_YEAR * 1000 is different from SECS_IN_A_YEAR * 1000L // because of integer overflow! if (!headers.contains(EXPIRES)) { headers.set(EXPIRES, formatRfc2822(System.currentTimeMillis() + SECS_IN_A_YEAR * 1000L)); } } }
public class class_name { public static void setClientCacheAggressively(HttpHeaders headers) { if (!headers.contains(CACHE_CONTROL)) { headers.set(CACHE_CONTROL, "public, " + MAX_AGE + "=" + SECS_IN_A_YEAR); // depends on control dependency: [if], data = [none] } // CORS: // http://sockjs.github.com/sockjs-protocol/sockjs-protocol-0.3.3.html#section-7 if (!headers.contains(ACCESS_CONTROL_MAX_AGE)) { headers.set(ACCESS_CONTROL_MAX_AGE, SECS_IN_A_YEAR); // depends on control dependency: [if], data = [none] } // Note that SECS_IN_A_YEAR * 1000 is different from SECS_IN_A_YEAR * 1000L // because of integer overflow! if (!headers.contains(EXPIRES)) { headers.set(EXPIRES, formatRfc2822(System.currentTimeMillis() + SECS_IN_A_YEAR * 1000L)); // depends on control dependency: [if], data = [none] } } }
public class class_name { @Override public ScratchPad getScratchPad() { final Viewport viewport = getViewport(); if (null != viewport) { return viewport.getScratchPad(); } return null; } }
public class class_name { @Override public ScratchPad getScratchPad() { final Viewport viewport = getViewport(); if (null != viewport) { return viewport.getScratchPad(); // depends on control dependency: [if], data = [none] } return null; } }
public class class_name { private ILabel processNode(INode currentNode, ArrayList<ILabel> pathToRootPhrases) throws ContextPreprocessorException { if (debugLabels) { log.debug("preprocessing node: " + currentNode.getNodeData().getId() + ", label: " + currentNode.getNodeData().getName()); } // reset old preprocessing currentNode.getNodeData().setcLabFormula(""); currentNode.getNodeData().setcNodeFormula(""); while (0 < currentNode.getNodeData().getACoLCount()) { currentNode.getNodeData().removeACoL(0); } String label = currentNode.getNodeData().getName(); ILabel result = new Label(label); result.setContext(pathToRootPhrases); try { pipeline.process(result); //should contain only token indexes. including not recognized, but except closed class tokens. //something like // 1 & 2 // 1 & (3 | 4) String formula = result.getFormula(); currentNode.getNodeData().setIsPreprocessed(true); //create acols. one acol for each concept (meaningful) token //non-concept tokens should not make it up to a formula. String[] tokenIndexes = formula.split("[ ()&|~]"); Set<String> indexes = new HashSet<String>(Arrays.asList(tokenIndexes)); List<IToken> tokens = result.getTokens(); for (int i = 0; i < tokens.size(); i++) { IToken token = tokens.get(i); String tokenIdx = Integer.toString(i); if (indexes.contains(tokenIdx)) { IAtomicConceptOfLabel acol = currentNode.getNodeData().createACoL(); acol.setId(i); acol.setToken(token.getText()); acol.setLemma(token.getLemma()); for (ISense sense : token.getSenses()) { acol.addSense(sense); } currentNode.getNodeData().addACoL(acol); } } //prepend all token references with node id formula = formula.replaceAll("(\\d+)", currentNode.getNodeData().getId() + ".$1"); formula = formula.trim(); //set it to the node currentNode.getNodeData().setcLabFormula(formula); } catch (PipelineComponentException e) { if (log.isEnabledFor(Level.WARN)) { log.warn("Falling back to heuristic parser for label (" + result.getText() + "): " + e.getMessage(), e); fallbackCount++; dcp.processNode(currentNode); } } return result; } }
public class class_name { private ILabel processNode(INode currentNode, ArrayList<ILabel> pathToRootPhrases) throws ContextPreprocessorException { if (debugLabels) { log.debug("preprocessing node: " + currentNode.getNodeData().getId() + ", label: " + currentNode.getNodeData().getName()); } // reset old preprocessing currentNode.getNodeData().setcLabFormula(""); currentNode.getNodeData().setcNodeFormula(""); while (0 < currentNode.getNodeData().getACoLCount()) { currentNode.getNodeData().removeACoL(0); } String label = currentNode.getNodeData().getName(); ILabel result = new Label(label); result.setContext(pathToRootPhrases); try { pipeline.process(result); //should contain only token indexes. including not recognized, but except closed class tokens. //something like // 1 & 2 // 1 & (3 | 4) String formula = result.getFormula(); currentNode.getNodeData().setIsPreprocessed(true); //create acols. one acol for each concept (meaningful) token //non-concept tokens should not make it up to a formula. String[] tokenIndexes = formula.split("[ ()&|~]"); Set<String> indexes = new HashSet<String>(Arrays.asList(tokenIndexes)); List<IToken> tokens = result.getTokens(); for (int i = 0; i < tokens.size(); i++) { IToken token = tokens.get(i); String tokenIdx = Integer.toString(i); if (indexes.contains(tokenIdx)) { IAtomicConceptOfLabel acol = currentNode.getNodeData().createACoL(); acol.setId(i); // depends on control dependency: [if], data = [none] acol.setToken(token.getText()); // depends on control dependency: [if], data = [none] acol.setLemma(token.getLemma()); // depends on control dependency: [if], data = [none] for (ISense sense : token.getSenses()) { acol.addSense(sense); // depends on control dependency: [for], data = [sense] } currentNode.getNodeData().addACoL(acol); // depends on control dependency: [if], data = [none] } } //prepend all token references with node id formula = formula.replaceAll("(\\d+)", currentNode.getNodeData().getId() + ".$1"); formula = formula.trim(); //set it to the node currentNode.getNodeData().setcLabFormula(formula); } catch (PipelineComponentException e) { if (log.isEnabledFor(Level.WARN)) { log.warn("Falling back to heuristic parser for label (" + result.getText() + "): " + e.getMessage(), e); // depends on control dependency: [if], data = [none] fallbackCount++; // depends on control dependency: [if], data = [none] dcp.processNode(currentNode); // depends on control dependency: [if], data = [none] } } return result; } }
public class class_name { public void marshall(AudioSelectorSettings audioSelectorSettings, ProtocolMarshaller protocolMarshaller) { if (audioSelectorSettings == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(audioSelectorSettings.getAudioLanguageSelection(), AUDIOLANGUAGESELECTION_BINDING); protocolMarshaller.marshall(audioSelectorSettings.getAudioPidSelection(), AUDIOPIDSELECTION_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(AudioSelectorSettings audioSelectorSettings, ProtocolMarshaller protocolMarshaller) { if (audioSelectorSettings == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(audioSelectorSettings.getAudioLanguageSelection(), AUDIOLANGUAGESELECTION_BINDING); // depends on control dependency: [try], data = [none] protocolMarshaller.marshall(audioSelectorSettings.getAudioPidSelection(), AUDIOPIDSELECTION_BINDING); // depends on control dependency: [try], data = [none] } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public void setGetParams(TreeSet<HtmlParameter> getParams) { if (mUri == null) { return; } if (getParams.isEmpty()) { try { mUri.setQuery(""); } catch (URIException e) { log.error(e.getMessage(), e); } return; } StringBuilder sbQuery = new StringBuilder(); for (HtmlParameter parameter : getParams) { if (parameter.getType() != HtmlParameter.Type.url) { continue; } sbQuery.append(parameter.getName()); sbQuery.append('='); sbQuery.append(parameter.getValue()); sbQuery.append('&'); } if (sbQuery.length() <= 2) { try { mUri.setQuery(""); } catch (URIException e) { log.error(e.getMessage(), e); } return; } String query = sbQuery.substring(0, sbQuery.length() - 1); try { //The previous behaviour was escaping the query, //so it is maintained with the use of setQuery. mUri.setQuery(query); } catch (URIException e) { log.error(e.getMessage(), e); } } }
public class class_name { public void setGetParams(TreeSet<HtmlParameter> getParams) { if (mUri == null) { return; // depends on control dependency: [if], data = [none] } if (getParams.isEmpty()) { try { mUri.setQuery(""); // depends on control dependency: [try], data = [none] } catch (URIException e) { log.error(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return; // depends on control dependency: [if], data = [none] } StringBuilder sbQuery = new StringBuilder(); for (HtmlParameter parameter : getParams) { if (parameter.getType() != HtmlParameter.Type.url) { continue; } sbQuery.append(parameter.getName()); // depends on control dependency: [for], data = [parameter] sbQuery.append('='); // depends on control dependency: [for], data = [none] sbQuery.append(parameter.getValue()); // depends on control dependency: [for], data = [parameter] sbQuery.append('&'); // depends on control dependency: [for], data = [none] } if (sbQuery.length() <= 2) { try { mUri.setQuery(""); // depends on control dependency: [try], data = [none] } catch (URIException e) { log.error(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] return; // depends on control dependency: [if], data = [none] } String query = sbQuery.substring(0, sbQuery.length() - 1); try { //The previous behaviour was escaping the query, //so it is maintained with the use of setQuery. mUri.setQuery(query); // depends on control dependency: [try], data = [none] } catch (URIException e) { log.error(e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static final boolean isFolderType(String typeName) { try { return OpenCms.getResourceManager().getResourceType(typeName).isFolder(); } catch (CmsLoaderException e) { throw new CmsIllegalArgumentException( Messages.get().container(Messages.ERR_UNKNOWN_RESOURCE_TYPE_1, typeName), e); } } }
public class class_name { public static final boolean isFolderType(String typeName) { try { return OpenCms.getResourceManager().getResourceType(typeName).isFolder(); // depends on control dependency: [try], data = [none] } catch (CmsLoaderException e) { throw new CmsIllegalArgumentException( Messages.get().container(Messages.ERR_UNKNOWN_RESOURCE_TYPE_1, typeName), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { @Override public void mapTileRequestFailed(final MapTileRequestState pState) { if (mTileNotFoundImage!=null) { putTileIntoCache(pState.getMapTile(), mTileNotFoundImage, ExpirableBitmapDrawable.NOT_FOUND); for (final Handler handler : mTileRequestCompleteHandlers) { if (handler != null) { handler.sendEmptyMessage(MAPTILE_SUCCESS_ID); } } } else { for (final Handler handler : mTileRequestCompleteHandlers) { if (handler != null) { handler.sendEmptyMessage(MAPTILE_FAIL_ID); } } } if (Configuration.getInstance().isDebugTileProviders()) { Log.d(IMapView.LOGTAG,"MapTileProviderBase.mapTileRequestFailed(): " + MapTileIndex.toString(pState.getMapTile())); } } }
public class class_name { @Override public void mapTileRequestFailed(final MapTileRequestState pState) { if (mTileNotFoundImage!=null) { putTileIntoCache(pState.getMapTile(), mTileNotFoundImage, ExpirableBitmapDrawable.NOT_FOUND); // depends on control dependency: [if], data = [none] for (final Handler handler : mTileRequestCompleteHandlers) { if (handler != null) { handler.sendEmptyMessage(MAPTILE_SUCCESS_ID); // depends on control dependency: [if], data = [none] } } } else { for (final Handler handler : mTileRequestCompleteHandlers) { if (handler != null) { handler.sendEmptyMessage(MAPTILE_FAIL_ID); // depends on control dependency: [if], data = [none] } } } if (Configuration.getInstance().isDebugTileProviders()) { Log.d(IMapView.LOGTAG,"MapTileProviderBase.mapTileRequestFailed(): " + MapTileIndex.toString(pState.getMapTile())); // depends on control dependency: [if], data = [none] } } }
public class class_name { public String getParamTabWpTimeWarp() { String result; if (m_userSettings.getTimeWarp() == CmsContextInfo.CURRENT_TIME) { result = "-"; } else { result = CmsCalendarWidget.getCalendarLocalizedTime( getLocale(), getMessages(), m_userSettings.getTimeWarp()); } return result; } }
public class class_name { public String getParamTabWpTimeWarp() { String result; if (m_userSettings.getTimeWarp() == CmsContextInfo.CURRENT_TIME) { result = "-"; // depends on control dependency: [if], data = [none] } else { result = CmsCalendarWidget.getCalendarLocalizedTime( getLocale(), getMessages(), m_userSettings.getTimeWarp()); // depends on control dependency: [if], data = [none] } return result; } }
public class class_name { public Object parse(String expr, String id) throws PHPException { String varName = PHPInterpeter.getVar(); container.run(varName + " = " + expr); String type = container.get(PHPInterpeter.getType(varName)); if ("string".equals(type)) { return container.get(varName); } if ("boolean".equals(type)) { return new Boolean("1".equals(container.get(varName))); } if ("integer".equals(type)) { return new Integer(container.get(varName)); } if ("double".equals(type)) { return new Double(container.get(varName)); } if ("array".equals(type)) { String s = container.get(PHPInterpeter.serialize(PHPInterpeter.getArray(varName))); String [] ids = Helper.parseArray(s); Object [] array = new Object[ids.length]; for(int i = 0; i < ids.length; i ++) { array[i] = parse(PHPInterpeter.getObject(ids[i]), ids[i]); } return array; } if ("object".equals(type)) { String className = container.get(PHPInterpeter.getClass(varName)); if (id == null) { id = container.get(PHPInterpeter.saveObject(varName)); } PHPClassDescriptor desc = container.getClassDescriptor(className); return desc.createNewObject(id); } return null; } }
public class class_name { public Object parse(String expr, String id) throws PHPException { String varName = PHPInterpeter.getVar(); container.run(varName + " = " + expr); String type = container.get(PHPInterpeter.getType(varName)); if ("string".equals(type)) { return container.get(varName); } if ("boolean".equals(type)) { return new Boolean("1".equals(container.get(varName))); } if ("integer".equals(type)) { return new Integer(container.get(varName)); } if ("double".equals(type)) { return new Double(container.get(varName)); } if ("array".equals(type)) { String s = container.get(PHPInterpeter.serialize(PHPInterpeter.getArray(varName))); String [] ids = Helper.parseArray(s); Object [] array = new Object[ids.length]; for(int i = 0; i < ids.length; i ++) { array[i] = parse(PHPInterpeter.getObject(ids[i]), ids[i]); // depends on control dependency: [for], data = [i] } return array; } if ("object".equals(type)) { String className = container.get(PHPInterpeter.getClass(varName)); if (id == null) { id = container.get(PHPInterpeter.saveObject(varName)); // depends on control dependency: [if], data = [none] } PHPClassDescriptor desc = container.getClassDescriptor(className); return desc.createNewObject(id); } return null; } }
public class class_name { @Override public void triggerJob(String jobKey, JobDataMap data) throws SchedulerException { validateState(); OperableTrigger operableTrigger = simpleTriggerBuilder() .withIdentity(jobKey + "-trigger") .forJob(jobKey) .startAt(new Date()) .build(); // OperableTrigger operableTrigger = TriggerBuilder.newTriggerBuilder().withIdentity(jobKey + // "-trigger").forJob(jobKey) // // .withTriggerImplementation(SimpleScheduleBuilder.simpleScheduleBuilderBuilder().instantiate()).startAt(new Date()).build(); // TODO what does this accomplish??? Seems to sets it's next fire time internally operableTrigger.computeFirstFireTime(null); if (data != null) { operableTrigger.setJobDataMap(data); } boolean collision = true; while (collision) { try { quartzSchedulerResources.getJobStore().storeTrigger(operableTrigger, false); collision = false; } catch (ObjectAlreadyExistsException oaee) { operableTrigger.setName(newTriggerId()); } } notifySchedulerThread(operableTrigger.getNextFireTime().getTime()); notifySchedulerListenersScheduled(operableTrigger); } }
public class class_name { @Override public void triggerJob(String jobKey, JobDataMap data) throws SchedulerException { validateState(); OperableTrigger operableTrigger = simpleTriggerBuilder() .withIdentity(jobKey + "-trigger") .forJob(jobKey) .startAt(new Date()) .build(); // OperableTrigger operableTrigger = TriggerBuilder.newTriggerBuilder().withIdentity(jobKey + // "-trigger").forJob(jobKey) // // .withTriggerImplementation(SimpleScheduleBuilder.simpleScheduleBuilderBuilder().instantiate()).startAt(new Date()).build(); // TODO what does this accomplish??? Seems to sets it's next fire time internally operableTrigger.computeFirstFireTime(null); if (data != null) { operableTrigger.setJobDataMap(data); } boolean collision = true; while (collision) { try { quartzSchedulerResources.getJobStore().storeTrigger(operableTrigger, false); // depends on control dependency: [try], data = [none] collision = false; // depends on control dependency: [try], data = [none] } catch (ObjectAlreadyExistsException oaee) { operableTrigger.setName(newTriggerId()); } // depends on control dependency: [catch], data = [none] } notifySchedulerThread(operableTrigger.getNextFireTime().getTime()); notifySchedulerListenersScheduled(operableTrigger); } }
public class class_name { @Override public boolean isAnnotationDriven() { if (!determinedIfAnnotationDriven) { // lazy load this, and only calculate it once for (Field field : type.getDeclaredFields()) { if (field.isAnnotationPresent(CsvBind.class)) { annotationDriven = true; break; } } determinedIfAnnotationDriven = true; } return annotationDriven; } }
public class class_name { @Override public boolean isAnnotationDriven() { if (!determinedIfAnnotationDriven) { // lazy load this, and only calculate it once for (Field field : type.getDeclaredFields()) { if (field.isAnnotationPresent(CsvBind.class)) { annotationDriven = true; // depends on control dependency: [if], data = [none] break; } } determinedIfAnnotationDriven = true; // depends on control dependency: [if], data = [none] } return annotationDriven; } }
public class class_name { @Override public RowIterator findFirstRowNotNull(Session session, PersistentStore store) { readLock.lock(); try { NodeAVL x = getAccessor(store); while (x != null) { boolean t = colTypes[0].compare( null, x.getRow(store).getData()[colIndex[0]]) >= 0; if (t) { NodeAVL r = x.getRight(store); if (r == null) { break; } x = r; } else { NodeAVL l = x.getLeft(store); if (l == null) { break; } x = l; } } while (x != null) { Object colvalue = x.getRow(store).getData()[colIndex[0]]; if (colvalue == null) { x = next(store, x); } else { break; } } // MVCC while (session != null && x != null) { Row row = x.getRow(store); if (session.database.txManager.canRead(session, row)) { break; } x = next(store, x); } return getIterator(session, store, x); } finally { readLock.unlock(); } } }
public class class_name { @Override public RowIterator findFirstRowNotNull(Session session, PersistentStore store) { readLock.lock(); try { NodeAVL x = getAccessor(store); while (x != null) { boolean t = colTypes[0].compare( null, x.getRow(store).getData()[colIndex[0]]) >= 0; if (t) { NodeAVL r = x.getRight(store); if (r == null) { break; } x = r; // depends on control dependency: [if], data = [none] } else { NodeAVL l = x.getLeft(store); if (l == null) { break; } x = l; // depends on control dependency: [if], data = [none] } } while (x != null) { Object colvalue = x.getRow(store).getData()[colIndex[0]]; if (colvalue == null) { x = next(store, x); // depends on control dependency: [if], data = [none] } else { break; } } // MVCC while (session != null && x != null) { Row row = x.getRow(store); if (session.database.txManager.canRead(session, row)) { break; } x = next(store, x); // depends on control dependency: [while], data = [none] } return getIterator(session, store, x); // depends on control dependency: [try], data = [none] } finally { readLock.unlock(); } } }
public class class_name { private static void drawRectangles(Graphics2D g2, GrayF32 image, GrayF32 template, GrayF32 mask, int expectedMatches) { List<Match> found = findMatches(image, template, mask, expectedMatches); int r = 2; int w = template.width + 2 * r; int h = template.height + 2 * r; for (Match m : found) { System.out.println("Match "+m.x+" "+m.y+" score "+m.score); // this demonstrates how to filter out false positives // the meaning of score will depend on the template technique // if( m.score < -1000 ) // This line is commented out for demonstration purposes // continue; // the return point is the template's top left corner int x0 = m.x - r; int y0 = m.y - r; int x1 = x0 + w; int y1 = y0 + h; g2.drawLine(x0, y0, x1, y0); g2.drawLine(x1, y0, x1, y1); g2.drawLine(x1, y1, x0, y1); g2.drawLine(x0, y1, x0, y0); } } }
public class class_name { private static void drawRectangles(Graphics2D g2, GrayF32 image, GrayF32 template, GrayF32 mask, int expectedMatches) { List<Match> found = findMatches(image, template, mask, expectedMatches); int r = 2; int w = template.width + 2 * r; int h = template.height + 2 * r; for (Match m : found) { System.out.println("Match "+m.x+" "+m.y+" score "+m.score); // depends on control dependency: [for], data = [m] // this demonstrates how to filter out false positives // the meaning of score will depend on the template technique // if( m.score < -1000 ) // This line is commented out for demonstration purposes // continue; // the return point is the template's top left corner int x0 = m.x - r; int y0 = m.y - r; int x1 = x0 + w; int y1 = y0 + h; g2.drawLine(x0, y0, x1, y0); // depends on control dependency: [for], data = [none] g2.drawLine(x1, y0, x1, y1); // depends on control dependency: [for], data = [none] g2.drawLine(x1, y1, x0, y1); // depends on control dependency: [for], data = [none] g2.drawLine(x0, y1, x0, y0); // depends on control dependency: [for], data = [none] } } }
public class class_name { @Override public void writeFile(List lblSeqs, String filename) { String ret = writeString(lblSeqs); try{ BufferedWriter out = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(filename), "UTF-8")); out.write(ret); out.close(); } catch (Exception e){ } } }
public class class_name { @Override public void writeFile(List lblSeqs, String filename) { String ret = writeString(lblSeqs); try{ BufferedWriter out = new BufferedWriter(new OutputStreamWriter( new FileOutputStream(filename), "UTF-8")); out.write(ret); // depends on control dependency: [try], data = [none] out.close(); // depends on control dependency: [try], data = [none] } catch (Exception e){ } // depends on control dependency: [catch], data = [none] } }
public class class_name { static java.util.TimeZone findZone(String id) { if (id.equals("Z")) { return java.util.TimeZone.getTimeZone("GMT+00:00"); } else if (id.startsWith("UTC")) { return java.util.TimeZone.getTimeZone("GMT" + id.substring(3)); } else if (id.startsWith("UT")) { return java.util.TimeZone.getTimeZone("GMT" + id.substring(2)); } else { return java.util.TimeZone.getTimeZone(id); } } }
public class class_name { static java.util.TimeZone findZone(String id) { if (id.equals("Z")) { return java.util.TimeZone.getTimeZone("GMT+00:00"); // depends on control dependency: [if], data = [none] } else if (id.startsWith("UTC")) { return java.util.TimeZone.getTimeZone("GMT" + id.substring(3)); // depends on control dependency: [if], data = [none] } else if (id.startsWith("UT")) { return java.util.TimeZone.getTimeZone("GMT" + id.substring(2)); // depends on control dependency: [if], data = [none] } else { return java.util.TimeZone.getTimeZone(id); // depends on control dependency: [if], data = [none] } } }
public class class_name { public @Nullable CheckResponse check(CheckRequest req) { if (cache == null) { return null; } Preconditions.checkArgument(req.getServiceName().equals(serviceName), "service name mismatch"); Preconditions.checkNotNull(req.getOperation(), "expected check operation was not present"); if (req.getOperation().getImportance() != Importance.LOW) { return null; // send the request now if importance is not LOW } String signature = sign(req).toString(); CachedItem item = cache.getIfPresent(signature); if (item == null) { return null; // signal caller to send the response } else { return handleCachedResponse(req, item); } } }
public class class_name { public @Nullable CheckResponse check(CheckRequest req) { if (cache == null) { return null; // depends on control dependency: [if], data = [none] } Preconditions.checkArgument(req.getServiceName().equals(serviceName), "service name mismatch"); Preconditions.checkNotNull(req.getOperation(), "expected check operation was not present"); if (req.getOperation().getImportance() != Importance.LOW) { return null; // send the request now if importance is not LOW // depends on control dependency: [if], data = [none] } String signature = sign(req).toString(); CachedItem item = cache.getIfPresent(signature); if (item == null) { return null; // signal caller to send the response // depends on control dependency: [if], data = [none] } else { return handleCachedResponse(req, item); // depends on control dependency: [if], data = [none] } } }
public class class_name { public SipServletResponse createResponseToOriginalRequest( SipSession session, int status, String reasonPhrase) { if (session == null) { throw new NullPointerException("Null arg"); } final MobicentsSipSession sipSession = (MobicentsSipSession) session; if (!sipSession.isValidInternal()) { throw new IllegalArgumentException("sip session " + sipSession.getId() + " is invalid !"); } final MobicentsSipServletMessage sipServletMessageImpl = getOriginalRequest(sipSession); if (!(sipServletMessageImpl instanceof SipServletRequestImpl)) { throw new IllegalStateException("session creating transaction message is not a request !"); } final SipServletRequestImpl sipServletRequestImpl = (SipServletRequestImpl) sipServletMessageImpl; if (RoutingState.FINAL_RESPONSE_SENT.equals(sipServletRequestImpl.getRoutingState())) { // checked by TCK test com.bea.sipservlet.tck.agents.api.javax_servlet_sip.B2buaHelperTest.testCreateResponseToOriginalRequest101 throw new IllegalStateException("subsequent response is inconsistent with an already sent response. a Final response has already been sent for this request " + sipServletRequestImpl); } if (logger.isDebugEnabled()) { logger.debug("creating response to original request " + sipServletRequestImpl + " on session " + session); } //set dialog from session in case is forked/derived //do this before creating response so proper toTag is selected SIPTransaction tx = (SIPTransaction) sipServletRequestImpl.getTransaction(); SIPDialog newDialog = (SIPDialog)sipSession.getSessionCreatingDialog(); if (newDialog != null && tx != null) { logger.debug("recovering dialog on transaction before send:" + System.identityHashCode(newDialog)); tx.setDialog(newDialog, newDialog.getDialogId()); } SipServletResponseImpl response = (SipServletResponseImpl) sipServletRequestImpl.createResponse(status, reasonPhrase); return response; } }
public class class_name { public SipServletResponse createResponseToOriginalRequest( SipSession session, int status, String reasonPhrase) { if (session == null) { throw new NullPointerException("Null arg"); } final MobicentsSipSession sipSession = (MobicentsSipSession) session; if (!sipSession.isValidInternal()) { throw new IllegalArgumentException("sip session " + sipSession.getId() + " is invalid !"); } final MobicentsSipServletMessage sipServletMessageImpl = getOriginalRequest(sipSession); if (!(sipServletMessageImpl instanceof SipServletRequestImpl)) { throw new IllegalStateException("session creating transaction message is not a request !"); } final SipServletRequestImpl sipServletRequestImpl = (SipServletRequestImpl) sipServletMessageImpl; if (RoutingState.FINAL_RESPONSE_SENT.equals(sipServletRequestImpl.getRoutingState())) { // checked by TCK test com.bea.sipservlet.tck.agents.api.javax_servlet_sip.B2buaHelperTest.testCreateResponseToOriginalRequest101 throw new IllegalStateException("subsequent response is inconsistent with an already sent response. a Final response has already been sent for this request " + sipServletRequestImpl); } if (logger.isDebugEnabled()) { logger.debug("creating response to original request " + sipServletRequestImpl + " on session " + session); // depends on control dependency: [if], data = [none] } //set dialog from session in case is forked/derived //do this before creating response so proper toTag is selected SIPTransaction tx = (SIPTransaction) sipServletRequestImpl.getTransaction(); SIPDialog newDialog = (SIPDialog)sipSession.getSessionCreatingDialog(); if (newDialog != null && tx != null) { logger.debug("recovering dialog on transaction before send:" + System.identityHashCode(newDialog)); // depends on control dependency: [if], data = [(newDialog] tx.setDialog(newDialog, newDialog.getDialogId()); // depends on control dependency: [if], data = [(newDialog] } SipServletResponseImpl response = (SipServletResponseImpl) sipServletRequestImpl.createResponse(status, reasonPhrase); return response; } }
public class class_name { public boolean add(SpdData data) { if (data == null) return false; if (data instanceof SpdLong) { return super.add(data); } else { return false; } } }
public class class_name { public boolean add(SpdData data) { if (data == null) return false; if (data instanceof SpdLong) { return super.add(data); // depends on control dependency: [if], data = [none] } else { return false; // depends on control dependency: [if], data = [none] } } }
public class class_name { public void marshall(BatchAddFacetToObjectResponse batchAddFacetToObjectResponse, ProtocolMarshaller protocolMarshaller) { if (batchAddFacetToObjectResponse == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
public class class_name { public void marshall(BatchAddFacetToObjectResponse batchAddFacetToObjectResponse, ProtocolMarshaller protocolMarshaller) { if (batchAddFacetToObjectResponse == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } // depends on control dependency: [catch], data = [none] } }
public class class_name { public static <T> Optional<T> get(final Map map, final Class<T> clazz, final Object... path) { if (path == null || path.length == 0) { throw new IllegalArgumentException(PATH_MUST_BE_SPECIFIED); } if (path.length == 1) { return Optional.ofNullable((T) map.get(path[0])); } final Object[] pathToLastNode = Arrays.copyOfRange(path, 0, path.length - 1); final Object lastKey = path[path.length - 1]; Map<Object, Object> intermediateMap = map; for (Object key : pathToLastNode) { final Object node = intermediateMap.get(key); if (node != null) { final Optional<Map> nodeMapOption = ClassApi.cast(node, Map.class); if (nodeMapOption.isPresent()) { intermediateMap = nodeMapOption.get(); } else { throw new IllegalAccessError("Node with key '" + key + "' is not a map!"); } } else { return Optional.empty(); } } return Optional.ofNullable((T) intermediateMap.get(lastKey)); } }
public class class_name { public static <T> Optional<T> get(final Map map, final Class<T> clazz, final Object... path) { if (path == null || path.length == 0) { throw new IllegalArgumentException(PATH_MUST_BE_SPECIFIED); } if (path.length == 1) { return Optional.ofNullable((T) map.get(path[0])); // depends on control dependency: [if], data = [none] } final Object[] pathToLastNode = Arrays.copyOfRange(path, 0, path.length - 1); final Object lastKey = path[path.length - 1]; Map<Object, Object> intermediateMap = map; for (Object key : pathToLastNode) { final Object node = intermediateMap.get(key); if (node != null) { final Optional<Map> nodeMapOption = ClassApi.cast(node, Map.class); if (nodeMapOption.isPresent()) { intermediateMap = nodeMapOption.get(); // depends on control dependency: [if], data = [none] } else { throw new IllegalAccessError("Node with key '" + key + "' is not a map!"); } } else { return Optional.empty(); // depends on control dependency: [if], data = [none] } } return Optional.ofNullable((T) intermediateMap.get(lastKey)); } }
public class class_name { public static Collection<?> asCollection(Object obj) { if (obj == null) { return Collections.EMPTY_SET; // 返回空集合 } else if (obj.getClass().isArray()) { return Arrays.asList(asArray(obj)); } else if (obj instanceof Collection<?>) { return (Collection<?>) obj; // List, Set, Collection 直接返回 } else if (obj instanceof Map<?, ?>) { return ((Map<?, ?>) obj).entrySet(); // 映射表, 返回条目的集合 } else { return Arrays.asList(obj); // 其他类型, 返回包含单个对象的集合 } } }
public class class_name { public static Collection<?> asCollection(Object obj) { if (obj == null) { return Collections.EMPTY_SET; // 返回空集合 // depends on control dependency: [if], data = [none] } else if (obj.getClass().isArray()) { return Arrays.asList(asArray(obj)); // depends on control dependency: [if], data = [none] } else if (obj instanceof Collection<?>) { return (Collection<?>) obj; // List, Set, Collection 直接返回 // depends on control dependency: [if], data = [)] } else if (obj instanceof Map<?, ?>) { return ((Map<?, ?>) obj).entrySet(); // 映射表, 返回条目的集合 // depends on control dependency: [if], data = [)] } else { return Arrays.asList(obj); // 其他类型, 返回包含单个对象的集合 // depends on control dependency: [if], data = [)] } } }
public class class_name { public static String[] getAvailablePortNames() { SerialPort[] ports = SerialPort.getCommPorts(); String[] portNames = new String[ports.length]; for( int i = 0; i < portNames.length; i++ ) { String systemPortName = ports[i].getSystemPortName(); portNames[i] = systemPortName; } return portNames; } }
public class class_name { public static String[] getAvailablePortNames() { SerialPort[] ports = SerialPort.getCommPorts(); String[] portNames = new String[ports.length]; for( int i = 0; i < portNames.length; i++ ) { String systemPortName = ports[i].getSystemPortName(); portNames[i] = systemPortName; // depends on control dependency: [for], data = [i] } return portNames; } }
public class class_name { public List<JAXBElement<? extends AbstractSurfacePatchType>> get_SurfacePatch() { if (_SurfacePatch == null) { _SurfacePatch = new ArrayList<JAXBElement<? extends AbstractSurfacePatchType>>(); } return this._SurfacePatch; } }
public class class_name { public List<JAXBElement<? extends AbstractSurfacePatchType>> get_SurfacePatch() { if (_SurfacePatch == null) { _SurfacePatch = new ArrayList<JAXBElement<? extends AbstractSurfacePatchType>>(); // depends on control dependency: [if], data = [none] } return this._SurfacePatch; } }
public class class_name { public void fatal( final Object message ) { if( m_delegate.isFatalEnabled() && message != null ) { m_delegate.fatal( message.toString(), null ); } } }
public class class_name { public void fatal( final Object message ) { if( m_delegate.isFatalEnabled() && message != null ) { m_delegate.fatal( message.toString(), null ); // depends on control dependency: [if], data = [null )] } } }
public class class_name { @Override public void supplierChanged(SupplierEvent supplierEvent) { SupplierEvent.Type type = supplierEvent.type(); @SuppressWarnings("unchecked") Supplier<T> supplier = (Supplier<T>) supplierEvent.supplier(); switch (type) { case ADD: if (supplierReference.compareAndSet(null, supplier)) { supplierFutureRef.get().complete(supplier); } break; case REMOVE: if (supplierReference.compareAndSet(supplier, null)) { supplierFutureRef.set(new CompletableFuture<>()); } break; default: throw new IllegalStateException("Unknown supplier event: " + supplierEvent); } } }
public class class_name { @Override public void supplierChanged(SupplierEvent supplierEvent) { SupplierEvent.Type type = supplierEvent.type(); @SuppressWarnings("unchecked") Supplier<T> supplier = (Supplier<T>) supplierEvent.supplier(); switch (type) { case ADD: if (supplierReference.compareAndSet(null, supplier)) { supplierFutureRef.get().complete(supplier); // depends on control dependency: [if], data = [none] } break; case REMOVE: if (supplierReference.compareAndSet(supplier, null)) { supplierFutureRef.set(new CompletableFuture<>()); // depends on control dependency: [if], data = [none] } break; default: throw new IllegalStateException("Unknown supplier event: " + supplierEvent); } } }
public class class_name { public static <E, C extends Counter<E>> double L1Norm(C c) { double sumAbs = 0.0; for (E key : c.keySet()) { double count = c.getCount(key); if (count != 0.0) { sumAbs += Math.abs(count); } } return sumAbs; } }
public class class_name { public static <E, C extends Counter<E>> double L1Norm(C c) { double sumAbs = 0.0; for (E key : c.keySet()) { double count = c.getCount(key); if (count != 0.0) { sumAbs += Math.abs(count); // depends on control dependency: [if], data = [(count] } } return sumAbs; } }
public class class_name { private void readOpPackBits(final ImageInputStream pStream, final boolean hasRegion, final int pPixmapCount) throws IOException { // Get rowBytes int rowBytesRaw = pStream.readUnsignedShort(); int rowBytes = rowBytesRaw & 0x7FFF; boolean isPixMap = (rowBytesRaw & 0x8000) > 0; if (DEBUG) { System.out.print(hasRegion ? "packBitsRgn" : "packBitsRect"); System.out.print(", rowBytes: " + rowBytes); if (isPixMap) { System.out.print(", it is a PixMap"); } else { System.out.print(", it is a BitMap"); } } // Get bounds rectangle. THIS IS NOT TO BE SCALED BY THE RESOLUTION! Rectangle bounds = new Rectangle(); int y = pStream.readUnsignedShort(); int x = pStream.readUnsignedShort(); bounds.setLocation(x, y); y = pStream.readUnsignedShort(); x = pStream.readUnsignedShort(); bounds.setSize(x - bounds.x, y - bounds.y); if (DEBUG) { System.out.print(", bounds: " + bounds); } ColorModel colorModel; int cmpSize; if (isPixMap) { // Get PixMap record version number int pmVersion = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", pmVersion: " + pmVersion); } // Get packing format int packType = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", packType: " + packType); } // Get size of packed data (not used for v2) int packSize = pStream.readInt(); if (DEBUG) { System.out.println(", packSize: " + packSize); } // Get resolution info double hRes = PICTUtil.readFixedPoint(pStream); double vRes = PICTUtil.readFixedPoint(pStream); if (DEBUG) { System.out.print("hRes: " + hRes + ", vRes: " + vRes); } // Get pixel type int pixelType = pStream.readUnsignedShort(); if (DEBUG) { if (pixelType == 0) { System.out.print(", indexed pixels"); } else { System.out.print(", RGBDirect"); } } // Get pixel size int pixelSize = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", pixelSize:" + pixelSize); } // Get pixel component count int cmpCount = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", cmpCount:" + cmpCount); } // Get pixel component size cmpSize = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", cmpSize:" + cmpSize); } // planeBytes (ignored) int planeBytes = pStream.readInt(); if (DEBUG) { System.out.print(", planeBytes:" + planeBytes); } // Handle to ColorTable record int clutId = pStream.readInt(); if (DEBUG) { System.out.print(", clutId:" + clutId); } // Reserved pStream.readInt(); // TODO: Seems to be packType 0 all the time? // packType = 0 means default.... if (packType != 0) { throw new IIOException("Unknown pack type: " + packType); } if (pixelType != 0) { throw new IIOException("Unsupported pixel type: " + pixelType); } // Color table colorModel = PICTUtil.readColorTable(pStream, pixelSize); } else { // Old style BitMap record cmpSize = 1; colorModel = QuickDraw.MONOCHROME; } // Get source rectangle. We DO NOT scale the coordinates by the // resolution info, since we are in pixmap coordinates here // TODO: readReactangleNonScaled() Rectangle srcRect = new Rectangle(); y = pStream.readUnsignedShort(); x = pStream.readUnsignedShort(); srcRect.setLocation(x, y); y = pStream.readUnsignedShort(); x = pStream.readUnsignedShort(); srcRect.setSize(x - srcRect.x, y - srcRect.y); if (DEBUG) { System.out.print(", srcRect:" + srcRect); } // TODO: FixMe... // Get destination rectangle. We DO scale the coordinates according to // the image resolution, since we are working in display coordinates Rectangle dstRect = new Rectangle(); readRectangle(pStream, dstRect); if (DEBUG) { System.out.print(", dstRect:" + dstRect); } // Get transfer mode int transferMode = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", mode: " + transferMode); } Rectangle regionBounds = new Rectangle(); Area region = hasRegion ? readRegion(pStream, regionBounds) : null; if (DEBUG) { if (hasRegion) { verboseRegionCmd(", region", regionBounds, region); } else { System.out.println(); } } // Set up pixel buffer for the RGB values byte[] pixArray = new byte[srcRect.height * rowBytes]; int pixBufOffset = 0; // Read in the RGB arrays for (int scanline = 0; scanline < srcRect.height; scanline++) { // Read in the scanline if (rowBytes >= 8) { // Get byteCount of the scanline int packedBytesCount = rowBytes > 250 ? pStream.readUnsignedShort() : pStream.readUnsignedByte(); // Unpack them all Decoder decoder = new PackBitsDecoder(); DataInput unPackBits = new DataInputStream(new DecoderStream(IIOUtil.createStreamAdapter(pStream, packedBytesCount), decoder)); unPackBits.readFully(pixArray, pixBufOffset, rowBytes); } else { // Uncompressed imageInput.readFully(pixArray, pixBufOffset, rowBytes); } // Increment pixel buffer offset pixBufOffset += rowBytes; //////////////////////////////////////////////////// // TODO: This works for single image PICTs only... // However, this is the most common case. Ok for now processImageProgress(scanline * 100 / srcRect.height); if (abortRequested()) { processReadAborted(); // Skip rest of image data for (int skip = scanline + 1; skip < srcRect.height; skip++) { // Get byteCount of the scanline int packedBytesCount; if (rowBytes < 8) { packedBytesCount = rowBytes; } else if (rowBytes > 250) { packedBytesCount = pStream.readUnsignedShort(); } else { packedBytesCount = pStream.readUnsignedByte(); } pStream.readFully(new byte[packedBytesCount], 0, packedBytesCount); if (DEBUG) { System.out.print("Skip " + skip + ", byteCount: " + packedBytesCount); } } break; } //////////////////////////////////////////////////// } // We add all new images to it. If we are just replaying, then // "pPixmapCount" will never be greater than the size of the vector if (images.size() <= pPixmapCount) { // Create BufferedImage and add buffer it for multiple reads DataBuffer db = new DataBufferByte(pixArray, pixArray.length); WritableRaster raster = Raster.createPackedRaster(db, (rowBytes * 8) / cmpSize, srcRect.height, cmpSize, null); BufferedImage img = new BufferedImage(colorModel, raster, colorModel.isAlphaPremultiplied(), null); images.add(img); } // Draw the image BufferedImage img = images.get(pPixmapCount); if (img != null) { srcRect.setLocation(0, 0); // Raster always start at 0,0 context.copyBits(img, srcRect, dstRect, transferMode, region); } // Line break at the end if (DEBUG) { System.out.println(); } } }
public class class_name { private void readOpPackBits(final ImageInputStream pStream, final boolean hasRegion, final int pPixmapCount) throws IOException { // Get rowBytes int rowBytesRaw = pStream.readUnsignedShort(); int rowBytes = rowBytesRaw & 0x7FFF; boolean isPixMap = (rowBytesRaw & 0x8000) > 0; if (DEBUG) { System.out.print(hasRegion ? "packBitsRgn" : "packBitsRect"); System.out.print(", rowBytes: " + rowBytes); if (isPixMap) { System.out.print(", it is a PixMap"); // depends on control dependency: [if], data = [none] } else { System.out.print(", it is a BitMap"); // depends on control dependency: [if], data = [none] } } // Get bounds rectangle. THIS IS NOT TO BE SCALED BY THE RESOLUTION! Rectangle bounds = new Rectangle(); int y = pStream.readUnsignedShort(); int x = pStream.readUnsignedShort(); bounds.setLocation(x, y); y = pStream.readUnsignedShort(); x = pStream.readUnsignedShort(); bounds.setSize(x - bounds.x, y - bounds.y); if (DEBUG) { System.out.print(", bounds: " + bounds); } ColorModel colorModel; int cmpSize; if (isPixMap) { // Get PixMap record version number int pmVersion = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", pmVersion: " + pmVersion); // depends on control dependency: [if], data = [none] } // Get packing format int packType = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", packType: " + packType); // depends on control dependency: [if], data = [none] } // Get size of packed data (not used for v2) int packSize = pStream.readInt(); if (DEBUG) { System.out.println(", packSize: " + packSize); // depends on control dependency: [if], data = [none] } // Get resolution info double hRes = PICTUtil.readFixedPoint(pStream); double vRes = PICTUtil.readFixedPoint(pStream); if (DEBUG) { System.out.print("hRes: " + hRes + ", vRes: " + vRes); // depends on control dependency: [if], data = [none] } // Get pixel type int pixelType = pStream.readUnsignedShort(); if (DEBUG) { if (pixelType == 0) { System.out.print(", indexed pixels"); // depends on control dependency: [if], data = [none] } else { System.out.print(", RGBDirect"); // depends on control dependency: [if], data = [none] } } // Get pixel size int pixelSize = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", pixelSize:" + pixelSize); // depends on control dependency: [if], data = [none] } // Get pixel component count int cmpCount = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", cmpCount:" + cmpCount); // depends on control dependency: [if], data = [none] } // Get pixel component size cmpSize = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", cmpSize:" + cmpSize); // depends on control dependency: [if], data = [none] } // planeBytes (ignored) int planeBytes = pStream.readInt(); if (DEBUG) { System.out.print(", planeBytes:" + planeBytes); // depends on control dependency: [if], data = [none] } // Handle to ColorTable record int clutId = pStream.readInt(); if (DEBUG) { System.out.print(", clutId:" + clutId); // depends on control dependency: [if], data = [none] } // Reserved pStream.readInt(); // TODO: Seems to be packType 0 all the time? // packType = 0 means default.... if (packType != 0) { throw new IIOException("Unknown pack type: " + packType); } if (pixelType != 0) { throw new IIOException("Unsupported pixel type: " + pixelType); } // Color table colorModel = PICTUtil.readColorTable(pStream, pixelSize); } else { // Old style BitMap record cmpSize = 1; colorModel = QuickDraw.MONOCHROME; } // Get source rectangle. We DO NOT scale the coordinates by the // resolution info, since we are in pixmap coordinates here // TODO: readReactangleNonScaled() Rectangle srcRect = new Rectangle(); y = pStream.readUnsignedShort(); x = pStream.readUnsignedShort(); srcRect.setLocation(x, y); y = pStream.readUnsignedShort(); x = pStream.readUnsignedShort(); srcRect.setSize(x - srcRect.x, y - srcRect.y); if (DEBUG) { System.out.print(", srcRect:" + srcRect); } // TODO: FixMe... // Get destination rectangle. We DO scale the coordinates according to // the image resolution, since we are working in display coordinates Rectangle dstRect = new Rectangle(); readRectangle(pStream, dstRect); if (DEBUG) { System.out.print(", dstRect:" + dstRect); } // Get transfer mode int transferMode = pStream.readUnsignedShort(); if (DEBUG) { System.out.print(", mode: " + transferMode); } Rectangle regionBounds = new Rectangle(); Area region = hasRegion ? readRegion(pStream, regionBounds) : null; if (DEBUG) { if (hasRegion) { verboseRegionCmd(", region", regionBounds, region); // depends on control dependency: [if], data = [none] } else { System.out.println(); // depends on control dependency: [if], data = [none] } } // Set up pixel buffer for the RGB values byte[] pixArray = new byte[srcRect.height * rowBytes]; int pixBufOffset = 0; // Read in the RGB arrays for (int scanline = 0; scanline < srcRect.height; scanline++) { // Read in the scanline if (rowBytes >= 8) { // Get byteCount of the scanline int packedBytesCount = rowBytes > 250 ? pStream.readUnsignedShort() : pStream.readUnsignedByte(); // Unpack them all Decoder decoder = new PackBitsDecoder(); DataInput unPackBits = new DataInputStream(new DecoderStream(IIOUtil.createStreamAdapter(pStream, packedBytesCount), decoder)); unPackBits.readFully(pixArray, pixBufOffset, rowBytes); } else { // Uncompressed imageInput.readFully(pixArray, pixBufOffset, rowBytes); } // Increment pixel buffer offset pixBufOffset += rowBytes; //////////////////////////////////////////////////// // TODO: This works for single image PICTs only... // However, this is the most common case. Ok for now processImageProgress(scanline * 100 / srcRect.height); if (abortRequested()) { processReadAborted(); // Skip rest of image data for (int skip = scanline + 1; skip < srcRect.height; skip++) { // Get byteCount of the scanline int packedBytesCount; if (rowBytes < 8) { packedBytesCount = rowBytes; } else if (rowBytes > 250) { packedBytesCount = pStream.readUnsignedShort(); } else { packedBytesCount = pStream.readUnsignedByte(); } pStream.readFully(new byte[packedBytesCount], 0, packedBytesCount); if (DEBUG) { System.out.print("Skip " + skip + ", byteCount: " + packedBytesCount); } } break; } //////////////////////////////////////////////////// } // We add all new images to it. If we are just replaying, then // "pPixmapCount" will never be greater than the size of the vector if (images.size() <= pPixmapCount) { // Create BufferedImage and add buffer it for multiple reads DataBuffer db = new DataBufferByte(pixArray, pixArray.length); WritableRaster raster = Raster.createPackedRaster(db, (rowBytes * 8) / cmpSize, srcRect.height, cmpSize, null); BufferedImage img = new BufferedImage(colorModel, raster, colorModel.isAlphaPremultiplied(), null); images.add(img); } // Draw the image BufferedImage img = images.get(pPixmapCount); if (img != null) { srcRect.setLocation(0, 0); // Raster always start at 0,0 context.copyBits(img, srcRect, dstRect, transferMode, region); } // Line break at the end if (DEBUG) { System.out.println(); } } }
public class class_name { public static HttpResponse readFrom(final InputStream in) { InputStreamReader inputStreamReader; try { inputStreamReader = new InputStreamReader(in, StringPool.ISO_8859_1); } catch (UnsupportedEncodingException unee) { throw new HttpException(unee); } BufferedReader reader = new BufferedReader(inputStreamReader); HttpResponse httpResponse = new HttpResponse(); // the first line String line; try { line = reader.readLine(); } catch (IOException ioex) { throw new HttpException(ioex); } if (line != null) { line = line.trim(); int ndx = line.indexOf(' '); int ndx2; if (ndx > -1) { httpResponse.httpVersion(line.substring(0, ndx)); ndx2 = line.indexOf(' ', ndx + 1); } else { httpResponse.httpVersion(HTTP_1_1); ndx2 = -1; ndx = 0; } if (ndx2 == -1) { ndx2 = line.length(); } try { httpResponse.statusCode(Integer.parseInt(line.substring(ndx, ndx2).trim())); } catch (NumberFormatException nfex) { httpResponse.statusCode(-1); } httpResponse.statusPhrase(line.substring(ndx2).trim()); } httpResponse.readHeaders(reader); httpResponse.readBody(reader); return httpResponse; } }
public class class_name { public static HttpResponse readFrom(final InputStream in) { InputStreamReader inputStreamReader; try { inputStreamReader = new InputStreamReader(in, StringPool.ISO_8859_1); // depends on control dependency: [try], data = [none] } catch (UnsupportedEncodingException unee) { throw new HttpException(unee); } // depends on control dependency: [catch], data = [none] BufferedReader reader = new BufferedReader(inputStreamReader); HttpResponse httpResponse = new HttpResponse(); // the first line String line; try { line = reader.readLine(); // depends on control dependency: [try], data = [none] } catch (IOException ioex) { throw new HttpException(ioex); } // depends on control dependency: [catch], data = [none] if (line != null) { line = line.trim(); // depends on control dependency: [if], data = [none] int ndx = line.indexOf(' '); int ndx2; if (ndx > -1) { httpResponse.httpVersion(line.substring(0, ndx)); // depends on control dependency: [if], data = [none] ndx2 = line.indexOf(' ', ndx + 1); // depends on control dependency: [if], data = [none] } else { httpResponse.httpVersion(HTTP_1_1); // depends on control dependency: [if], data = [none] ndx2 = -1; // depends on control dependency: [if], data = [none] ndx = 0; // depends on control dependency: [if], data = [none] } if (ndx2 == -1) { ndx2 = line.length(); // depends on control dependency: [if], data = [none] } try { httpResponse.statusCode(Integer.parseInt(line.substring(ndx, ndx2).trim())); // depends on control dependency: [try], data = [none] } catch (NumberFormatException nfex) { httpResponse.statusCode(-1); } // depends on control dependency: [catch], data = [none] httpResponse.statusPhrase(line.substring(ndx2).trim()); // depends on control dependency: [if], data = [(line] } httpResponse.readHeaders(reader); httpResponse.readBody(reader); return httpResponse; } }
public class class_name { public static int findEdge(Poly node, Poly neighbour, MeshData tile, MeshData neighbourTile) { // Compare indices first assuming there are no duplicate vertices for (int i = 0; i < node.vertCount; i++) { int j = (i + 1) % node.vertCount; for (int k = 0; k < neighbour.vertCount; k++) { int l = (k + 1) % neighbour.vertCount; if ((node.verts[i] == neighbour.verts[l] && node.verts[j] == neighbour.verts[k]) || (node.verts[i] == neighbour.verts[k] && node.verts[j] == neighbour.verts[l])) { return i; } } } // Fall back to comparing actual positions in case of duplicate vertices for (int i = 0; i < node.vertCount; i++) { int j = (i + 1) % node.vertCount; for (int k = 0; k < neighbour.vertCount; k++) { int l = (k + 1) % neighbour.vertCount; if ((samePosition(tile.verts, node.verts[i], neighbourTile.verts, neighbour.verts[l]) && samePosition(tile.verts, node.verts[j], neighbourTile.verts, neighbour.verts[k])) || (samePosition(tile.verts, node.verts[i], neighbourTile.verts, neighbour.verts[k]) && samePosition(tile.verts, node.verts[j], neighbourTile.verts, neighbour.verts[l]))) { return i; } } } return -1; } }
public class class_name { public static int findEdge(Poly node, Poly neighbour, MeshData tile, MeshData neighbourTile) { // Compare indices first assuming there are no duplicate vertices for (int i = 0; i < node.vertCount; i++) { int j = (i + 1) % node.vertCount; for (int k = 0; k < neighbour.vertCount; k++) { int l = (k + 1) % neighbour.vertCount; if ((node.verts[i] == neighbour.verts[l] && node.verts[j] == neighbour.verts[k]) || (node.verts[i] == neighbour.verts[k] && node.verts[j] == neighbour.verts[l])) { return i; // depends on control dependency: [if], data = [none] } } } // Fall back to comparing actual positions in case of duplicate vertices for (int i = 0; i < node.vertCount; i++) { int j = (i + 1) % node.vertCount; for (int k = 0; k < neighbour.vertCount; k++) { int l = (k + 1) % neighbour.vertCount; if ((samePosition(tile.verts, node.verts[i], neighbourTile.verts, neighbour.verts[l]) && samePosition(tile.verts, node.verts[j], neighbourTile.verts, neighbour.verts[k])) || (samePosition(tile.verts, node.verts[i], neighbourTile.verts, neighbour.verts[k]) && samePosition(tile.verts, node.verts[j], neighbourTile.verts, neighbour.verts[l]))) { return i; // depends on control dependency: [if], data = [none] } } } return -1; } }
public class class_name { private float getAlpha() { float alpha = 1.0f; if (this.composite instanceof AlphaComposite) { AlphaComposite ac = (AlphaComposite) this.composite; alpha = ac.getAlpha(); } return alpha; } }
public class class_name { private float getAlpha() { float alpha = 1.0f; if (this.composite instanceof AlphaComposite) { AlphaComposite ac = (AlphaComposite) this.composite; alpha = ac.getAlpha(); // depends on control dependency: [if], data = [none] } return alpha; } }
public class class_name { public Item getItem(String pathName, ItemGroup context) { if (context==null) context = this; if (pathName==null) return null; if (pathName.startsWith("/")) // absolute return getItemByFullName(pathName); Object/*Item|ItemGroup*/ ctx = context; StringTokenizer tokens = new StringTokenizer(pathName,"/"); while (tokens.hasMoreTokens()) { String s = tokens.nextToken(); if (s.equals("..")) { if (ctx instanceof Item) { ctx = ((Item)ctx).getParent(); continue; } ctx=null; // can't go up further break; } if (s.equals(".")) { continue; } if (ctx instanceof ItemGroup) { ItemGroup g = (ItemGroup) ctx; Item i = g.getItem(s); if (i==null || !i.hasPermission(Item.READ)) { // TODO consider DISCOVER ctx=null; // can't go up further break; } ctx=i; } else { return null; } } if (ctx instanceof Item) return (Item)ctx; // fall back to the classic interpretation return getItemByFullName(pathName); } }
public class class_name { public Item getItem(String pathName, ItemGroup context) { if (context==null) context = this; if (pathName==null) return null; if (pathName.startsWith("/")) // absolute return getItemByFullName(pathName); Object/*Item|ItemGroup*/ ctx = context; StringTokenizer tokens = new StringTokenizer(pathName,"/"); while (tokens.hasMoreTokens()) { String s = tokens.nextToken(); if (s.equals("..")) { if (ctx instanceof Item) { ctx = ((Item)ctx).getParent(); // depends on control dependency: [if], data = [none] continue; } ctx=null; // can't go up further // depends on control dependency: [if], data = [none] break; } if (s.equals(".")) { continue; } if (ctx instanceof ItemGroup) { ItemGroup g = (ItemGroup) ctx; Item i = g.getItem(s); if (i==null || !i.hasPermission(Item.READ)) { // TODO consider DISCOVER ctx=null; // can't go up further // depends on control dependency: [if], data = [none] break; } ctx=i; // depends on control dependency: [if], data = [none] } else { return null; // depends on control dependency: [if], data = [none] } } if (ctx instanceof Item) return (Item)ctx; // fall back to the classic interpretation return getItemByFullName(pathName); } }
public class class_name { private Object[] getParameters(FrameworkMethod fm) { Method m = fm.getMethod(); SecurityActions.setAccessible(m); Class<?>[] parameters = m.getParameterTypes(); Annotation[][] parameterAnnotations = m.getParameterAnnotations(); Object[] result = new Object[parameters.length]; for (int i = 0; i < parameters.length; i++) { Annotation[] parameterAnnotation = parameterAnnotations[i]; boolean inject = false; String name = null; for (int j = 0; j < parameterAnnotation.length; j++) { Annotation a = parameterAnnotation[j]; if (javax.inject.Inject.class.equals(a.annotationType())) { inject = true; } else if (javax.inject.Named.class.equals(a.annotationType())) { name = ((javax.inject.Named)a).value(); } } if (inject) { result[i] = resolveBean(name != null ? name : parameters[i].getSimpleName(), parameters[i]); } else { result[i] = null; } } return result; } }
public class class_name { private Object[] getParameters(FrameworkMethod fm) { Method m = fm.getMethod(); SecurityActions.setAccessible(m); Class<?>[] parameters = m.getParameterTypes(); Annotation[][] parameterAnnotations = m.getParameterAnnotations(); Object[] result = new Object[parameters.length]; for (int i = 0; i < parameters.length; i++) { Annotation[] parameterAnnotation = parameterAnnotations[i]; boolean inject = false; String name = null; for (int j = 0; j < parameterAnnotation.length; j++) { Annotation a = parameterAnnotation[j]; if (javax.inject.Inject.class.equals(a.annotationType())) { inject = true; // depends on control dependency: [if], data = [none] } else if (javax.inject.Named.class.equals(a.annotationType())) { name = ((javax.inject.Named)a).value(); // depends on control dependency: [if], data = [none] } } if (inject) { result[i] = resolveBean(name != null ? name : parameters[i].getSimpleName(), parameters[i]); // depends on control dependency: [if], data = [none] } else { result[i] = null; // depends on control dependency: [if], data = [none] } } return result; } }