Diff
stringlengths 5
2k
| FaultInducingLabel
int64 0
1
|
---|---|
* Copyright (c) OSGi Alliance (2010, 2016). All Rights Reserved.
@Version("1.2")
import org.osgi.annotation.versioning.Version; | 1 |
if (entry != null && entry.trim().length() > 0) { | 0 |
public class Canonicalizer11_OmitComments extends Canonicalizer20010315 {
super(false, true); | 0 |
boolean exactMatch, boolean fireOnBegin )
builder.fireOnBegin( fireOnBegin ); | 0 |
AbstractResourceProvider provider = new ServiceResourceProvider(
AbstractResourceProvider provider = new ServiceResourceProvider(
AbstractResourceProvider provider = new ServiceResourceProvider( | 0 |
TypedQuery<UpgradeEntity> query = entityManagerProvider.get().createNamedQuery(
"UpgradeEntity.findUpgradeByRequestId", UpgradeEntity.class);
| 0 |
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.aurora.common.stats;
import com.google.common.base.Preconditions;
/**
* Calculate the entropy of a discrete distribution of <T>.
*
* @author Gilad Mishne
*/
public class Entropy<T> {
private final CounterMap<T> counts = new CounterMap<T>();
private int total = 0;
private static double Log2(double n) {
return Math.log(n) / Math.log(2);
}
public Entropy(Iterable<T> elements) {
Preconditions.checkNotNull(elements);
for (T element : elements) {
counts.incrementAndGet(element);
total++;
}
}
public double entropy() {
double entropy = 0;
for (int count: counts.values()) {
double prob = (double) count / total;
entropy -= prob * Log2(prob);
}
return entropy;
}
public double perplexity() {
return Math.pow(2, entropy());
}
} | 0 |
* @author <a href="mailto:[email protected]">Björn Lütkemeier</a>
* @version CVS $Id: EventLinkTransformer.java,v 1.5 2003/07/10 13:17:04 cziegeler Exp $ | 0 |
/**
 * Returns whether this is a constant animation (i.e., a 'set' animation).
 * This implementation unconditionally returns true.
 */
protected boolean isConstantAnimation() {
    return true;
}
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.apache.xml.security.samples.DSNamespaceContext;
XPathFactory xpf = XPathFactory.newInstance();
XPath xpath = xpf.newXPath();
xpath.setNamespaceContext(new DSNamespaceContext());
String expression = "//ds:SignedInfo[1]";
Element signedInfo =
(Element) xpath.evaluate(expression, doc, XPathConstants.NODE); | 0 |
HostEntity hostEntity = null;
hostRoleCommandEntity = hostRoleCommandDAO.merge(hostRoleCommandEntity);
hostEntity = hostDAO.merge(hostEntity);
stageEntity = stageDAO.merge(stageEntity);
| 1 |
.putInputs("timer", "timer.out")
.putOutputs("timer", "timer.out")
.putOutputs("output", "output.out")
.putPcollections("output.out", pc("output.out")) | 0 |
BlockEnvironmentHelper.enterBlock(block);
BlockEnvironmentHelper.leaveBlock(); | 0 |
import org.apache.accumulo.core.security.tokens.PasswordToken; | 0 |
Map<String, AlertDefinitionSummary> summaries = new HashMap<>();
List<AlertDefinitionSummary> groupedResources = new ArrayList<>(
entrySet.size()); | 1 |
package org.apache.felix.sigil.common.osgi; | 0 |
return Boolean.valueOf(proxy == args[0]); | 0 |
import org.apache.aurora.scheduler.storage.JobUpdateStore;
bindStore(JobUpdateStore.Mutable.class, DBJobUpdateStore.class); | 0 |
package org.apache.commons.ognl.internal.entry;
import org.apache.commons.ognl.internal.CacheException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
/**
* User: Maurizio Cucchiara
* Date: 10/17/11
* Time: 1:14 AM
*/
public class MethodAccessCacheEntryFactory
implements CacheEntryFactory<Method, MethodAccessEntryValue>
{
public static final MethodAccessEntryValue INACCESSIBLE_NON_PUBLIC_METHOD =
new MethodAccessEntryValue( false, true );
public static final MethodAccessEntryValue ACCESSIBLE_NON_PUBLIC_METHOD =
new MethodAccessEntryValue( true, true );
public static final MethodAccessEntryValue PUBLIC_METHOD = new MethodAccessEntryValue( true );
public MethodAccessEntryValue create( Method method )
throws CacheException
{
final boolean notPublic = !Modifier.isPublic( method.getModifiers( ) ) || !Modifier.isPublic(
method.getDeclaringClass( ).getModifiers( ) );
if ( notPublic )
{
if ( !method.isAccessible( ) )
{
return INACCESSIBLE_NON_PUBLIC_METHOD;
}
else
{
return ACCESSIBLE_NON_PUBLIC_METHOD;
}
}
else
{
return PUBLIC_METHOD;
}
}
} | 0 |
package org.apache.commons.codec2; | 0 |
import java.util.concurrent.TimeUnit;
* Wait and verify that the channel has been successfully opened.
* @throws SshException if the action failed for any reason
* Wait and verify that the channel has been successfully opened within the specified timeout.
* @throws SshException if the action failed for any reason
*/
void verify(long timeout, TimeUnit unit) throws SshException;
/**
* Wait and verify that the authentication succeeded within the specified timeout.
* @throws SshException if the action failed for any reason
*/
void verify(long timeoutMillis) throws SshException;
/** | 0 |
private static org.apache.commons.logging.Log log =
org.apache.commons.logging.LogFactory.getLog(ApacheCanonicalizer.class);
if (log.isDebugEnabled()) {
log.debug("Created canonicalizer for algorithm: " + getAlgorithm());
if (log.isDebugEnabled()) {
log.debug("Canonicalizing " + nodeSet.size() + " nodes");
if (log.isDebugEnabled()) {
log.debug("Created transform for algorithm: " + getAlgorithm());
if (log.isDebugEnabled()) {
log.debug("ApacheData = true");
if (log.isDebugEnabled()) {
log.debug("isNodeSet() = true");
if (log.isDebugEnabled()) {
log.debug("isNodeSet() = false"); | 0 |
import java.net.URLDecoder;
// decode the URL before using it to create a File object as spaces in the filename
// are encoded as '%20' by URL class
final File keyStoreFile = new File(URLDecoder.decode(serverJksResource.getFile())); | 0 |
public class ReadOnlyDictionary<S, T> extends Dictionary<S, T> implements Map<S, T>
private final Hashtable<S, T> m_delegatee;
public ReadOnlyDictionary( final Dictionary<S, T> delegatee )
this.m_delegatee = ( Hashtable<S, T> ) delegatee;
this.m_delegatee = new Hashtable<S, T>();
for ( Enumeration<S> ke = delegatee.keys(); ke.hasMoreElements(); )
S key = ke.nextElement();
public Enumeration<T> elements()
public T get( final Object key )
public Enumeration<S> keys()
public T put( final S key, final T value )
public T remove( final Object key ) | 0 |
*
*
*
/**
*
* @version $Revision: 1.5 $
* Adds a new attribute definition to this dynamic tag
//-------------------------------------------------------------------------
if (name == null) {
throw new MissingAttributeException("name");
}
if (className == null) {
throw new MissingAttributeException("className");
}
Class theClass = null;
try {
ClassLoader classLoader = getClassLoader();
theClass = classLoader.loadClass(className);
}
catch (ClassNotFoundException e) {
try {
theClass = getClass().getClassLoader().loadClass(className);
}
try {
theClass = Class.forName(className);
}
throw new JellyTagException(
"Could not find class: "
+ className
+ " using ClassLoader: "
+ classLoader);
}
}
}
}
//-------------------------------------------------------------------------
/**
/**
* Sets the ClassLoader to use to load the class.
*/
public void setVarAttribute(String varAttribute) {
//-------------------------------------------------------------------------
| 0 |
* @since 4.0 | 0 |
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at | 0 |
// maintain the map of ComponentContext objects created for the
private IdentityHashMap serviceContexts = new IdentityHashMap();
BundleComponentContext serviceContext = new BundleComponentContext( this, bundle );
Object service = createImplementationObject( serviceContext );
if (service != null) {
serviceContext.setImplementationObject( service );
serviceContexts.put( service, serviceContext );
return service;
ComponentContext serviceContext = ( ComponentContext ) serviceContexts.remove( service );
disposeImplementationObject( service, serviceContext );
if ( serviceContexts.isEmpty() && getState() == STATE_ACTIVE ) | 0 |
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import org.junit.Rule;
import org.ops4j.pax.exam.junit.PaxExamServer;
@Rule
public PaxExamServer exam = new PaxExamServer(ServerConfiguration.class);
| 0 |
* @author <a href="mailto:[email protected]">Vincent Hardy</a> | 1 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.ipojo.api;
import org.apache.felix.ipojo.metadata.Element;
/**
* Common interfaces for all contributions.
* @author <a href="mailto:[email protected]">Felix Project Team</a>
*/
public interface HandlerConfiguration {
/**
* Gets the Handler description.
* @return the Element-Attribute structure containing the handler
* configuration.
*/
public Element getElement();
} | 0 |
* @author <a href="http://commons.apache.org/vfs/team-list.html">Commons VFS team</a>
/**
 * Sets the user name to be associated with changes to the file.
 *
 * @param opts the file system options to update
 * @param creatorName the creator name to record
 */
public void setCreatorName(FileSystemOptions opts, String creatorName)
{
    setParam(opts, "creatorName", creatorName);
}
/**
 * Returns the user name to be associated with changes to the file.
 *
 * @param opts the file system options to query
 * @return the stored creator name (behavior when unset depends on
 *         getString -- presumably null; confirm against the base builder)
 */
public String getCreatorName(FileSystemOptions opts)
{
    return getString(opts, "creatorName");
}
/**
 * Sets whether to use versioning.
 *
 * @param opts the file system options to update
 * @param versioning true to enable versioning
 */
public void setVersioning(FileSystemOptions opts, boolean versioning)
{
    // Boolean.valueOf reuses cached instances instead of the deprecated
    // Boolean(boolean) allocating constructor.
    setParam(opts, "versioning", Boolean.valueOf(versioning));
}
/**
 * Returns whether versioning is enabled.
 *
 * @param opts the file system options to query
 * @return true if versioning is enabled; defaults to false when unset
 */
public boolean isVersioning(FileSystemOptions opts)
{
    return getBoolean(opts, "versioning", false);
}
PAssert.that(inputs.apply("sql", QueryTransform.withQueryString(sql)))
inputs.apply("sql", QueryTransform.withQueryString(sql));
inputs.apply("sql", QueryTransform.withQueryString(sql));
inputs.apply("sql", QueryTransform.withQueryString(sql));
inputs.apply("sql", QueryTransform.withQueryString(sql));
.apply("join", QueryTransform.withQueryString(sql)) | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 0 |
* @version CVS $Id: JPEGSourceInspector.java,v 1.4 2003/05/19 10:02:05 stephan Exp $
private String PROPERTY_NS = "http://apache.org/cocoon/inspector/jpeg/1.0"; | 0 |
import org.easymock.EasyMock;
expect(managementController.getHostComponents(EasyMock.<Set<ServiceComponentHostRequest>>anyObject())).andReturn(responses).anyTimes();
expect(managementController.getHostComponents(EasyMock.<Set<ServiceComponentHostRequest>>anyObject())).andReturn(responses).anyTimes(); | 0 |
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
* @version CVS $Id: XMLSerializer.java,v 1.2 2004/03/05 13:02:50 bdelacretaz Exp $ | 1 |
@Test
@Test | 0 |
/**
 * Returns whether the given URI uses one of the Spring classpath
 * pseudo-protocol prefixes ("classpath*:" or "classpath:").
 *
 * @param uri the URI to inspect; must not be null
 * @return true if the URI starts with either classpath prefix
 */
public static boolean isClasspathUri(String uri) {
    // Collapsed from if (cond) return true; return false; -- same behavior.
    return uri.startsWith(ResourcePatternResolver.CLASSPATH_ALL_URL_PREFIX)
           || uri.startsWith(ResourceLoader.CLASSPATH_URL_PREFIX);
}
public static void readProperties(String propertiesPath,
Properties properties,
ResourceLoader resourceLoader,
Log logger) {
// check if directory exists
boolean load = true;
if ( !ResourceUtils.isClasspathUri(propertiesPath) ) {
final Resource resource = resolver.getResource(propertiesPath);
if (!resource.exists() ) {
load = false;
}
}
if ( load ) {
try {
resources = resolver.getResources(propertiesPath + "/*.properties");
if (logger != null && logger.isDebugEnabled())
logger.debug("Found " + resources.length + " matching resources in " + propertiesPath
+ "/*.properties");
} catch (IOException ignore) {
if (logger != null && logger.isDebugEnabled()) {
logger.debug("Unable to read properties from directory '" + propertiesPath
+ "' - Continuing initialization.", ignore);
} | 0 |
protected static final String[] TYPE_VALUES = { | 1 |
import com.twitter.common.stats.Stats;
import com.twitter.common.stats.StatsProvider;
bind(StatsProvider.class).toInstance(Stats.STATS_PROVIDER); | 0 |
allTests(false,BooleanComparator.booleanComparator(false));
allTests(true,BooleanComparator.booleanComparator(true));
assertSame(BooleanComparator.getFalseFirstComparator(),BooleanComparator.booleanComparator(false));
assertSame(BooleanComparator.getTrueFirstComparator(),BooleanComparator.booleanComparator(true)); | 0 |
if (c.isDestination()) {
ExecutableTrigger<W> subTrigger = c.nextStepTowardsDestination(); | 0 |
package org.apache.beam.runners.spark.translation;
import org.apache.beam.runners.spark.SparkPipelineRunner; | 0 |
import org.apache.atlas.typesystem.types.ClassType;
import org.apache.atlas.typesystem.types.DataTypes;
import org.apache.atlas.typesystem.types.HierarchicalTypeDefinition;
import org.apache.atlas.typesystem.types.utils.TypesUtil;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
@Test
public void testTypeUpdateWithReservedAttributes() throws AtlasException, JSONException {
// Create a type with a random name and one valid, unique required attribute.
String typeName = "test_type_"+ RandomStringUtils.randomAlphanumeric(10);
HierarchicalTypeDefinition<ClassType> typeDef = TypesUtil.createClassTypeDef(
typeName, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("test_type_attribute", DataTypes.STRING_TYPE));
TypesDef typesDef = new TypesDef(typeDef, false);
JSONObject type = metadataService.createType(TypesSerialization.toJson(typesDef));
Assert.assertNotNull(type.get(AtlasClient.TYPES));
// Attempt to update the same type, adding an attribute whose name contains
// '$' -- a reserved character that the service must reject.
HierarchicalTypeDefinition<ClassType> updatedTypeDef = TypesUtil.createClassTypeDef(
typeName, ImmutableList.<String>of(),
TypesUtil.createUniqueRequiredAttrDef("test_type_attribute", DataTypes.STRING_TYPE),
TypesUtil.createOptionalAttrDef("test_type_invalid_attribute$", DataTypes.STRING_TYPE));
TypesDef updatedTypesDef = new TypesDef(updatedTypeDef, false);
try {
metadataService.updateType(TypesSerialization.toJson(updatedTypesDef));
Assert.fail("Should not be able to update type with reserved character");
} catch (AtlasException ae) {
// pass.. expected
}
// The original type definition must still be retrievable after the
// rejected update.
String typeDefinition = metadataService.getTypeDefinition(typeName);
Assert.assertNotNull(typeDefinition);
}
| 0 |
listener.onInit(config, options);
final Map<String, MetricsReporterFactory> reporterFactories = getMetricsReporters();
appDescriptor
.withApplicationContainerContextFactory(executionContext.new Factory())
.withMetricsReporterFactories(reporterFactories);
listener.onInit(config, options);
new SamzaPipelineResult(app, runner, executionContext, listener, config); | 0 |
params.setBackgroundRGB(new int [] { 255, 255, 255 });
params.setSRGBIntent(PNGEncodeParam.INTENT_PERCEPTUAL);
boolean forceTransparentWhite = false; | 0 |
import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
* Provides information regarding the content of repositories.
* Gets the repository file which was set on this command. The repository can
* be set either by the creator of the command or by the
* {@link ExecutionCommandWrapper} when it is about to execute the command.
*
* @see #setRepositoryFile(CommandRepository)
* Sets the {@link CommandRepository} which will be sent down to the agent
* instructing it on which repository file to create on the host. In most
* cases, it is not necessary to set this file since the
* {@link ExecutionCommandWrapper} will set it in the event that it is
* missing. In fact, it is only appropriate to set this file in the following
* cases:
* <ul>
* <li>When distributing a repository to hosts in preparation for upgrade.
* This is because the service/component desired stack is not pointing to the
* new repository yet</li>
* <li>If the command does not contain a host or service/component></li>
* </ul>
*
* @param repository
* the command repository instance. | 0 |
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Before;
import org.junit.Test;
* @version $Id:$
public class TestPoolableConnection {
@Before
@Test
@Test
@Test | 0 |
public Process exec(final CommandLine cmd, final Map<String, String> env) | 0 |
public static final Authorizations EMPTY = new Authorizations();
| 0 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/ | 0 |
settings.forEach(itr -> tableQueryConfig.addIterator(itr)); | 0 |
if (ef != null)
return ef.create(DOMUtilities.getPrefix(qualifiedName),
document);
if (customFactories != null) {
ElementFactory cef;
cef = (ElementFactory)customFactories.get(namespaceURI, name);
if (cef != null)
return cef.create(DOMUtilities.getPrefix(qualifiedName),
document);
throw document.createDOMException
(DOMException.NOT_FOUND_ERR, "invalid.element",
new Object[] { namespaceURI, qualifiedName }); | 0 |
public E lower(final E e) {
public E floor(final E e) {
public E ceiling(final E e) {
public E higher(final E e) {
public NavigableSet<E> subSet(final E fromElement, final boolean fromInclusive, final E toElement, final boolean toInclusive) {
public NavigableSet<E> headSet(final E toElement, final boolean inclusive) {
public NavigableSet<E> tailSet(final E fromElement, final boolean inclusive) { | 0 |
p.apply("ReadLines", TextIO.Read.from(options.getInputFile())) | 0 |
protected void clearInternal()
super.clearInternal(); | 0 |
/*
* Copyright 1999-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.components.elementprocessor.impl.poi.hssf.elements;
/**
* No-op implementation of ElementProcessor to handle the "val-string"
* tag
*
* This element has no attributes and contains a string
*
* This element is not used in HSSFSerializer 1.0
*
* @author Marc Johnson ([email protected])
* @version $Id$
*/
public class EP_ValString extends BaseElementProcessor {

    /**
     * Creates the processor. No attribute validators are supplied because
     * this element declares no attributes.
     */
    public EP_ValString() {
        super(null);
    }
}
private static final String DFLT_FILENAME_ATTR = "file"; | 0 |
import java.io.Closeable;
public interface HttpClientConnectionManager extends Closeable {
* Returns a {@link LeaseRequest} object which can be used to obtain
* a {@link ConnectionEndpoint} to cancel the request by calling
* {@link LeaseRequest#cancel()}.
* Please note that newly allocated endpoints can be leased
* {@link ConnectionEndpoint#isConnected() disconnected}. The consumer of the endpoint
* is responsible for fully establishing the route to the endpoint target
* by calling {@link #connect(ConnectionEndpoint, long, TimeUnit, HttpContext)}
* in order to connect directly to the target or to the first proxy hop,
* and optionally calling {@link #upgrade(ConnectionEndpoint, HttpContext)} method
* to upgrade the underlying transport to Transport Layer Security after having
* executed a {@code CONNECT} method to all intermediate proxy hops.
LeaseRequest lease(HttpRoute route, Object state);
* Releases the endpoint back to the manager making it potentially
* @param endpoint the managed endpoint.
* @param newState the new connection state of {@code null} if state-less.
void release(ConnectionEndpoint endpoint, Object newState, long validDuration, TimeUnit timeUnit);
* Connects the endpoint to the initial hop (connection target in case
* or multiple proxies).
* @param endpoint the managed endpoint.
* @param connectTimeout connect timeout.
* @param timeUnit the time unit.
ConnectionEndpoint endpoint,
long connectTimeout,
TimeUnit timeUnit,
* Upgrades the endpoint's underlying transport to Transport Layer Security.
* @param endpoint the managed endpoint.
ConnectionEndpoint endpoint, | 0 |
Collection<IPackageImport> imports = sigilProject.getBundle().getBundleInfo().getImports();
Collection<IRequiredBundle> requiredBundles = sigilProject.getBundle().getBundleInfo() | 0 |
StateTag<? super K, WatermarkHoldState<W>> address) {
Map<W, WatermarkHoldState<W>> map = context.accessInEachMergingWindow(address);
WatermarkHoldState<W> result = context.access(address);
&& result.getOutputTimeFn().dependsOnlyOnEarliestInputTimestamp()) {
if (result.getOutputTimeFn().dependsOnlyOnWindow()) {
for (WatermarkHoldState<W> source : map.values()) {
StateTag<? super K, WatermarkHoldState<W>> address,
Collection<WatermarkHoldState<W>> sources, WatermarkHoldState<W> result,
&& result.getOutputTimeFn().dependsOnlyOnEarliestInputTimestamp()) {
if (result.getOutputTimeFn().dependsOnlyOnWindow()) {
for (WatermarkHoldState<W> source : sources) {
Instant hold = result.getOutputTimeFn().assignOutputTime(
BoundedWindow.TIMESTAMP_MIN_VALUE, resultWindow);
for (WatermarkHoldState<W> source : sources) {
for (WatermarkHoldState<W> source : sources) {
result.add(result.getOutputTimeFn().merge(resultWindow, outputTimesToMerge)); | 0 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.jexl2.parser;
/**
* Identifiers, variables and registers.
*/
public class ASTIdentifier extends JexlNode {
private int register = -1;
public ASTIdentifier(int id) {
super(id);
}
public ASTIdentifier(Parser p, int id) {
super(p, id);
}
void setRegister(String r) {
if (r.charAt(0) == '#') {
register = Integer.parseInt(r.substring(1));
}
}
void setRegister(int r) {
register = r;
}
public int getRegister() {
return register;
}
@Override
public Object jjtAccept(ParserVisitor visitor, Object data) {
return visitor.visit(this, data);
}
} | 0 |
import java.util.Date;
if (!cookie.isExpired(new Date())) {
Date now = new Date();
if (((Cookie) (it.next())).isExpired(now)) { | 0 |
public <T> Coder<T> getCoder(TypeToken<T> typeToken) {
return AvroCoder.of(typeToken); | 0 |
* Document doc;
* Element refElem = ref.toElement(doc);
* Element refElem = doc.getElement("Reference"); // PSEUDO
XMLUtils.addReturnToElement(this.constructionElement);
this.baseURI = BaseURI;
// this._manifest.appendChild(this.constructionElement);
// this._manifest.appendChild(this.doc.createTextNode("\n"));
this.constructionElement.appendChild(transforms.getElement());
XMLUtils.addReturnToElement(this.constructionElement);
MessageDigestAlgorithm.getInstance(this.doc,
this.constructionElement.appendChild(digestMethodElem);
XMLUtils.addReturnToElement(this.constructionElement);
XMLUtils.createElementInSignatureSpace(this.doc,
this.constructionElement.appendChild(digestValueElement);
XMLUtils.addReturnToElement(this.constructionElement);
this.baseURI=BaseURI;
transforms = new Transforms(el,this.baseURI);
return MessageDigestAlgorithm.getInstance(this.doc, uri);
this.constructionElement.setAttributeNS(null, Constants._ATT_URI,
return this.constructionElement.getAttributeNS(null, Constants._ATT_URI);
this.constructionElement.setAttributeNS(null, Constants._ATT_ID, Id);
IdResolver.registerElementById(this.constructionElement, Id);
return this.constructionElement.getAttributeNS(null, Constants._ATT_ID);
this.constructionElement.setAttributeNS(null, Constants._ATT_TYPE,
return this.constructionElement.getAttributeNS(null,
Text t = this.doc.createTextNode(base64codedValue);
Attr URIAttr = this.constructionElement.getAttributeNodeNS(null,
this.baseURI, this._manifest._perManifestResolvers);
XMLSignatureInput input = resolver.resolve(URIAttr, this.baseURI);
transforms = new Transforms(this.doc);
this.constructionElement.insertBefore | 0 |
* @version $Id: StringType.java,v 1.3 2004/02/11 09:53:44 antonio Exp $ | 0 |
AuthenticationToken token = Property.createInstanceFromPropertyName(conf, Property.TRACE_TOKEN_TYPE, AuthenticationToken.class, new PasswordToken()); | 0 |
import org.apache.xml.security.test.TestUtils;
Element nscontext = TestUtils.createDSctx(doc, "ds", | 0 |
ArrayList<CompactionInfo> compactions = new ArrayList<>();
HashSet<ByteSequence> allColumnFamilies = new HashSet<>();
List<SortedKeyValueIterator<Key,Value>> iters = new ArrayList<>(filesToCompact.size());
ArrayList<FileSKVIterator> readers = new ArrayList<>(filesToCompact.size()); | 0 |
// Thermos executors in the wild have been observed using 48-54MB RSS, setting to 128MB
// to be extra vigilant initially.
.addResources(Resources.makeMesosResource(Resources.CPUS, 0.25))
.addResources(Resources.makeMesosResource(Resources.RAM_MB, 128)) | 0 |
* {@link ParDo.MultiOutput} is translated to {@link ApexParDoOperator} that wraps the {@link DoFn}.
implements TransformTranslator<ParDo.MultiOutput<InputT, OutputT>> {
public void translate(ParDo.MultiOutput<InputT, OutputT> transform, TranslationContext context) {
ParDo.MultiOutput.class.getSimpleName(), | 0 |
/**
 * Reads the home-id property ({@code Constants.HOME_ID_KEY}) from the
 * given element.
 *
 * @param element the element to read the property from
 * @return the home id as a String -- presumably null when the property is
 *         absent; confirm against AtlasElement.getProperty's contract
 */
public static String getHomeId(AtlasElement element) {
    return element.getProperty(Constants.HOME_ID_KEY, String.class);
}
| 0 |
httpProcessorCopy, requestHandlerRegistry, connStrategyCopy, responseFactoryCopy, this.streamListener); | 0 |
import org.apache.sshd.server.auth.password.PasswordAuthenticator; | 0 |
// Try to gently feed more data to the event dispatcher
// if the session input buffer has not been fully exhausted
// (the choice of 5 iterations is purely arbitrary)
for (int i = 0; i < 5; i++) {
this.eventDispatch.inputReady(session);
if (!session.hasBufferedInput()
|| (session.getEventMask() & SelectionKey.OP_READ) == 0) {
break;
}
} | 0 |
if (!isNullOrEmpty(dataflowOptions.getDataflowWorkerJar())) {
List<String> experiments =
dataflowOptions.getExperiments() == null
? new ArrayList<>()
: dataflowOptions.getExperiments();
experiments.add("use_staged_dataflow_worker_jar");
dataflowOptions.setExperiments(experiments);
}
| 0 |
* <li><code>flowContext</code> - which represents the bean that was from the Flowscript </li>
* <li><code>continuation</code> - which represents the current continuation - an instance of {@link org.apache.cocoon.components.flow.WebContinuation}</li>
*
* @version CVS $Id: FlowVelocityGenerator.java,v 1.5 2003/04/15 06:52:28 coliver Exp $ | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 0 |
try {
prepareSocket(channel.socket());
Object attachment = sessionRequest.getAttachment();
ChannelEntry entry = new ChannelEntry(channel, attachment);
addChannel(entry);
} catch (IOException ex) {
sessionRequest.failed(ex);
} | 0 |
/**
 * Returns the classes registered as extension hosts.
 *
 * <p>NOTE(review): this returns the internal set directly, so callers can
 * mutate it. Consider Collections.unmodifiableSet -- confirm no caller
 * relies on mutating the returned set before changing this.
 */
public Set<Class<?>> getExtensionHosts() {
    return extensionHostClasses;
}
| 0 |
return FlinkKeyUtils.encodeKey(key, keyCoder); | 0 |
import cz.seznam.euphoria.core.client.io.UnsplittableBoundedSource;
import java.util.Collections;
public HadoopSource(
Class<K> keyClass, Class<V> valueClass,
Class<? extends InputFormat<K, V>> hadoopFormatCls,
Configuration hadoopConf) {
public List<BoundedDataSource<Pair<K, V>>> split(long desiredSplitSize) {
@Override
public Set<String> getLocations() {
return Collections.singleton("unknown");
}
@Override
public BoundedReader<Pair<K, V>> openReader() throws IOException {
throw new UnsupportedOperationException(
"Please call `split` on this source first.");
}
@Override
public int getDefaultParallelism() {
try {
Configuration c = conf.getWritable();
return getHadoopFormatInstance()
.getSplits(HadoopUtils.createJobContext(c))
.size();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
private static class HadoopPartition<K, V> extends UnsplittableBoundedSource<Pair<K, V>> {
| 0 |
import org.apache.accumulo.core.clientImpl.ClientContext;
import org.apache.accumulo.core.clientImpl.MasterClient;
import org.apache.accumulo.core.clientImpl.thrift.ThriftNotActiveServiceException;
import org.apache.accumulo.miniclusterImpl.MiniAccumuloConfigImpl; | 0 |
* @version $Revision$ $Date$ | 0 |
import java.io.IOException;
import java.io.InputStream;
public Node deserialize(byte[] source, Node ctx) throws XMLEncryptionException, IOException {
try (InputStream is = new ByteArrayInputStream(fragment)) {
return deserialize(ctx, new StreamSource(is));
} | 0 |
* eventually committed. Committed elements are executed by the {@link PTransform PTransforms} that
* consume the {@link PCollection} this bundle is a part of at a later point.
*
/** Returns the PCollection that the elements of this bundle belong to. */
* Returns the key that was output in the most recent {@code GroupByKey} in the execution of this
* bundle.
* Returns an {@link Iterable} containing all of the elements that have been added to this {@link
* CommittedBundle}.
* committed this bundle. Downstream synchronized processing time watermarks cannot progress past
* this point before consuming this bundle.
* Return a new {@link CommittedBundle} that is like this one, except calls to {@link
* #getElements()} will return the provided elements. This bundle is unchanged.
* <p>The value of the {@link #getSynchronizedProcessingOutputWatermark() synchronized processing
* output watermark} of the returned {@link CommittedBundle} is equal to the value returned from
* the current bundle. This is used to ensure a {@link PTransform} that could not complete
* processing on input elements properly holds the synchronized processing time to the appropriate
* value. | 1 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 0 |
FIRE_AND_FINISH(true, true); | 0 |
import org.apache.accumulo.cluster.standalone.StandaloneAccumuloCluster; | 0 |
// public to maintain binary compatibility
public DigestUtils() { | 0 |
@Override
@Override
@Override
@Override
@Override
@Override
@Override | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 0 |
public ByteConverterTestCase(final String name) {
protected NumberConverter makeConverter(final Object defaultValue) {
final String[] message= {
final Object[] input = {
final Byte[] expected = {
final Converter converter = makeConverter();
final Class<?> clazz = Byte.class;
final Long min = new Long(Byte.MIN_VALUE);
final Long max = new Long(Byte.MAX_VALUE);
final Long minMinusOne = new Long(min.longValue() - 1);
final Long maxPlusOne = new Long(max.longValue() + 1);
} catch (final Exception e) {
} catch (final Exception e) { | 0 |
import com.google.common.collect.ImmutableMap;
private static final String COUNTDOWN_MS_PARAM = "forceMaintenanceCountdownMs";
long startMs = hostMaintenanceRequest.get().getCreatedTimestampMs();
long timeoutMs = TimeAmount.of(
hostMaintenanceRequest.get().getTimeoutSecs(),
Time.SECONDS)
.as(Time.MILLISECONDS);
long endMs = startMs + timeoutMs;
long remainingMs = endMs - System.currentTimeMillis();
.join(
MAINTENANCE_COUNTDOWN_STAT_NAME,
InstanceKeys.toString(Tasks.getJob(task), Tasks.getInstanceId(task))
)
)
.getAndSet(remainingMs);
if (remainingMs < 0) {
ImmutableMap.of(COUNTDOWN_MS_PARAM, Long.toString(remainingMs)), | 0 |
* @author <a href="mailto:[email protected]">Björn Lütkemeier</a>
* @version CVS $Id: ObjectModelAction.java,v 1.2 2003/07/10 13:17:04 cziegeler Exp $ | 0 |
import org.apache.xml.security.signature.Reference;
import org.apache.xml.security.utils.JavaUtils;
boolean result=false;
result=signature.checkSignatureValue(cert);
result=signature.checkSignatureValue(pk);
if (!result) {
StringBuffer sb = new StringBuffer();
for (int i = 0; i < signature.getSignedInfo().getLength(); i++) {
boolean refVerify =
signature.getSignedInfo().getVerificationResult(i);
if (refVerify) {
log.debug("Reference " + i + " was OK");
} else {
sb.append(i + " ");
JavaUtils.writeBytesToFilename(filename + i + ".apache.txt", signature.getSignedInfo().item(i).getContentsAfterTransformation().getBytes());
log.debug("Reference " + i );
}
}
throw new RuntimeException("Falle:"+sb.toString());
}
return result; | 0 |
* This class provides support for the SVGTransformable interface.
* Creates a new SVGTransformableSupport.
public static SVGAnimatedTransformList getTransform(Element elt) {
throw new RuntimeException(" !!! TODO: getTransform()"); | 0 |
this.verifyHMAC(filename, resolver, followManifests, hmacKey); | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.