Columns:
Diff — string (lengths 5 to 2k characters)
FaultInducingLabel — int64 (0 or 1)

Rows below alternate between a Diff value and its FaultInducingLabel.
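A minimal sketch of how one such row might be modeled in code, assuming a simple (diff, label) pair; the type and field names below are illustrative and not part of the dataset itself:

// Hypothetical holder for one dataset row: a code diff plus its binary label.
// Requires Java 16+ for records; names are assumptions, not the dataset's API.
public record FaultInducingSample(String diff, int faultInducingLabel) {
    public FaultInducingSample {
        // The label column only takes the values 0 (clean) and 1 (fault-inducing).
        if (faultInducingLabel != 0 && faultInducingLabel != 1) {
            throw new IllegalArgumentException("FaultInducingLabel must be 0 or 1");
        }
    }
}

// Example: new FaultInducingSample("import java.util.Set;", 0)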
package org.apache.http.contrib.benchmark; import org.apache.http.message.BasicHeader; public class DefaultHeader extends BasicHeader { public DefaultHeader(final String name, final String value) {
0
// System.out.println("Rgns: " + srcORgn + " - " + srcNRgn); // System.out.println("ATs: " + oat + " - " + nat); // System.out.println // ("Adding: " + oat + " - " + nat + "\n" + // org.ImageDisplay.stringShape(oRgn) + "\n" + // org.ImageDisplay.stringShape(nRgn) + "\n"); boolean doPut = false; doPut = true; } else if (!dirtyNodes.containsKey(gnWRef)) doPut = true; if (doPut) { AffineTransform at = gn.getTransform(); if (at != null) at = (AffineTransform)at.clone(); dirtyNodes.put(gnWRef, at);
0
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
0
import java.io.IOException; import java.io.InputStream; import java.io.UnsupportedEncodingException; import java.util.Set; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import org.apache.xml.security.signature.XMLSignatureInput; import org.apache.xpath.CachedXPathAPI; import org.w3c.dom.Comment; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.w3c.dom.Text; import org.xml.sax.SAXException;
0
* Copyright 1999-2006 The Apache Software Foundation
0
throw new IOException("Follower is ahead of the leader, leader summary: " + leaderStateSummary.getCurrentEpoch() + " (current epoch), " + leaderStateSummary.getLastZxid() + " (last zxid)"); /** * Get string representation of a given packet type * @param packetType * @return string representing the packet type */ public static String getPacketType(int packetType) { switch (packetType) { case DIFF: return "DIFF"; case TRUNC: return "TRUNC"; case SNAP: return "SNAP"; case OBSERVERINFO: return "OBSERVERINFO"; case NEWLEADER: return "NEWLEADER"; case FOLLOWERINFO: return "FOLLOWERINFO"; case UPTODATE: return "UPTODATE"; case LEADERINFO: return "LEADERINFO"; case ACKEPOCH: return "ACKEPOCH"; case REQUEST: return "REQUEST"; case PROPOSAL: return "PROPOSAL"; case ACK: return "ACK"; case COMMIT: return "COMMIT"; case PING: return "PING"; case REVALIDATE: return "REVALIDATE"; case SYNC: return "SYNC"; case INFORM: return "INFORM"; default: return "UNKNOWN"; } }
0
assertEquals("true", config2.getString("configuration.loaded")); assertEquals("true", config2.getString("configuration.loaded")); /** * Tests whether the escape character for list delimiters can be itself * escaped and survives a save operation. */ public void testSaveEscapedEscapingCharacter() throws ConfigurationException { conf.addProperty("test.dirs", "C:\\Temp\\\\,D:\\Data\\\\,E:\\Test\\"); List dirs = conf.getList("test.dirs"); assertEquals("Wrong number of list elements", 3, dirs.size()); if (testSavePropertiesFile.exists()) { assertTrue(testSavePropertiesFile.delete()); } conf.save(testSavePropertiesFile); PropertiesConfiguration checkConfig = new PropertiesConfiguration( testSavePropertiesFile); ConfigurationAssert.assertEquals(conf, checkConfig); }
0
import org.apache.aurora.scheduler.resources.Resources;
0
filterValue = SVG_NONE_VALUE; operator = SVG_ARITHMETIC_VALUE; operator = SVG_ARITHMETIC_VALUE; operator = SVG_IN_VALUE; operator = SVG_OUT_VALUE; operator = SVG_IN_VALUE; operator = SVG_OUT_VALUE; Element compositeFilter = domFactory.createElement(SVG_FILTER_TAG); compositeFilter.setAttribute(SVG_FILTER_UNITS_ATTRIBUTE, SVG_OBJECT_BOUNDING_BOX_VALUE); compositeFilter.setAttribute(SVG_X_ATTRIBUTE, VALUE_ZERO_PERCENT); compositeFilter.setAttribute(SVG_Y_ATTRIBUTE, VALUE_ZERO_PERCENT); compositeFilter.setAttribute(SVG_WIDTH_ATTRIBUTE, VALUE_HUNDRED_PERCENT); compositeFilter.setAttribute(SVG_HEIGHT_ATTRIBUTE, VALUE_HUNDRED_PERCENT); Element feFlood = domFactory.createElement(SVG_FE_FLOOD_TAG); feFlood.setAttribute(SVG_FLOOD_COLOR_ATTRIBUTE, "white"); feFlood.setAttribute(SVG_FLOOD_OPACITY_ATTRIBUTE, "1"); Element feMerge = domFactory.createElement(SVG_FE_MERGE_TAG); Element feMergeNodeFlood = domFactory.createElement(SVG_FE_MERGE_NODE_TAG); Element feMergeNodeComposite = domFactory.createElement(SVG_FE_MERGE_NODE_TAG); Element groupOne = domFactory.createElement(SVG_G_TAG); Element groupTwo = domFactory.createElement(SVG_G_TAG); Element groupThree = domFactory.createElement(SVG_G_TAG); Element group = domFactory.createElement(SVG_G_TAG); rect.setAttribute(SVG_FILTER_ATTRIBUTE, compositeDesc.getFilterValue());
0
} catch (AssertionError | Exception e) {
0
import org.apache.accumulo.core.crypto.CryptoServiceFactory; mfw = new RFileOperations().newWriterBuilder() .forFile(file, fs, conf, CryptoServiceFactory.newDefaultInstance())
0
import java.util.concurrent.atomic.AtomicReference;
0
import java.nio.charset.StandardCharsets; import org.apache.beam.sdk.io.BoundedSource.BoundedReader; public void testEstimatedSizeBytes() { public void testRead() { @Override public void testWrite() { assertEquals(1000, service.getTable().size()); void load() { Map<Integer, Scientist> getTable() { public BoundedReader<Scientist> createReader(CassandraIO.CassandraSource source) { private static class FakeCassandraReader extends BoundedSource.BoundedReader { FakeCassandraReader(CassandraIO.CassandraSource source) { public boolean start() { public boolean advance() { size = size + scientist.toString().getBytes(StandardCharsets.UTF_8).length; public List<BoundedSource<Scientist>> split( CassandraIO.Read spec, long desiredBundleSizeBytes) { private static class FakeCassandraWriter implements Writer<Scientist> { static class Scientist implements Serializable { String name; int id; @Override
0
* @version CVS $Id: Cocoon.java,v 1.14 2003/09/04 19:41:58 bruno Exp $ params.setParameter("force-shutdown", "true");
0
public class H2AsyncMainClientExec implements AsyncExecChainHandler {
0
/** * @return a human readable explanation why the annotated operator is recommendation * for native implementation by an executor */ /** @return the state complexity */ StateComplexity state(); /** @return the number of global repartition operations */
0
checkCountOverflow(usage.m_count.incrementAndGet()); checkCountOverflow(usage.m_serviceObjectsCount.incrementAndGet()); private void checkCountOverflow(long c) { if (c == Long.MAX_VALUE) { throw new ServiceException( "The use count for the service overflowed.", ServiceException.UNSPECIFIED, null); } } long count = usage.m_count.decrementAndGet(); final AtomicLong m_count = new AtomicLong(); final AtomicLong m_serviceObjectsCount = new AtomicLong();
0
import javax.security.sasl.SaslException; public CustomQuorumPeer() throws SaslException { } protected QuorumPeer getQuorumPeer() throws SaslException {
0
/* * Copyright 2004-2005 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.portal.coplets.basket; /** * This is a per user basket that is persistent between sessions * Make a subclass to add your specific functionality * * @version CVS $Id: Basket.java 30941 2004-07-29 19:56:58Z vgritsenko $ */ public class Folder extends ContentStore { public Folder(String id) { super(id); } }
0
InputStream is = null; is = this.getClass().getClassLoader().getResourceAsStream(CLUSTER_DEFINITION_FILE); } finally { if (is != null) { try { is.close(); } catch (IOException ex) { } }
0
* One element of an HTTP {@link Header header} value. Header element * is a name / value pair with a number of optional name / value * parameters. /** * Returns header element name. * * @return header element name */ /** * Returns header element value. * * @return header element value */ /** * Returns an array of name / value pairs. * * @return array of name / value pairs */ /** * Returns the first parameter with the given name. * * @param name parameter name * * @return name / value pair */ /** * Returns the total count of parameters. * * @return parameter count */ /** * Returns parameter with the given index. * * @param index * @return name / value pair */
0
import org.apache.cocoon.portal.services.LayoutFactory;
0
span.getSpan().addKVAnnotation("files", ("" + smallestFiles.size())); span.getSpan().addKVAnnotation("read", ("" + mcs.getEntriesRead())); span.getSpan().addKVAnnotation("written", ("" + mcs.getEntriesWritten())); span.getSpan().addKVAnnotation("extent", ("" + getExtent())); span.getSpan().addKVAnnotation("read", ("" + majCStats.getEntriesRead())); span.getSpan().addKVAnnotation("written", ("" + majCStats.getEntriesWritten()));
0
* Gets the path encoded suitable for url like file system e.g. (http, webdav).
0
throw new RuntimeException(e); throw new RuntimeException(e); throw new RuntimeException(e);
0
import org.apache.accumulo.core.security.thrift.SecurityErrorCode; import org.apache.accumulo.core.security.tokens.PasswordToken;
0
import org.apache.beam.runners.spark.util.BroadcastHelper; import org.apache.beam.sdk.transforms.OldDoFn; import org.apache.beam.sdk.util.WindowedValue; import org.apache.beam.sdk.values.TupleTag; import org.apache.spark.api.java.function.PairFlatMapFunction; import org.joda.time.Instant;
0
import java.util.Set; import org.apache.aurora.gen.storage.StoredJobUpdateDetails; * TODO(wfarner): Generate immutable wrappers for storage.thrift structs, use an immutable object * here. Set<StoredJobUpdateDetails> fetchAllJobUpdateDetails(); /** * Determines whether an update ID represents a currently-active job update. * * @param updateId Job update ID. * @return {@code true} if this update has exclusive access to the job, otherwise {@code false}. */ boolean isActive(String updateId); * @param lockToken UUID identifying the lock associated with this update. void saveJobUpdate(IJobUpdate update, String lockToken);
0
LOG.info("writeToDisk == true but configFilename == null");
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ if (buffer == null) return null; if (bytesList == null) return null; if (bytesList == null) return null; if (bytes == null) return null;
0
CombinedConfiguration cc = factory.getConfiguration(true); String value = cc.getString("key1"); CombinedConfiguration cc = factory.getConfiguration(true); String value = cc.getString("key1");
0
private final Map<String, Plugin> plugins; this.plugins = new HashMap<String, Plugin>(); plugin = plugins.get( label ); Map<String, String> getLocalizedLabelMap( final ResourceBundleManager resourceBundleManager, final Locale locale ) final Map<String, String> map = new HashMap<String, String>(); oldPlugin = plugins.remove( label ); return plugins.values().toArray( new Plugin[plugins.size()] ); protected void doUngetConsolePlugin( @SuppressWarnings("unused") AbstractWebConsolePlugin consolePlugin ) public Enumeration<String> getInitParameterNames() return new Enumeration<String>() public String nextElement() public Enumeration<String> getInitParameterNames() return new Enumeration<String>() public String nextElement() Class<?> pluginClass = getClass().getClassLoader().loadClass(pluginClassName);
0
import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import org.junit.Test; public class StringUtilTest { @Test @Test @Test @Test
0
package org.apache.commons.codec;
1
this.cloneInfo.namespaceId = Namespaces.getNamespaceId(HdfsZooInstance.getInstance(), Tables.qualify(this.cloneInfo.tableName).getFirst()); Utils.checkTableDoesNotExist(environment.getInstance(), cloneInfo.tableName, cloneInfo.tableId, TableOperation.CLONE); TableManager.getInstance().cloneTable(cloneInfo.srcTableId, cloneInfo.tableId, cloneInfo.tableName, cloneInfo.namespaceId, cloneInfo.propertiesToSet,
0
* Copyright 2001-2006 The Apache Software Foundation
0
* or more contributor license agreements. See the NOTICE file * regarding copyright ownership. The ASF licenses this file * with the License. You may obtain a copy of the License at * KIND, either express or implied. See the License for the
0
import javax.net.ssl.SSLContext; import org.apache.hc.core5.reactor.ssl.TlsCapable; private final SSLContext sslContext; final ConnectionReuseStrategy connectionReuseStrategy, final SSLContext sslContext) { this.sslContext = sslContext; if (sslContext != null && ioSession instanceof TlsCapable) { ((TlsCapable) ioSession).startTls(sslContext, null ,null, null); }
0
import org.apache.accumulo.server.tabletserver.log.DfsLogger.DFSLoggerInputStreams; DFSLoggerInputStreams inputStreams = DfsLogger.readHeaderAndReturnStream(fs, srcPath, conf); this.input = inputStreams.getOriginalInput(); this.decryptingInput = inputStreams.getDecryptingInputStream();
0
import org.apache.accumulo.core.clientImpl.Credentials; if (job.isSuccessful()) { return 0; } else { System.out.println(job.getStatus().getFailureInfo()); return 1; } @SuppressFBWarnings(value = "PATH_TRAVERSAL_IN", justification = "path provided by test") File tf = folder.newFile("root_test.pw"); String outString = new Credentials(getAdminPrincipal(), getAdminToken()).serialize(); out.println(outString); Configuration conf = CachedConfiguration.getInstance(); conf.set("hadoop.tmp.dir", new File(tf.getAbsolutePath()).getParent()); conf.set("mapreduce.framework.name", "local"); conf.set("mapreduce.cluster.local.dir", new File(System.getProperty("user.dir"), "target/mapreduce-tmp").getAbsolutePath()); assertEquals(0, ToolRunner.run(conf, new MRTokenFileTester(), new String[] {tf.getAbsolutePath(), table1, table2}));
0
try (AccumuloClient c = getAccumuloClient()) { c.tableOperations().create(table1); c.tableOperations().create(table2); BatchWriter bw = c.createBatchWriter(table1, new BatchWriterConfig()); BatchWriter bw2 = c.createBatchWriter(table2, new BatchWriterConfig()); for (int i = 0; i < 100; i++) { Mutation t1m = new Mutation(new Text(String.format("%s_%09x", table1, i + 1))); t1m.put(new Text(), new Text(), new Value(String.format("%s_%09x", table1, i).getBytes())); bw.addMutation(t1m); Mutation t2m = new Mutation(new Text(String.format("%s_%09x", table2, i + 1))); t2m.put(new Text(), new Text(), new Value(String.format("%s_%09x", table2, i).getBytes())); bw2.addMutation(t2m); } bw.close(); bw2.close(); MRTester.main(new String[] {table1, table2}); assertNull(e1); assertNull(e2); }
0
@Override
0
import org.apache.commons.configuration.builder.BuilderParameters; CombinedConfigurationBuilder parentBuilder = new CombinedConfigurationBuilder() { @Override protected void initChildBuilderParameters( BuilderParameters params) { // set a property value; this should be overridden by // child builders if (params instanceof BasicBuilderParameters) { ((BasicBuilderParameters) params) .setListDelimiter('!'); } } }; new ConfigurationDeclaration(parentBuilder, declConfig)
0
Predicate predicate = new GreaterPredicate<>(propertyId, 10); GreaterPredicate predicate = new GreaterPredicate<>(propertyId, 10); Predicate predicate = new GreaterPredicate<>(propertyId, 0.1);
1
* Returns a content stream of the entity. * {@link #isRepeatable Repeatable} entities are expected * to create a new instance of {@link InputStream} for each invocation * of this method and therefore can be consumed multiple times. * Entities that are not {@link #isRepeatable repeatable} are expected * to return the same {@link InputStream} instance and therefore * may not be consumed more than once. * @return content stream of the entity. * if content stream cannot be created. * * @see #isRepeatable()
0
Type targetKeyType = null, targetValueType = null; if (typeArguments != null && typeArguments.length > 1) { targetKeyType = typeArguments[0]; targetValueType = typeArguments[1];
0
import org.apache.hc.core5.testing.SSLTestContexts; scheme == ProtocolScheme.HTTPS ? SSLTestContexts.createClientSSLContext() : null);
0
AccumuloConfiguration acuconf = FileOutputConfigurator .getAccumuloConfiguration(AccumuloFileOutputFormat.class, job1.getConfiguration()); assertEquals(new SamplerConfigurationImpl(samplerConfig), SamplerConfigurationImpl.newSamplerConfig(acuconf)); acuconf = FileOutputConfigurator.getAccumuloConfiguration(AccumuloFileOutputFormat.class, job2.getConfiguration()); assertEquals(new SamplerConfigurationImpl(samplerConfig), SamplerConfigurationImpl.newSamplerConfig(acuconf));
0
private void addChangeRecord(ChangeRecord c) {
0
import static java.util.concurrent.TimeUnit.SECONDS; import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.AGGREGATOR_CHECKPOINT_DELAY; import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.TimelineMetricConfiguration.RESULTSET_FETCH_SIZE; import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.AggregatorUtils.getRoundedAggregateTimeMillis; import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.aggregators.AggregatorUtils.getRoundedCheckPointTimeMillis; import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.availability.AggregationTaskRunner.ACTUAL_AGGREGATOR_NAMES; import java.io.File; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.Date; import java.util.Iterator; import java.util.List; import org.slf4j.LoggerFactory;
0
private TemporaryFolder folder1 = new TemporaryFolder(new File(System.getProperty("user.dir") + "/target"));
0
Node selectedElem = null; selectedElem = doc; //Set resultSet = dereferenceSameDocumentURI(selectedElem); XMLSignatureInput result = new XMLSignatureInput(selectedElem); result.setExcludeComments(true); //log.debug("We return a nodeset with " + resultSet.size() + " nodes");
0
private final InstructionHandle[] targets;
0
import org.apache.beam.sdk.schemas.annotations.DefaultSchema;
0
import org.apache.hc.core5.io.ShutdownType; local.initiateShutdown(); try { local.awaitTermination(gracePeriod, timeUnit); } catch (InterruptedException e) { Thread.currentThread().interrupt(); } local.shutdown(ShutdownType.IMMEDIATE);
0
public static final class LogLevel { public static final LogLevel DEBUG = new LogLevel( "DEBUG", 0 ); public static final LogLevel INFO = new LogLevel( "INFO", 1 ); public static final LogLevel WARN = new LogLevel( "WARN", 2 ); public static final LogLevel ERROR = new LogLevel( "ERROR", 3 ); public static final LogLevel FATAL_ERROR = new LogLevel( "FATAL_ERROR", 4 ); public static final LogLevel DISABLED = new LogLevel( "NONE", 5 ); public static LogLevel getLogLevelForName(final String name) { if( DEBUG.getName().equals( name ) ) { return DEBUG; } else if( INFO.getName().equals( name ) ) { return INFO; } else if( WARN.getName().equals( name ) ) { return WARN; } else if( ERROR.getName().equals( name ) ) { return ERROR; } else if( FATAL_ERROR.getName().equals( name ) ) { return FATAL_ERROR; } else if( DISABLED.getName().equals( name ) ) { return DISABLED; } else { return DEBUG; } } private final String name; private final int level; public LogLevel(String name, int level) { this.name = name; this.level = level; } public int getLevel() { return this.level; } public String getName() { return this.name; } } /** * Get the bootstrap logger. * @param logLevel The log level to use according to the {@link Logger} interface. */ Logger getBootstrapLogger(LogLevel logLevel);
0
import org.apache.accumulo.harness.AccumuloClusterIT; public class SplitRecoveryIT extends AccumuloClusterIT {
0
import static org.apache.beam.sdk.extensions.sql.impl.utils.CalciteUtils.VARCHAR; Schema payloadSchema = Schema.builder().addInt32Field("id").addStringField("name").build(); Schema.builder() .addDateTimeField("event_timestamp") .addMapField("attributes", VARCHAR, VARCHAR) .addRowField("payload", payloadSchema) Schema payloadSchema = Schema.builder().addInt32Field("id").addStringField("name").build(); Schema.builder() .addDateTimeField("event_timestamp") .addMapField("attributes", VARCHAR, VARCHAR) .addRowField("payload", payloadSchema)
0
import java.util.concurrent.atomic.AtomicReference; import org.apache.aurora.scheduler.app.ServiceGroupMonitor; import static org.easymock.EasyMock.expectLastCall; private AtomicReference<ImmutableSet<ServiceInstance>> schedulers; ServiceGroupMonitor serviceGroupMonitor = createMock(ServiceGroupMonitor.class); bind(ServiceGroupMonitor.class).toInstance(serviceGroupMonitor); schedulers = new AtomicReference<>(ImmutableSet.of()); serviceGroupMonitor.start(); expectLastCall(); expect(serviceGroupMonitor.get()).andAnswer(schedulers::get).anyTimes(); schedulers.set( schedulers.set(ImmutableSet.of());
0
* Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
0
Buffer buffer = session.createBuffer(SshConstants.SSH_MSG_CHANNEL_OPEN);
0
private ThreadLocal<DateFormat> dateFormatThreadLocal = new ThreadLocal<DateFormat>(){ @Override protected DateFormat initialValue() { //2016-03-11T10:42:36.376Z return new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSX"); } };
1
/** * Sets the default Kafka topic to write to. Use {@code ProducerRecords} to set topic name per * published record. */ checkArgument(getTopic() != null, "withTopic() is required when isEOS() is true");
0
import org.apache.accumulo.core.iterators.user.IntersectingIterator;
0
public int execute(final String fullCommand, final CommandLine cl, final Shell shellState) throws Exception { final String tableName = OptUtil.getTableOpt(cl, shellState); final List<IteratorSetting> tableScanIterators = shellState.scanIteratorOptions.remove(tableName); if (tableScanIterators == null) { } else { } final String name = cl.getOptionValue(nameOpt.getOpt()); final List<IteratorSetting> tableScanIterators = shellState.scanIteratorOptions.get(tableName); if (!found) { } else { if (shellState.scanIteratorOptions.get(tableName).size() == 0) { } final Options o = new Options();
0
super(logFactory, self.tickTime, self.minSessionTimeout, self.maxSessionTimeout, self.clientPortListenBacklog, zkDb, self);
0
if (splits.size() == 0) { splits = ImmutableList.of(boundedSource); }
0
@Override public String toString() @Override public String stringifyValue() @Override public void dump(DataOutputStream dos) throws IOException
0
* Copyright 2001-2005 The Apache Software Foundation
0
if (saslEnabled()) { opts.updateKerberosCredentials(); vopts.updateKerberosCredentials();
0
@Test public void testFELIX2987(BundleContext context) { // mimics testComponentWithRequiredConfigurationAndServicePropertyPropagation DependencyManager m = new DependencyManager(context); // helper class that ensures certain steps get executed in sequence Ensure e = new Ensure(); // create a service provider and consumer Component s1 = m.createComponent().setImplementation(new ConfigurationConsumer2(e)).setInterface(Runnable.class.getName(), null).add(m.createConfigurationDependency().setPid("test").setPropagate(true)); Component s2 = m.createComponent().setImplementation(new ConfigurationCreator(e)).add(m.createServiceDependency().setService(ConfigurationAdmin.class).setRequired(true)); Component s3 = m.createComponent().setImplementation(new ConfiguredServiceConsumer(e)).add(m.createServiceDependency().setService(Runnable.class, ("(testkey=testvalue)")).setRequired(true)); m.add(s1); m.add(s2); m.add(s3); e.waitForStep(4, 15000); m.remove(s1); m.remove(s2); m.remove(s3); // ensure we executed all steps inside the component instance e.step(5); } static class ConfigurationConsumer2 extends ConfigurationConsumer { public ConfigurationConsumer2(Ensure e) { super(e); } }
0
/* * Copyright (c) OSGi Alliance (2012, 2013). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Namespace Package Version 1.1. * * <p> * Bundles should not need to import this package at runtime since all * the types in this package just contain constants for capability and * requirement namespaces specified by the OSGi Alliance. * * @author $Id$ */ @Version("1.1") package org.osgi.framework.namespace; import org.osgi.annotation.versioning.Version;
0
* limitations under the License.
0
QueryResponse queryForResources(Request request, Predicate predicate)
0
private DirectGraph graph; ConsumerTrackingPipelineVisitor visitor = new ConsumerTrackingPipelineVisitor(); p.traverseTopologically(visitor); graph = visitor.getGraph(); manager = WatermarkManager.create(clock, graph); ConsumerTrackingPipelineVisitor trackingVisitor = new ConsumerTrackingPipelineVisitor(); p.traverseTopologically(trackingVisitor); DirectGraph graph = trackingVisitor.getGraph(); AppliedPTransform<?, ?, ?> theFlatten = graph.getProducer(multiConsumer); WatermarkManager tstMgr = WatermarkManager.create(clock, graph);
0
package org.apache.accumulo.server.test.functional; import java.util.Iterator; import org.apache.accumulo.core.data.Key; import org.apache.accumulo.core.data.Value; import org.apache.accumulo.core.iterators.Combiner; public class BadCombiner extends Combiner { @Override public Value reduce(Key key, Iterator<Value> iter) { throw new IllegalStateException(); } }
1
import java.util.Objects; return this.equals(other); @Override public int hashCode() { return Objects.hash(countElems); }
0
import org.osgi.util.converter.TypeReference;
0
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.vfs2.cache; import org.apache.commons.vfs2.FileObject; /** * Tests for {@link SoftRefFilesCache} used by {@link SoftRefFilesCacheTestCase}. */ public class SoftRefFilesCacheTests extends FilesCacheTestsBase { public void testFilesCache() throws Exception { final FileObject scratchFolder = getWriteFolder(); final FileObject dir1 = scratchFolder.resolveFile("dir1"); final FileObject dir1_2 = scratchFolder.resolveFile("dir1"); // since both are still referenced they are not purged assertSame(dir1, dir1_2); } public void testClass() { assertTrue(getManager().getFilesCache() instanceof SoftRefFilesCache); } }
0
conf.set("mapreduce.cluster.local.dir", new File(System.getProperty("user.dir"), "target/mapreduce-tmp").getAbsolutePath());
0
* @deprecated (4.2) use {@link AuthenticationStrategy} @Deprecated
0
final StringBuilder data = new StringBuilder(); try { accept( ToStringVisitor.INSTANCE, data ); } catch ( OgnlException e ) { // ignored. } return data.toString();
0
import org.apache.commons.configuration.ex.ConfigurationException; import org.apache.commons.configuration.ex.ConfigurationRuntimeException;
0
long delayMillis = 50; tailer = new Tailer(file, listener, delayMillis, false); final long testDelayMillis = delayMillis * 10; Thread.sleep(testDelayMillis); Thread.sleep(testDelayMillis); Thread.sleep(testDelayMillis); Thread.sleep(testDelayMillis); Thread.sleep(testDelayMillis);
0
import org.apache.avalon.framework.service.ServiceException; import org.apache.avalon.framework.service.ServiceManager; import org.apache.avalon.framework.service.ServiceSelector; import org.apache.avalon.framework.service.Serviceable; * @version CVS $Id$ implements Serviceable, Disposable { /** The service manager instance */ protected ServiceManager manager; protected ServiceSelector inputSelector; protected InputModule input; protected Configuration inputConf; // will become an empty configuration object // during configure() so why bother here... public void service(ServiceManager manager) throws ServiceException { this.inputSelector=(ServiceSelector) this.manager.lookup(INPUT_MODULE_SELECTOR); ServiceSelector inputSelector = this.inputSelector; inputSelector=(ServiceSelector) this.manager.lookup(INPUT_MODULE_SELECTOR); if (inputSelector.isSelectable(type)){ if (type != null && inputSelector.isSelectable(type)) +(inputSelector!=null && inputSelector.isSelectable(type)?"known":"unknown")); } catch (ServiceException ce) { ServiceSelector inputSelector = this.inputSelector; inputSelector=(ServiceSelector) this.manager.lookup(INPUT_MODULE_SELECTOR); } catch (ServiceException ce) { ServiceSelector cs = this.inputSelector; cs = (ServiceSelector) this.manager.lookup(INPUT_MODULE_SELECTOR); } catch (ServiceException e) { if (cs.isSelectable(name)) { } catch (ServiceException e) {
0
new DBAccessor.DBColumnInfo(USER_AUTHENTICATION_AUTHENTICATION_KEY_COLUMN, String.class, 2048, null, true),
0
} catch (Throwable t) { LOG.fatal("Severe unrecoverable error, exiting", t); LOG.info("SyncRequestProcessor exited!");
0
import com.twitter.aurora.scheduler.storage.Storage.Work.Quiet; if (Quotas.greaterThan(additionalQuota, Quotas.noQuota())) { public boolean hasRemaining(final String role, final Quota quota) { return storage.consistentRead(new Quiet<Boolean>() { @Override public Boolean apply(StoreProvider storeProvider) { Quota reserved = storeProvider.getQuotaStore().fetchQuota(role).or(Quotas.noQuota()); return Quotas.geq(reserved, Quotas.add(getConsumption(role), quota));
0
* * * * * * * *
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
import org.apache.batik.dom.AbstractElement; import org.apache.batik.util.SVGConstants; ((AbstractElement) doc.getDocumentElement()).setIdAttributeNS(null, "blah", true);
0
/** * Test Flink runner. */ FlinkPipelineOptions flinkOptions = PipelineOptionsValidator.validate(FlinkPipelineOptions.class, options); OutputT apply(PTransform<InputT, OutputT> transform, InputT input) {
0
protected String name; name = Arrays.asList(args).toString(); log.debug("Executing command {}", name); new Thread(this, "ScpCommand: " + name).start(); if (file.exists() && file.isDirectory()) { throw new IOException("File is a directory: " + file); } else if (file.exists() && !file.canWrite()) { throw new IOException("Can not write to file: " + file); }
0
for (String arg : args) { System.out.println("Last modified: " + DateFormat.getInstance().format( new Date(file.getContent().getLastModifiedTime())));
0
* @author <a href="mailto:[email protected]">Henning P. Schmiedehausen</a> * @version $Id: ClassPropertiesConfiguration.java,v 1.8 2004/06/24 12:35:14 ebourg Exp $ throw new FileNotFoundException("Could not open Resource " + resourceName);
0
import org.apache.accumulo.core.client.Accumulo; client = Accumulo.newClient().from(getClientProperties()).build();
0
$Id: BooleanVariableAssignmentImpl.java,v 1.1 2004/03/01 12:30:49 cziegeler Exp $ package org.apache.cocoon.workflow.impl; import org.apache.cocoon.workflow.BooleanVariableAssignment; import org.apache.cocoon.workflow.WorkflowException; import org.apache.cocoon.workflow.WorkflowInstance; * @version $Id: BooleanVariableAssignmentImpl.java,v 1.1 2004/03/01 12:30:49 cziegeler Exp $
0
return TypeInformation.of((Class) MultiWindowedElement.class);
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
1
.setUrl(dockerImageUrl)
0