Columns:
  Diff - string, lengths 5 to 2k
  FaultInducingLabel - int64, values 0 to 1
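Each row below is a Diff value followed on the next line by its FaultInducingLabel (1 presumably marking a change later found to be fault-inducing). As a minimal sketch of how such rows could be consumed - assuming an export to JSON Lines with keys matching the column names above, and with the file name rows.jsonl purely hypothetical - a loader might look like this:

# Minimal loading sketch, not the dataset's official tooling.
# Assumptions: rows are stored as JSON Lines with keys "Diff" and
# "FaultInducingLabel"; the path "rows.jsonl" is hypothetical.
import json
from dataclasses import dataclass
from typing import List

@dataclass
class Row:
    diff: str   # the code-change text (5 to 2k characters)
    label: int  # FaultInducingLabel: 0 or 1

def load_rows(path: str) -> List[Row]:
    rows = []
    with open(path, encoding="utf-8") as f:
        for line in f:
            record = json.loads(line)
            rows.append(Row(diff=record["Diff"],
                            label=int(record["FaultInducingLabel"])))
    return rows

if __name__ == "__main__":
    rows = load_rows("rows.jsonl")
    positives = sum(r.label for r in rows)
    print(f"{positives} of {len(rows)} rows are labeled 1")

Keeping the label as a plain int mirrors the int64 column type; nothing in the sketch depends on how the dump was originally hosted.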
ClientInfo clientInfo = getClientInfo(job); String principal = clientInfo.getPrincipal(); AuthenticationToken token = clientInfo.getAuthenticationToken(); try (AccumuloClient c = Accumulo.newClient().from(clientInfo.getProperties()).build()) {
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//validator/src/test/org/apache/commons/validator/TestCommon.java,v 1.2 2003/09/06 05:22:47 rleland Exp $ * $Revision: 1.2 $ * $Date: 2003/09/06 05:22:47 $ abstract public class TestCommon extends TestCase {
0
* Creates a upload progress listener that emits relevant statistics about the
0
private final String headerName;
0
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import org.junit.Test; * @version $Id$ public class TestBasicDataSourceFactory { @Test @Test
0
import org.apache.accumulo.core.security.tokens.PasswordToken;
0
public class MemoryIterator extends WrappingIterator implements InterruptibleIterator {
1
* shutdown the connection. Use #shutdown() instead.
0
//TODO : consider using getOpCode //TODO : fix bug on double-space //TODO : use a logger //TODO : merge this class with another class only static method. //TODO use logger.
0
LOG.debug("Created transform for algorithm: {}", getAlgorithm()); LOG.debug("ApacheData = true"); LOG.debug("isNodeSet() = true"); LOG.debug("DOMSubTreeData = true"); LOG.debug("isNodeSet() = false");
0
import org.apache.commons.digester3.internal.DigesterImpl;
0
* @throws IOException If an error occurs while fetching the time. * @throws IOException If an error occurs while fetching the time.
0
static Map<File,WeakReference<Workspace>> cache = newHashMap(); final Map<String,Project> models = newHashMap(); final Map<String,Action> commands = newMap(); @Override public void propertiesChanged() { } catch (Exception e) { public void fillActions(Map<String,Action> all) { } catch (Exception e) { } catch (Exception e) { * Signal a BndListener plugin. We ran an infinite bug loop :-( final ThreadLocal<Reporter> signalBusy = new ThreadLocal<Reporter>(); if (signalBusy.get() != null) } catch (Exception e) { } catch (Exception e) { } finally { @Override public void signal() { throw new TimeLimitExceededException("Cached File Repo is locked and can't acquire it"); } finally { if (!dest.isFile() || dest.lastModified() < jentry.getTime() || jentry.getTime() == 0) { } finally { } finally { @Override protected void setTypeSpecificPlugins(Set<Object> list) {
0
* Copyright 2004-2006 The Apache Software Foundation. import org.apache.commons.configuration.event.ConfigurationEvent; import org.apache.commons.configuration.event.ConfigurationListener; public void testClone() { MapConfiguration config = (MapConfiguration) getConfiguration(); MapConfiguration copy = (MapConfiguration) config.clone(); StrictConfigurationComparator comp = new StrictConfigurationComparator(); assertTrue("Configurations are not equal", comp.compare(config, copy)); } /** * Tests if the cloned configuration decoupled from the original. */ public void testCloneModify() { MapConfiguration config = (MapConfiguration) getConfiguration(); config.addConfigurationListener(new ConfigurationListener() { public void configurationChanged(ConfigurationEvent event) { // Just a dummy } }); MapConfiguration copy = (MapConfiguration) config.clone(); assertTrue("Event listeners were copied", copy .getConfigurationListeners().isEmpty()); config.addProperty("cloneTest", Boolean.TRUE); assertFalse("Map not decoupled", copy.containsKey("cloneTest")); copy.clearProperty("key1"); assertEquals("Map not decoupled (2)", "value1", config .getString("key1")); }
0
import org.apache.accumulo.core.security.thrift.SecurityErrorCode; import org.apache.accumulo.core.security.thrift.ThriftSecurityException;
0
public int compare(final File file1, final File file2) { final long result = file1.lastModified() - file2.lastModified();
1
private Map<HostAndPort,Long> firstSeenDead; firstSeenDead = new HashMap<>(); gcwal = new GarbageCollectWriteAheadLogs(context, volMgr, false, firstSeenDead); ReplicationGCWAL(AccumuloServerContext context, VolumeManager fs, boolean useTrash, Map<HostAndPort,Long> firstSeenDead, List<Entry<Key,Value>> replData) throws IOException { super(context, fs, useTrash, firstSeenDead); ReplicationGCWAL replGC = new ReplicationGCWAL(null, volMgr, false, firstSeenDead, replData); GarbageCollectWriteAheadLogs gcWALs = new GarbageCollectWriteAheadLogs(context, volMgr, false, firstSeenDead); GarbageCollectWriteAheadLogs gcWALs = new GarbageCollectWriteAheadLogs(context, volMgr, false, firstSeenDead); GarbageCollectWriteAheadLogs gcWALs = new GarbageCollectWriteAheadLogs(context, volMgr, false, firstSeenDead); GarbageCollectWriteAheadLogs gcWALs = new GarbageCollectWriteAheadLogs(context, volMgr, false, firstSeenDead); public GCWALPartialMock(AccumuloServerContext ctx, VolumeManager vm, boolean useTrash, Map<HostAndPort,Long> firstSeenDead, boolean holdLock) throws IOException { super(ctx, vm, useTrash, firstSeenDead); return new GCWALPartialMock(ctx, VolumeManagerImpl.get(), false, firstSeenDead, locked); public GCWALDeadTserverCollectMock(AccumuloServerContext ctx, VolumeManager vm, boolean useTrash, Map<HostAndPort,Long> firstSeenDead) throws IOException { super(ctx, vm, useTrash, firstSeenDead); GarbageCollectWriteAheadLogs gcwal2 = new GCWALDeadTserverCollectMock(ctx, vm, false, firstSeenDead);
0
String attrName = evt.getAttrName(); if (attrName.equals(SVG_X1_ATTRIBUTE) || attrName.equals(SVG_Y1_ATTRIBUTE) || attrName.equals(SVG_X2_ATTRIBUTE) || attrName.equals(SVG_Y2_ATTRIBUTE)) {
0
import com.google.common.collect.Lists;
0
@Override public void visit(State state, Properties props) throws Exception { Connector conn = state.getConnector(); Random rand = (Random) state.get("rand"); @SuppressWarnings("unchecked") List<String> tableNames = (List<String>) state.get("tables"); String tableName = tableNames.get(rand.nextInt(tableNames.size())); // TODO need to sometimes do null start and end ranges TreeSet<Text> range = new TreeSet<Text>(); range.add(new Text(String.format("%016x", Math.abs(rand.nextLong())))); range.add(new Text(String.format("%016x", Math.abs(rand.nextLong())))); try { conn.tableOperations().merge(tableName, range.first(), range.last()); log.debug("merged " + tableName); } catch (TableOfflineException toe) { log.debug("merge " + tableName + " failed, table is not online"); } catch (TableNotFoundException tne) { log.debug("merge " + tableName + " failed, doesnt exist"); }
1
log( LogService.LOG_DEBUG, "Disabling Component", cm[i].getComponentMetadata(), null );
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//digester/src/java/org/apache/commons/digester/plugins/PluginRules.java,v 1.5 2003/11/02 23:26:59 rdonkin Exp $ * $Revision: 1.5 $ * $Date: 2003/11/02 23:26:59 $ throw new PluginAssertionFailure( throw new PluginAssertionFailure( throw new PluginAssertionFailure(
0
import java.util.Arrays; import org.junit.Before; import org.junit.Test; import com.twitter.mesos.gen.Identity; first.getAssignedTask().getTask().setOwner(new Identity("A", "A")); second.getAssignedTask().getTask().setOwner(new Identity("B", "B")); new TwitterTaskInfo() .setOwner(new Identity("jake", "jake")) .setJobName("spin") .setShardId(42);
0
/* * Copyright 1999-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.faces.context; import org.apache.cocoon.environment.Request; import java.util.Enumeration; import java.util.HashSet; import java.util.Set; /** * Request header values map * * @author <a href="mailto:[email protected]">Vadim Gritsenko</a> * @version CVS $Id$ */ class RequestHeaderValuesMap extends BaseMap { private Request request; RequestHeaderValuesMap(Request request) { this.request = request; } public Object get(Object key) { return request.getHeaders(key.toString()); } public Set entrySet() { Set entries = new HashSet(); for (Enumeration e = request.getHeaderNames(); e.hasMoreElements();) { String name = (String) e.nextElement(); entries.add(new BaseMap.Entry(name, request.getHeaders(name))); } return entries; } public boolean equals(Object obj) { if (obj == null || !(obj instanceof RequestHeaderValuesMap)) { return false; } return super.equals(obj); } }
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//beanutils/src/java/org/apache/commons/beanutils/WrapDynaBean.java,v 1.7 2003/10/09 20:43:15 rdonkin Exp $ * $Revision: 1.7 $ * $Date: 2003/10/09 20:43:15 $ * Alternately, this acknowledgement may appear in the software itself, * if and wherever such third-party acknowledgements normally appear. * @version $Revision: 1.7 $ $Date: 2003/10/09 20:43:15 $
0
import java.util.Map; case ENUM8: case ENUM16: return Schema.FieldType.STRING; ENUM8, ENUM16, public abstract Map<String, Integer> enumValues(); @Nullable public ColumnType withNullable(boolean nullable) { return toBuilder().nullable(nullable).build(); } public static ColumnType enum8(Map<String, Integer> enumValues) { return ColumnType.builder() .typeName(TypeName.ENUM8) .nullable(false) .enumValues(enumValues) .build(); } public static ColumnType enum16(Map<String, Integer> enumValues) { return ColumnType.builder() .typeName(TypeName.ENUM16) .nullable(false) .enumValues(enumValues) .build(); } case ENUM8: case ENUM16: abstract Builder toBuilder(); public abstract Builder enumValues(Map<String, Integer> enumValues);
0
* @param conn the {@link java.sql.Connection Connection} from which I was created
0
bar1Factory = (ComponentFactory) Utils.getFactoryByName(getContext(), "composite.bar.3"); Factory fact = Utils.getFactoryByName(getContext(), "composite.empty"); ComponentFactory fact1 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-1"); ComponentFactory fact2 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-2"); ComponentFactory fact3 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-3"); ComponentFactory fact1 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-1"); ComponentFactory fact2 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-2"); ComponentFactory fact3 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-3"); ServiceReference ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "under"); Architecture arch = (Architecture) getContext().getService(ref); ComponentFactory fact1 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-1"); ComponentFactory fact2 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-2"); ComponentFactory fact3 = (ComponentFactory) Utils.getFactoryByName(getContext(), "COMPO-FooBarProviderType-3"); ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "under"); arch = (Architecture) getContext().getService(ref); ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "under"); arch = (Architecture) getContext().getService(ref); ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "under"); arch = (Architecture) getContext().getService(ref); ref = Utils.getServiceReferenceByName(getContext(), Architecture.class.getName(), "under"); arch = (Architecture) getContext().getService(ref); getContext().ungetService(ref);
0
* Copyright 2002,2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
0
return getResourcesLocal(name);
0
import org.apache.ambari.server.state.SecurityType; ClusterRequest r = new ClusterRequest(null, clusterName, State.INSTALLED.name(), SecurityType.NONE, stackName, null);
0
int lastColon = a.lastIndexOf(':');
0
TimelineMetric counterMetric = new TimelineMetric(); counterMetric.setMetricName("TestMetric"); counterMetric.setHostName("TestHost"); counterMetric.setAppId("TestAppId"); counterMetric.setMetricValues(metricValues); counterMetric.setType("COUNTER"); Map<TimelineClusterMetric, Double> timelineClusterMetricMap = secondAggregator.sliceFromTimelineMetric(counterMetric, timeSlices); TimelineClusterMetric timelineClusterMetric = new TimelineClusterMetric(counterMetric.getMetricName(), counterMetric.getAppId(), counterMetric.getInstanceId(), 0l, null); timelineClusterMetric.setTimestamp(roundedStartTime + 2*sliceInterval); Assert.assertTrue(timelineClusterMetricMap.containsKey(timelineClusterMetric)); Assert.assertEquals(timelineClusterMetricMap.get(timelineClusterMetric), 6.0); timelineClusterMetric.setTimestamp(roundedStartTime + 4*sliceInterval); Assert.assertTrue(timelineClusterMetricMap.containsKey(timelineClusterMetric)); Assert.assertEquals(timelineClusterMetricMap.get(timelineClusterMetric), 12.0); timelineClusterMetricMap = secondAggregator.sliceFromTimelineMetric(metric, timeSlices); timelineClusterMetric = new TimelineClusterMetric(metric.getMetricName(), metric.getAppId(), Assert.assertEquals(timelineClusterMetricMap.get(timelineClusterMetric), 4.5); Assert.assertEquals(timelineClusterMetricMap.get(timelineClusterMetric), 7.5);
0
/** -1 is reserved for the http service servlet context. */ private final AtomicLong idCounter = new AtomicLong(-1);
0
String[][] data = { {"a", "0"}, {"e", "0"}, {"i", "0"}, {"o", "0"}, {"u", "0"}, {"\u00E4", "0"}, {"\u00F6", "0"}, {"\u00FC", "0"}, {"aa", "0"}, {"ha", "0"}, {"h", ""}, {"aha", "0"}, {"b", "1"}, {"p", "1"}, {"ph", "3"}, {"f", "3"}, {"v", "3"}, {"w", "3"}, {"g", "4"}, {"k", "4"}, {"q", "4"}, {"x", "48"}, {"ax", "048"}, {"cx", "48"}, {"l", "5"}, {"cl", "45"}, {"acl", "085"}, {"mn", "6"}, {"r", "7"}}; assertEquals("Failed to correctly convert element of index: " + i, data[i][1], koellePhon.colognePhonetic(data[i][0])); String[][] data = {{"Müller-Lüdenscheidt", "65752682"}, {"Breschnew", "17863"}, {"Wikipedia", "3412"}}; assertFalse("Cologne-phonetic encodings should not be equal", koellePhon.isCologneEqual("Meyer", "Müller")); assertTrue("Cologne-phonetic encodings should be equal", koellePhon.isCologneEqual("Meyer", "Mayr"));
1
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.MoreObjects;
0
* @version $Id$
0
out.println(" " + caps[i].getName() + ":" + caps[i].getPropertiesAsMap());
0
import org.apache.flink.streaming.api.functions.source.ParallelSourceFunction; import org.apache.flink.streaming.api.functions.source.SourceFunction; public class UnboundedFlinkSource<T> extends UnboundedSource<T, UnboundedSource.CheckpointMark> { private final SourceFunction<T> flinkSource; public UnboundedFlinkSource(SourceFunction<T> source) { public SourceFunction<T> getFlinkSource() { public List<? extends UnboundedSource<T, UnboundedSource.CheckpointMark>> generateInitialSplits(int desiredNumSplits, PipelineOptions options) throws Exception { public UnboundedReader<T> createReader(PipelineOptions options, @Nullable CheckpointMark checkpointMark) { public Coder<UnboundedSource.CheckpointMark> getCheckpointMarkCoder() { /** * Creates a new unbounded source from a Flink source. * @param flinkSource The Flink source function * @param <T> The type that the source function produces. * @return The wrapped source function. */ public static <T> UnboundedSource<T, UnboundedSource.CheckpointMark> of(SourceFunction<T> flinkSource) { return new UnboundedFlinkSource<>(flinkSource); }
0
* * @author <a href="mailto:[email protected]">Felix Project Team</a>
0
patternDef.setAttributeNS(null, SVG_PATTERN_UNITS_ATTRIBUTE,
0
import org.apache.hc.client5.http.impl.sync.RoutedHttpRequest;
0
Element[] handlers = m_elements[0].getElements("Handler"); Element[] all = new Element[components.length + composites.length + handlers.length]; for (int i = 0; i < handlers.length; i++) { all[l] = handlers[i]; l++; }
0
* @see org.apache.cocoon.portal.profile.ProfileManager#getLayout(java.lang.String) public Layout getLayout(String layoutID) { final String layoutKey = this.portalService.getDefaultLayoutKey();
0
options.getBar(); options.getBar();
0
hiveSiteProperties.put("hive.cluster.delegation.token.store.zookeeper.connectString", expectedHostName + ":" + expectedPortNum + "," + expectedHostNameTwo + ":" + expectedPortNum); assertEquals("hive zookeeper connectString property not properly exported", createExportedAddress(expectedPortNum, expectedHostGroupName) + "," + createExportedAddress(expectedPortNum, expectedHostGroupNameTwo), hiveSiteProperties.get("hive.cluster.delegation.token.store.zookeeper.connectString"));
0
.append( ret )
0
/* * Copyright (C) 2016 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.cloud.dataflow.sdk.options; /** * Properties that can be set when using Pubsub with the Dataflow SDK. */ @Description("Options that are used to configure BigQuery. See " + "https://cloud.google.com/bigquery/what-is-bigquery for details on BigQuery.") public interface PubsubOptions extends ApplicationNameOptions, GcpOptions, PipelineOptions, StreamingOptions { /** * Root URL for use with the Pubsub API. */ @Description("Root URL for use with the Pubsub API") @Default.String("https://pubsub.googleapis.com") @Hidden String getPubsubRootUrl(); void setPubsubRootUrl(String value); }
0
private final CommandLauncher launcher;
0
import org.apache.commons.configuration.sync.NoOpSynchronizer;
0
import org.apache.accumulo.server.fs.VolumeChooserEnvironment; VolumeChooserEnvironment chooserEnv = new VolumeChooserEnvironment(Optional.empty()); logPath = fs.choose(chooserEnv, ServerConstants.getBaseUris()) + Path.SEPARATOR + ServerConstants.WAL_DIR + Path.SEPARATOR + logger + Path.SEPARATOR + filename;
0
@Nullable PTransform<PCollection<UserT>, PCollectionView<Integer>> computeNumShards); @Nullable ValueProvider<Integer> numShardsProvider); private final @Nullable PCollectionView<Integer> numShardsView; @Nullable PCollectionView<Integer> numShardsView) {
0
stackInfo = new StackInfo(); } else { } stackInfo.setLibraryClassLoader(stackDirectory.getLibraryClassLoader()); if (service.getModuleInfo().getRoleCommandOrder() == null) { } errorSet.addAll(errors);
0
setDocument(doc); setElement(XMLUtils.createElementInSignatureSpace(doc, this.getBaseLocalName()));
0
import org.apache.beam.sdk.transforms.SerializableFunction; /** * register {@link SerializableFunction} as a UDF function used in this query. * Note, {@link SerializableFunction} must have a constructor without arguments. */ public QueryTransform withUdf(String functionName, SerializableFunction sfn){ getSqlEnv().registerUdf(functionName, sfn); return this; } /** * register {@link SerializableFunction} as a UDF function used in this query. * Note, {@link SerializableFunction} must have a constructor without arguments. */ public SimpleQueryTransform withUdf(String functionName, SerializableFunction sfn){ getSqlEnv().registerUdf(functionName, sfn); return this; }
0
response.setEntity(new NStringEntity(message, ContentType.DEFAULT_TEXT)); response.setEntity(new NStringEntity(message, ContentType.DEFAULT_TEXT));
0
if (colon + 1 < host.length()) { try { port = Integer.parseInt(host.substring(colon + 1)); } catch (NumberFormatException ex) { } host = host.substring(0, colon);
0
* get only the name from the filename. int index = fullFilename.lastIndexOf('/'); // Given path * get the path from the filename.
0
package org.apache.felix.dependencymanager.impl;
0
if (shape == null) if (shapePainter == null) primitiveBounds = shape.getBounds2D(); else primitiveBounds = shapePainter.getPaintedBounds2D();
0
import org.apache.excalibur.source.Source; public static final ThreadLocal PARENT_CONTEXT = new ThreadLocal(); // TODO: use default includes, global variables and directories buffer.append("<avalon:sitemap location=\"sitemap.xmap\" uriPrefix=\""); public static CocoonWebApplicationContext createApplicationContext(String uriPrefix, // let's determine our context url Source s = sitemapResolver.resolveURI("a"); String contextUrl = s.getURI(); sitemapResolver.release(s); contextUrl = contextUrl.substring(0, contextUrl.length() - 1); PARENT_CONTEXT.set(parentContext); try { final CocoonWebApplicationContext context = new CocoonWebApplicationContext(classloader, parentContext, contextUrl, definition); return context; } finally { PARENT_CONTEXT.set(null); } protected static ClassLoader createClassLoader(BeanFactory parentFactory, Configuration config, ServletContext servletContext, SourceResolver sitemapResolver)
0
@SuppressWarnings({"unchecked", "serial", "rawtypes", "unused"}) public class ActiveCompaction implements org.apache.thrift.TBase<ActiveCompaction, ActiveCompaction._Fields>, java.io.Serializable, Cloneable, Comparable<ActiveCompaction> { public enum _Fields implements org.apache.thrift.TFieldIdEnum {
0
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.testing; /** * Category tag for the ParDoLifecycleTest for exclusion (BEAM-3241). */ public interface UsesParDoLifecycle {}
0
import com.google.common.util.concurrent.ThreadFactoryBuilder;
0
* <pre>{@code * }</pre>
0
// TODO: PROBABLY INCORRECT. Fix it.
0
protected Object serializeDeserialize(Object obj) throws Exception { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); ObjectOutputStream out = new ObjectOutputStream(buffer); out.writeObject(obj); out.close(); ObjectInputStream in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray())); Object dest = in.readObject(); in.close(); return dest; } Object dest = serializeDeserialize(obj);
0
import org.apache.accumulo.core.tabletserver.thrift.TDurability; client.update(Tracer.traceInfo(), ai.toThrift(instance), extent.toThrift(), m.toThrift(), TDurability.DEFAULT);
1
package org.apache.beam.sdk.extensions.sql.transform; import org.apache.beam.sdk.extensions.sql.schema.BeamSqlRow;
0
import static com.google.common.base.Preconditions.checkNotNull; private final DoFn.ProcessContinuation continuation; @Nullable RestrictionT residualRestriction, DoFn.ProcessContinuation continuation, Instant futureOutputWatermark) { this.continuation = checkNotNull(continuation); if (continuation.shouldResume()) { checkNotNull(residualRestriction); } /** * Can be {@code null} only if {@link #getContinuation} specifies the call should not resume. * However, the converse is not true: this can be non-null even if {@link #getContinuation} * is {@link DoFn.ProcessContinuation#stop()}. */ public DoFn.ProcessContinuation getContinuation() { return continuation; } * @return Information on how to resume the call: residual restriction, a {@link * DoFn.ProcessContinuation}, and a future output watermark.
0
/* import org.xml.sax.helpers.XMLReaderFactory; * Pojoization allows creating an iPOJO bundle from a "normal" bundle. System.err.println(mes); * @param metadata the iPOJO metadata input stream. * @param metadataFile the iPOJO metadata file (XML). } JarEntry je = m_inputJar.getJarEntry(classname); throw new IOException("The class " + classname + " connot be found in the input Jar file"); // // * classes. if (name != null) { // Only handler and component have a classname attribute * Is the class already manipulated. if (meta.length() != 0) { * * * Parses XML Metadata. XMLReader parser = XMLReaderFactory.createXMLReader(); parser.setFeature("http://apache.org/xml/features/validation/schema", true); * @param element : actual element.
0
"-rw-r--r-- 1 ftp group with spaces in it as allowed in cygwin see bug 38634 83853 Jan 22 2001 zxJDBC-1.2.4.tar.gz", // Bug 38634 => NET-16
0
kerberosDescriptorFactory, Collections.emptySet(), Collections.emptyMap(), null); kerberosDescriptorFactory, Collections.emptySet(), Collections.emptyMap(), null); kerberosDescriptorFactory, Collections.emptySet(), Collections.emptyMap(), null);
0
package org.apache.accumulo.test.functional;
0
public class ToStringOutputArchive implements OutputArchive { /** Creates a new instance of ToStringOutputArchive */ public ToStringOutputArchive(OutputStream out) stream.print(escapeString(s)); public void writeBuffer(byte[] buf, String tag) stream.print(escapeBuffer(buf)); private static String escapeString(String s) { if (s == null) { return ""; } StringBuilder sb = new StringBuilder(s.length() + 1); sb.append('\''); int len = s.length(); for (int i = 0; i < len; i++) { char c = s.charAt(i); switch(c) { case '\0': sb.append("%00"); break; case '\n': sb.append("%0A"); break; case '\r': sb.append("%0D"); break; case ',': sb.append("%2C"); break; case '}': sb.append("%7D"); break; case '%': sb.append("%25"); break; default: sb.append(c); } } return sb.toString(); } private static String escapeBuffer(byte[] barr) { if (barr == null || barr.length == 0) { return ""; } StringBuilder sb = new StringBuilder(barr.length + 1); sb.append('#'); for (byte b : barr) { sb.append(Integer.toHexString(b)); } return sb.toString(); }
0
/* * Copyright 2002-2005 The Apache Software Foundation * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. * * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.core.container.handler; import org.apache.avalon.framework.logger.Logger; import org.apache.cocoon.components.ComponentInfo; import org.apache.cocoon.core.container.CoreServiceManager; /** * A component handler used to alias roles: it delegates all its calls to another * handler. * * @version $Id$ * @since 2.2 */ public class AliasComponentHandler extends AbstractComponentHandler { ComponentHandler aliasedHandler; public AliasComponentHandler(Logger logger, ComponentHandler aliasedHandler) { super(new ComponentInfo(), logger); getInfo().setConfiguration(CoreServiceManager.EMPTY_CONFIGURATION); this.aliasedHandler = aliasedHandler; } protected Object doGet() throws Exception { return this.aliasedHandler.get(); } protected void doPut(Object component) throws Exception { this.aliasedHandler.put(component); } protected void doInitialize() { // nothing to do here } public boolean isSingleton() { return this.aliasedHandler.isSingleton(); } }
0
* test pipelines. Meanwhile it is valid for multiple {@link PubsubTestClient}s to be created * from the same client factory and run in parallel. Thus we can't enforce aliasing of the * following data structures over all clients and must resort to a static. * Publish mode only: Messages which should throw when first sent to simulate transient publish * failure. */ @Nullable Set<OutgoingMessage> remainingFailingOutgoingMessages; /** final Iterable<OutgoingMessage> expectedOutgoingMessages, final Iterable<OutgoingMessage> failingOutgoingMessages) { STATE.remainingFailingOutgoingMessages = Sets.newHashSet(failingOutgoingMessages); if (STATE.remainingFailingOutgoingMessages.remove(outgoingMessage)) { throw new RuntimeException("Simulating failure for " + outgoingMessage); }
0
import org.apache.ambari.server.ldap.service.AmbariLdapConfigurationProvider; bind(AmbariLdapConfigurationProvider.class).toInstance(createMock(AmbariLdapConfigurationProvider.class));
0
public Rectangle2D getPrimitiveBounds() { public Rectangle2D getGeometryBounds() { public Shape getOutline() {
0
import org.apache.accumulo.server.test.MiniAccumuloCluster;
0
import static org.apache.aurora.scheduler.storage.Storage.MutableStoreProvider; * @param storeProvider Storage provider. MutableStoreProvider storeProvider, private TaskInfo assign(MutableStoreProvider storeProvider, Offer offer, IScheduledTask task) { storeProvider, MutableStoreProvider storeProvider, return Optional.of(assign(storeProvider, offer.getOffer(), task));
0
* * * * @version CVS $Id: InterpreterSelector.java,v 1.4 2004/06/03 12:37:56 vgritsenko Exp $ implements Configurable, ThreadSafe { private String defaultLanguage; public void configure(Configuration config) throws ConfigurationException { super.configure(config); this.defaultLanguage = config.getAttribute("default", null); // Finish the initialization of the already created components Configuration[] configurations = config.getChildren("component-instance"); if (configurations.length == 0) { throw new ConfigurationException("No languages defined!"); } for (int i = 0; i < configurations.length; i++) { Configuration conf = configurations[i]; String hint = conf.getAttribute("name").trim(); if (!this.getComponentHandlers().containsKey(hint)) { throw new ConfigurationException( "Could not find component for hint: " + hint ); } if (i == 0 && defaultLanguage == null) { defaultLanguage = hint; } } public String getDefaultLanguage() { return defaultLanguage; }
0
impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null); impl.execute(route, req1, context, null); final HttpResponse result = impl.execute(route, req2, context, null);
0
import org.apache.accumulo.core.security.thrift.ThriftSecurityException; import org.apache.accumulo.server.security.SecurityOperationImpl; SecurityOperationImpl.getInstance().deleteTable(SecurityConstants.getSystemCredentials(), tableId); } catch (ThriftSecurityException e) {
1
@Override public Object getTableObject() { return table; }
0
import org.apache.beam.runners.core.SimplePushbackSideInputDoFnRunner; SimplePushbackSideInputDoFnRunner.create(underlying, sideInputs, sideInputReader);
0
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
0
* Copyright 1999-2002,2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ * @version $Id: ConnectionFactoryAvalonDataSource.java,v 1.3 2004/03/05 13:02:02 bdelacretaz Exp $
1
return new HashMap<>();
0
import org.apache.beam.sdk.transforms.windowing.Window.Assign; @Override public String getUrn(Assign<?> transform) { return PTransforms.WINDOW_TRANSFORM_URN; }
0
* {@link org.apache.commons.net.SocketClient}.
0
import org.apache.beam.dsls.sql.schema.BeamSqlRowType; BeamSqlRowType resultType = BeamSqlRowType.create(Arrays.asList("f_int2", "size"), BeamSqlRowType resultType = BeamSqlRowType.create( BeamSqlRowType resultType = BeamSqlRowType.create(Arrays.asList("f_int", "f_long"), BeamSqlRowType resultType = BeamSqlRowType.create( BeamSqlRowType resultType = BeamSqlRowType.create( BeamSqlRowType resultType = BeamSqlRowType.create(
0
import org.apache.beam.sdk.values.PCollectionTuple; * A new method {@link #buildBeamPipeline(PCollectionTuple)} is added, it's * A {@link BeamRelNode} is a recursive structure, the * {@link BeamPipelineCreator} visits it with a DFS(Depth-First-Search) * algorithm. PCollection<BeamSQLRow> buildBeamPipeline(PCollectionTuple inputPCollections) throws Exception;
0
reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current ); reportUnexpectedCharacterError( current );
0
assert false : "hashCode not designed";
0
Copyright 2002 The Apache Software Foundation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */
0
* The namespace URI for which this Rule is relevant, if any. * * @since 3.0 */ String namespaceURI() default ""; /**
0
* @since 2.0
1
* http://www.apache.org/licenses/LICENSE-2.0
0