Schema (two columns per sample):
- Diff: string, lengths 5 to 2k (the code-diff text).
- FaultInducingLabel: int64, values 0 or 1 (binary label marking whether the diff is fault-inducing).
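Each sample below pairs a Diff with its FaultInducingLabel. A minimal sketch of how rows shaped like this might be consumed, assuming the samples are stored one JSON object per line in a local file named samples.jsonl (the file name and layout are illustrative assumptions, not part of this listing):

import json

# Assumed layout: one {"Diff": "...", "FaultInducingLabel": 0 or 1} object per line.
with open("samples.jsonl", encoding="utf-8") as f:
    rows = [json.loads(line) for line in f]

# Partition the samples by their binary label.
faulty = [r for r in rows if r["FaultInducingLabel"] == 1]
clean = [r for r in rows if r["FaultInducingLabel"] == 0]
print(len(faulty), "fault-inducing diffs,", len(clean), "clean diffs")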
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.sdk.fn.data;

/**
 * A receiver of streamed data that can be closed.
 *
 * <p>The close method for a {@link CloseableFnDataReceiver} must be idempotent.
 */
public interface CloseableFnDataReceiver<T> extends FnDataReceiver<T>, AutoCloseable {
  /**
   * {@inheritDoc}.
   *
   * <p>Does nothing if this {@link CloseableFnDataReceiver} is already closed.
   */
  @Override
  void close() throws Exception;
}
0
options.setProject(PROJECT_ID);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, dataflowAggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, dataflowAggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, dataflowAggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, dataflowAggregatorTransforms);
    JOB_ID, options, dataflowAggregatorTransforms);
    JOB_ID, options, dataflowAggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, dataflowAggregatorTransforms);
    new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
    new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
    new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
    new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, aggregatorTransforms);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, null);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, null);
DataflowPipelineJob job = new DataflowPipelineJob(JOB_ID, options, null);
0
import org.apache.http.util.Asserts;
Asserts.check(!this.shutdown && !this.endOfStream, "Buffer already closed for writing");
Asserts.check(!this.shutdown && !this.endOfStream, "Buffer already closed for writing");
0
/*
 * ====================================================================
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 * ====================================================================
 *
 * This software consists of voluntary contributions made by many
 * individuals on behalf of the Apache Software Foundation. For more
 * information on the Apache Software Foundation, please see
 * <http://www.apache.org/>.
 *
 */
package org.apache.http.conn.scheme;

import java.io.IOException;
import java.net.Socket;
import java.net.UnknownHostException;

@Deprecated
class LayeredSocketFactoryAdaptor extends SocketFactoryAdaptor implements LayeredSocketFactory {

    private final LayeredSchemeSocketFactory factory;

    LayeredSocketFactoryAdaptor(final LayeredSchemeSocketFactory factory) {
        super(factory);
        this.factory = factory;
    }

    public Socket createSocket(
            final Socket socket, final String host, int port, boolean autoClose)
            throws IOException, UnknownHostException {
        return this.factory.createLayeredSocket(socket, host, port, autoClose);
    }
}
0
public void setContentBufferSize (final int contentBufferSize) {
public void setSelectInterval (final long selectInterval) {
0
doc = XMLUtils.read(is, true, false, new org.apache.xml.security.utils.IgnoreAllErrorHandler());
0
import org.apache.accumulo.core.security.tokens.PasswordToken;
0
for (Object beginDependent : beginDependents) {
    InstanceTime it = (InstanceTime) beginDependent;
for (Object endDependent : endDependents) {
    InstanceTime it = (InstanceTime) endDependent;
0
import com.google.common.base.Joiner;
import static org.apache.accumulo.tserver.logger.LogEvents.COMPACTION_FINISH;
import static org.apache.accumulo.tserver.logger.LogEvents.COMPACTION_START;
import static org.apache.accumulo.tserver.logger.LogEvents.DEFINE_TABLET;
import static org.apache.accumulo.tserver.logger.LogEvents.MANY_MUTATIONS;
import static org.apache.accumulo.tserver.logger.LogEvents.OPEN;
0
URL url = BundleCache.getSecureAction().createURL(null, getLocation(), null);
1
/**
 *
 * @version $Id$
 */
public class TIFFRegistryEntry
 * any default color correction the file may
public Filter handleStream(InputStream inIS,
SeekableStream ss =
0
 * Copyright 1999-2004 The Apache Software Foundation
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * @version $Revision: 1.8 $ $Date: 2004/02/29 14:17:43 $
0
import org.apache.beam.runners.spark.translation.streaming.utils.SparkTestPipelineOptionsForStreaming;
public SparkTestPipelineOptionsForStreaming commonOptions = new SparkTestPipelineOptionsForStreaming();
0
public OpenFuture open(int recipient, int rwsize, int rmpsize, Buffer buffer) {
    throw new IllegalStateException();
}
0
package org.apache.hc.core5.http.nio.pool;
0
 * widgets descriptor file path
 */
private String widgetsDescriptorFilePath;

/**
 * Obtain the path to the (stack-level) widgets descriptor file
 *
 * @return the path to the (stack-level) widgets descriptor file
 */
public String getWidgetsDescriptorFilePath() {
  return widgetsDescriptorFilePath;
}

/**
if (subDirs.contains(WIDGETS_DESCRIPTOR_FILE_NAME)) {
  widgetsDescriptorFilePath = getAbsolutePath() + File.separator + WIDGETS_DESCRIPTOR_FILE_NAME;
}
0
import org.apache.cocoon.portal.coplet.CopletInstance;
public ContentItem(CopletInstance cid, boolean content) {
0
@Test
0
import com.twitter.aurora.scheduler.storage.entities.IScheduledTask;
IScheduledTask task1 = createTask("foo", "id1", 1, 1, 1);
IScheduledTask task2 = createTask("foo", "id2", 1, 1, 1);
ScheduledTask builder = createTask("foo", "id1", 3, 3, 3).newBuilder();
builder.getAssignedTask().getTask().setProduction(false);
IScheduledTask task = IScheduledTask.build(builder);
IScheduledTask task = createTask("bar", "id1", 1, 1, 1);
IScheduledTask updatingTask = createTask("foo", "id1", 1, 1, 1);
updatingTask.getAssignedTask().getTask().newBuilder(),
private IExpectationSetters<?> returnTasks(IScheduledTask... tasks) {
private IScheduledTask createTask(
    String jobName, String taskId, int cpus, int ramMb, int diskMb) {
  return IScheduledTask.build(new ScheduledTask()
      .setTask(createTaskConfig(jobName, cpus, ramMb, diskMb))));
0
* @version $Id$
0
iterEnv = new TabletIteratorEnvironment(context, IteratorScope.majc, !propogateDeletes, acuTableConf,
    getMajorCompactionReason());
iterEnv = new TabletIteratorEnvironment(context, IteratorScope.minc, acuTableConf);
0
protected static final Logger log = Logger.getLogger(SimpleMacIT.class);
0
* <p>{@link com.google.cloud.dataflow.sdk.options.PipelineOptions} encapsulates the various
0
public interface Params {
  String clusterName();
  String serversetPath();

  default List<Class<? extends Module>> extraModules() {
    return ImmutableList.of();
  }

  default String statsUrlPrefix() {
    return "";
  }
}
public static void main(Params params, Module appEnvironmentModule) {
new ServiceDiscoveryModule(params.serversetPath(), zkClientConfig.credentials),
.setClusterName(params.clusterName())
.setStatsUrlPrefix(params.statsUrlPrefix())));
Params params = new Params() {
  @Override
  public String clusterName() {
    return CLUSTER_NAME.get();
  }

  @Override
  public String serversetPath() {
    return SERVERSET_PATH.get();
  }

  @Override
  public List<Class<? extends Module>> extraModules() {
    return EXTRA_MODULES.get();
  }

  @Override
  public String statsUrlPrefix() {
    return STATS_URL_PREFIX.get();
  }
};
.addAll(Iterables.transform(params.extraModules(), MoreModules::getModule))
main(params, Modules.combine(modules));
0
/**
 * IOReactorStatus represents an internal status of an I/O reactor.
 */
/**
 * The reactor is inactive / has not been started
 */
/**
 * The reactor is active / processing I/O events.
 */
/**
 * Shutdown of the reactor has been requested.
 */
/**
 * The reactor is shutting down.
 */
/**
 * The reactor has shut down.
 */
0
 * Copyright 2002-2004,2006 The Apache Software Foundation
 * If the array of predicates is empty, then this predicate returns true.
 * If the collection of predicates is empty, then this predicate returns true.
 * If the array of predicates is empty, then this predicate returns false.
 * If the collection of predicates is empty, then this predicate returns false.
 * If the array of predicates is empty, then this predicate returns false.
 * If the collection of predicates is empty, then this predicate returns false.
 * If the array of predicates is empty, then this predicate returns true.
 * If the collection of predicates is empty, then this predicate returns true.
0
private final Set<String> resourceNames = new HashSet<>();
this.pkPropertyIds = new HashSet<>(getKeyPropertyIds().values());
Set<Resource> resourceSet = new HashSet<>();
Set<ViewInstanceEntity> instanceDefinitions = new HashSet<>();
Map<Resource.Type, String> keyPropertyIds = new HashMap<>();
Set<String> propertyIds = new HashSet<>();
0
public void createAmbariResources(ClusterTopology topology, String clusterName, SecurityType securityType,
    String repoVersionString, Long repoVersionId) {
if (null == repoVersionString && null == repoVersionId) {
  Long defaultRepoVersionId = null;
  defaultRepoVersionId = (Long) resource.getPropertyValue(VersionDefinitionResourceProvider.VERSION_DEF_ID);
  repoVersion = repositoryVersionDAO.findByPK(defaultRepoVersionId);
} else if (null != repoVersionId) {
  repoVersion = repositoryVersionDAO.findByPK(repoVersionId);
  if (null == repoVersion) {
    throw new IllegalArgumentException(String.format(
        "Could not identify repository version with repository version id %s for installing services. "
            + "Specify a valid repository version id with '%s'",
        repoVersionId, ProvisionClusterRequest.REPO_VERSION_ID_PROPERTY));
  }
"Could not identify repository version with stack %s and version %s for installing services. "
stackId, repoVersionString, ProvisionClusterRequest.REPO_VERSION_PROPERTY));
0
import cz.seznam.euphoria.shadow.com.google.common.base.Preconditions;
import cz.seznam.euphoria.shadow.com.google.common.collect.AbstractIterator;
0
log.info("Accumulo data dirs are " + Arrays.asList(ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration()))); log.fatal("It appears the directories " + Arrays.asList(ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration())) + " were previously initialized."); String[] configuredTableDirs = ServerConstants.prefix(ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration()), ServerConstants.TABLE_DIR); if (ServerConfiguration.getSiteConfiguration().get(Property.INSTANCE_VOLUMES).trim().equals("")) { Configuration fsConf = CachedConfiguration.getInstance(); initDirs(fs, uuid, ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration()), false); for (String baseDir : ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration())) { initializedDirs .addAll(Arrays.asList(ServerConstants.checkBaseDirs(ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration()), true))); uinitializedDirs.addAll(Arrays.asList(ServerConstants.getConfiguredBaseDirs(ServerConfiguration.getSiteConfiguration()))); VolumeManager fs = VolumeManagerImpl.get(ServerConfiguration.getSiteConfiguration());
0
OfflineResolver.register( "http://www.w3.org/Signature/2002/04/xml-stylesheet.b64", "data/ie/baltimore/merlin-examples/merlin-xmldsig-twenty-three/xml-stylesheet.b64", "text/plain");
0
return consumers.computeIfAbsent(endpoint, (LogicalEndpoint unused) -> SettableFuture.create());
0
import java.util.concurrent.CountDownLatch;
import com.yahoo.zookeeper.server.ServerStats;
private static final int CONNECTION_TIMEOUT = 30000;
private CountDownLatch clientConnected;
ServerStats.registerAsConcrete();
ServerStats.unregister();
clientConnected = null;
private ZooKeeper createClient() throws KeeperException, IOException, InterruptedException {
    clientConnected = new CountDownLatch(1);
    ZooKeeper zk = new ZooKeeper(hostPort, 30000, this);
    if (!clientConnected.await(CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS)) {
        fail("Unable to connect to server");
    }
    return zk;
}
zk = createClient();
zk = createClient();
ZooKeeper zk = createClient();
ZooKeeper zk = createClient();
ZooKeeper zk = createClient();
if (event.getState() == Event.KeeperStateSyncConnected) {
    clientConnected.countDown();
}
if (event.getType() != Event.EventNone) {
    try {
        events.put(event);
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
  }
}
0
import org.apache.accumulo.core.security.thrift.TCredentials;
private TCredentials credentials;
public ConnectorImpl(Instance instance, TCredentials cred) throws AccumuloException, AccumuloSecurityException {
if (!iface.authenticate(Tracer.traceInfo(), credentials))
1
 *
 *
 */
ArrayConverter dateArrayConverter =
new ConvertUtilsBean(),
new ConvertUtilsBean(), new PropertyUtilsBean());
assertEquals("Set property failed (2)", bean.getBooleanProperty(), true);
new ConvertUtilsBean(),
"Value " + i + " incorrectly converted ",
"Mapped property set correctly", "some.dotty.value",
 * Throw an exception with the specified message.
0
request.setClusters(null);
0
expect(m_vdfXml.getClusterSummary(EasyMock.anyObject(Cluster.class), EasyMock.anyObject(AmbariMetaInfo.class))).andReturn(
0
import org.apache.accumulo.core.volume.VolumeConfiguration;
FileSystem fs = VolumeConfiguration.getVolume(dir, CachedConfiguration.getInstance(),
    ServerConfigurationUtil.getConfiguration(instance)).getFileSystem();
1
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.digester3.examples.documentmarkup;

import org.apache.commons.beanutils.MethodUtils;
import org.apache.commons.digester3.Rule;

/**
 * When a text segment is discovered, it calls a specific method on the top
 * object on the stack.
 */
public class SetTextSegmentRule extends Rule implements TextSegmentHandler {

    /**
     * The method name to call on the parent object.
     */
    private String methodName = null;

    public SetTextSegmentRule(String methodName) {
        this.methodName = methodName;
    }

    /**
     * Process the end of this element.
     */
    public void textSegment(String text) throws Exception {
        // Call the specified method
        MethodUtils.invokeMethod(this.getDigester().peek(0), this.methodName,
            new Object[]{ text }, new Class[]{ String.class });
    }
}
0
 * @version CVS $Id: SessionContextProvider.java,v 1.3 2003/05/23 12:13:13 cziegeler Exp $
String ROLE = SessionContextProvider.class.getName();
0
if (kke == null) {
KerberosPrincipalEntity kpe = kerberosPrincipalDAO.find(principal.getPrincipal());
if (kpe == null) {
  kpe = new KerberosPrincipalEntity(principal.getPrincipal(), principal.isService(), principal.getCacheFile());
  kerberosPrincipalDAO.create(kpe);
}
KerberosKeytabPrincipalEntity entity = kerberosKeytabPrincipalDAO.findOrCreate(kke, hostEntity, kpe);
kpe.addKerberosKeytabPrincipal(entity);
kerberosPrincipalDAO.merge(kpe);
0
assertEquals( AttributeDefinition.PASSWORD, AD.toType( "Password" ) );
0
final Logger logger, final Context context, final ServiceManager serviceManager,
0
// Simulate a KILLED ack from the executor.
@Test
public void testKillNoTasksDoesNotThrow() throws Exception {
  control.replay();
  buildScheduler();

  scheduler.killTasks(Query.roleScoped("role_absent"), OWNER_A.getUser());
}
0
return setNamespaceAndReturn( new CallMethodRule(targetOffset, methodName, paramCount, paramTypes, useExactMatch));
0
import org.apache.sshd.util.test.JUnitTestSupport;
public class BuiltinCompressionsTest extends JUnitTestSupport {
0
@Override
@Override
@Override
@Override
@Override
0
// **** This has been changed due to BEANUTILS-449 ****
//convertValueToType(converter, "(B)", Integer.class, localizedDecimalValue, localizedDecimalPattern, expectedValue);
0
import com.google.common.collect.Maps;
import org.apache.aurora.common.util.testing.FakeBuildInfo;
import static org.apache.aurora.common.util.testing.FakeBuildInfo.generateBuildInfo;
snapshotStore = new SnapshotStoreImpl(
    generateBuildInfo(),
    clock,
    storageUtil.storage);
metadata.setDetails(Maps.newHashMap());
metadata.getDetails().put(FakeBuildInfo.DATE, FakeBuildInfo.DATE);
metadata.getDetails().put(FakeBuildInfo.GIT_REVISION, FakeBuildInfo.GIT_REVISION);
metadata.getDetails().put(FakeBuildInfo.GIT_TAG, FakeBuildInfo.GIT_TAG);
Snapshot snapshot = snapshotStore.createSnapshot();
assertEquals(expected, snapshot);
0
package com.google.cloud.dataflow.examples.cookbook;
0
private static final Map<ClassLoader, DocumentBuilder[][]> DOCUMENT_BUILDERS =
    Collections.synchronizedMap(new WeakHashMap<ClassLoader, DocumentBuilder[][]>());

public static DocumentBuilder createDocumentBuilder(boolean validating) throws ParserConfigurationException {
    return createDocumentBuilder(validating, true);
}

public static DocumentBuilder createDocumentBuilder(
    boolean validating, boolean disAllowDocTypeDeclarations
) throws ParserConfigurationException {
    DocumentBuilder db = getDocumentBuilder(validating, disAllowDocTypeDeclarations);
    db.reset();
    return db;
}

/**
 * Return this document builder to be reused
 * @param db DocumentBuilder returned from any of {@link #createDocumentBuilder} methods.
 * @return whether it was successfully returned to the pool
 */
@Deprecated
public static boolean repoolDocumentBuilder(DocumentBuilder db) {
    return true;
}

private static DocumentBuilder getDocumentBuilder(boolean validating, boolean disAllowDocTypeDeclarations) throws ParserConfigurationException {
    return newDocumentBuilder(validating, disAllowDocTypeDeclarations);
DocumentBuilder[][] cacheValue = DOCUMENT_BUILDERS.get(loader);
cacheValue = new DocumentBuilder[2][2];
DocumentBuilder db = cacheValue[validating ? 1 : 0][disAllowDocTypeDeclarations ? 1 : 0];
db = newDocumentBuilder(validating, disAllowDocTypeDeclarations);
cacheValue[validating ? 1 : 0][disAllowDocTypeDeclarations ? 1 : 0] = db;
private static DocumentBuilder newDocumentBuilder(boolean validating, boolean disAllowDocTypeDeclarations) throws ParserConfigurationException {
    return f.newDocumentBuilder();
0
 * @deprecated see {@link java.io.DataInputStream#readLine()} This method will be removed when it is
 * removed from the DataInput interface this class implements (which will probably never happen).
0
import org.apache.sshd.common.util.Transformer;
0
// add auto instance configurations if required
addAutoInstanceDefinition(viewDefinition);
private void addAutoInstanceDefinition(ViewEntity viewEntity) {
  ViewConfig viewConfig = viewEntity.getConfiguration();
  String viewName = viewEntity.getViewName();

  AutoInstanceConfig autoInstanceConfig = viewConfig.getAutoInstance();
  if (autoInstanceConfig == null) {
    return;
  }

  List<String> services = autoInstanceConfig.getServices();
  Map<String, org.apache.ambari.server.state.Cluster> allClusters = clustersProvider.get().getClusters();
  for (org.apache.ambari.server.state.Cluster cluster : allClusters.values()) {
    String clusterName = cluster.getClusterName();
    StackId stackId = cluster.getCurrentStackVersion();
    Set<String> serviceNames = cluster.getServices().keySet();

    for (String service : services) {
      try {
        if (checkAutoInstanceConfig(autoInstanceConfig, stackId, service, serviceNames)) {
          LOG.info("Auto creating instance of view " + viewName + " for cluster " + clusterName + ".");
          ViewInstanceEntity viewInstanceEntity = createViewInstanceEntity(viewEntity, viewConfig, autoInstanceConfig);
          viewInstanceEntity.setClusterHandle(clusterName);
          installViewInstance(viewInstanceEntity);
        }
      } catch (Exception e) {
        LOG.error("Can't auto create instance of view " + viewName + " for cluster " + clusterName
            + ". Caught exception :" + e.getMessage(), e);
      }
    }
  }
}
0
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
0
import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableList;
0
 * @param image the image to distort
 * @param offsets the displacement map
 * @param xChannel defines the channel of off whose values will be
 *        on X-axis operation
 * @param yChannel defines the channel of off whose values will be
 * @param rh the rendering hints
 * @param off the displacement map
1
} catch (final NullPointerException ex) {}
0
@Override
@Override
0
import org.apache.cocoon.blocks.servlet.BlockConstants;
import org.apache.cocoon.blocks.servlet.ServiceManagerRegistry;
0
/*
 * We suppress the "try" warning here because the close() method's signature
 * allows it to throw InterruptedException which is strongly advised against
 * by AutoCloseable (see: http://docs.oracle.com/javase/7/docs/api/java/lang/AutoCloseable.html#close()).
 * close() will never throw an InterruptedException but the exception remains in the
 * signature for backwards compatibility purposes.
 */
@SuppressWarnings("try")
public class ZooKeeper implements AutoCloseable {
 * <p>
 * Added in 3.5.3: <a href="https://docs.oracle.com/javase/tutorial/essential/exceptions/tryResourceClose.html">try-with-resources</a>
 * may be used instead of calling close directly.
0
import cz.seznam.euphoria.core.annotation.audience.Audience;
@Audience(Audience.Type.INTERNAL)
0
public final class ASTIntegerLiteral extends JexlNode implements JexlNode.Literal<Integer> {
0
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
@SuppressFBWarnings(value = "PATH_TRAVERSAL_IN",
    justification = "process runs in same security context as admin who provided path")
0
this.services = new EnumMap<>(Service.class);
0
 * <p>The parameters are described via the external xml
 * @cocoon.sitemap.component.documentation
 * This is the action used to validate Request parameters.
 *
0
storageUtil.expectOperations();
0
public static String asString(InputStream pStream) throws IOException {
public static String asString(InputStream pStream, String pEncoding)
0
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *     http://www.apache.org/licenses/LICENSE-2.0
0
package org.apache.commons.codec;
1
public int read() throws IOException {
public int read(byte[] b, int off, int len) throws IOException {
public int read(byte[] b) throws IOException {
public int available() throws IOException {
public void close() throws IOException {
0
package org.apache.cocoon.auth.impl;
import org.apache.cocoon.auth.SecurityHandler;
0
.setRowSchema(CalciteUtils.toBeamSchema(getRowType()));
0
import static org.junit.Assert.assertFalse;
public void testUpgradeBehaviorTag() throws JAXBException {
  // given
  String xml = "<property>\n" +
      "  <name>prop_name</name>\n" +
      "  <value>prop_val</value>\n" +
      "  <on-ambari-upgrade add=\"false\" change=\"true\" delete=\"true\"/>\n" +
      "  <on-stack-upgrade add=\"true\" change=\"true\" delete=\"false\"/>\n" +
      "</property>";

  // when
  PropertyInfo propertyInfo = propertyInfoFrom(xml);

  // then
  assertFalse(propertyInfo.getPropertyAmbariUpgradeBehavior().isAdd());
  assertTrue(propertyInfo.getPropertyAmbariUpgradeBehavior().isChange());
  assertTrue(propertyInfo.getPropertyAmbariUpgradeBehavior().isDelete());
  assertTrue(propertyInfo.getPropertyStackUpgradeBehavior().isAdd());
  assertTrue(propertyInfo.getPropertyStackUpgradeBehavior().isChange());
  assertFalse(propertyInfo.getPropertyStackUpgradeBehavior().isDelete());
}
@Test
JAXBContext jaxbCtx = JAXBContext.newInstance(PropertyInfo.class, PropertyUpgradeBehavior.class);
), PropertyInfo.class
0
/**
 * The {@link HostPredicate} interface is used as a way to pass a custom
 * implementation into one of the many
 * {@link MaintenanceStateHelper#isOperationAllowed(org.apache.ambari.server.controller.spi.Resource.Type, Service)}
 * methods.
 */
public static interface HostPredicate {
  /**
   * Gets whether the specified host should not be included in a result set.
   *
   * @param hostname
   *          the host (not {@code null}).
   * @return {@code true} to indicate that the specified host is not a valid
   *         choice, {@code false} otherwise.
   * @throws AmbariException
   */
  public boolean shouldHostBeRemoved(String hostname) throws AmbariException;
}
0
 * the response objects can be {@code null} if there is no incoming or
 * Otherwise returns {@code null}.
 * @return HTTP request, if available, {@code null} otherwise.
 * @return HTTP response, if available, {@code null} otherwise.
0
import org.apache.cocoon.portal.services.aspects.PortalManagerAspect;
import org.apache.cocoon.portal.services.aspects.PortalManagerAspectPrepareContext;
import org.apache.cocoon.portal.services.aspects.PortalManagerAspectRenderContext;
 * @see org.apache.cocoon.portal.services.aspects.PortalManagerAspect#prepare(org.apache.cocoon.portal.services.aspects.PortalManagerAspectPrepareContext)
public void prepare(PortalManagerAspectPrepareContext aspectContext)
final Map objectModel = aspectContext.getPortalService().getProcessInfoProvider().getObjectModel();
if ( !aspectContext.getPortalService().getUserService().getUser().isAnonymous() ) {
aspectContext.getPortalService().getUserService().getUser().getUserInfos());
 * @see org.apache.cocoon.portal.services.aspects.PortalManagerAspect#render(org.apache.cocoon.portal.services.aspects.PortalManagerAspectRenderContext, org.xml.sax.ContentHandler, java.util.Properties)
final Map objectModel = aspectContext.getPortalService().getProcessInfoProvider().getObjectModel();
0
import java.nio.charset.StandardCharsets;
sks = new KeySelectors.SecretKeySelector("secret".getBytes(StandardCharsets.US_ASCII));
0
import org.apache.batik.util.Platform;
Toolkit toolkit = Toolkit.getDefaultToolkit();
Cursor moveCursor = Cursor.getPredefinedCursor(Cursor.MOVE_CURSOR);
if (Platform.isOSX) {
    try {
        Image img = toolkit.createImage
            (CursorManager.class.getResource("resources/move.gif"));
        moveCursor = toolkit.createCustomCursor
            (img, new Point(11, 11), "move");
    } catch (Exception ex) {
    }
}
cursorMap.put(SVG_MOVE_VALUE, moveCursor);
0
 *
 * Note that this is the original URI, and is
 * unaffected by redirects.
 *
 * To find the final URI after any redirects have been processed,
 * please see the section entitled
 * <a href="http://hc.apache.org/httpcomponents-client/tutorial/html/fundamentals.html#d4e205">HTTP execution context</a>
 * in the
 * <a href="http://hc.apache.org/httpcomponents-client/tutorial/html">HttpClient Tutorial</a>
0
private Map<String, String> hostAttributes = new HashMap<>();
Set<String> hostNames = new HashSet<>();
List<HostEntity> hostEntities = new ArrayList<>();
HashSet<State> validStates = new HashSet<>();
HashSet<State> invalidStates = new HashSet<>();
Set<String> hostNames = new HashSet<>();
HashMap<String, String> tags = new HashMap<>(2);
Set<String> hostNames = new HashSet<>();
Map<String, Map<String, String>> c1PropAttributes = new HashMap<>();
Map<String, Map<String, String>> c2PropAttributes = new HashMap<>();
Set<String> hostNames = new HashSet<>();
Set<String> hostNames = new HashSet<>();
1
* Default {@link org.apache.http.nio.reactor.IOEventDispatch} implementation * that supports both plain (non-encrypted) and SSL encrypted client side HTTP * connections.
0
import org.apache.http.HttpConnection;
import org.apache.http.conn.HttpSSLConnection;
import org.apache.http.protocol.ExecutionContext;
HttpConnection conn = (HttpConnection) context.getAttribute(
        ExecutionContext.HTTP_CONNECTION);
if (conn instanceof HttpSSLConnection) {
    SSLSession sslsession = ((HttpSSLConnection) conn).getSSLSession();
    if (sslsession != null) {
        userPrincipal = sslsession.getLocalPrincipal();
    }
0
import org.apache.sshd.common.Factory;
Factory<Command> getShellFactory();
List<NamedFactory<Command>> getSubsystemFactories();
0
XMLUtils.read(input.getOctetStream(), false, secureValidation);
0
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
1
public class AtlasServerIdSelectorTest {
String atlasServerId = AtlasServerIdSelector.selectServerId(configuration);
AtlasServerIdSelector.selectServerId(configuration);
AtlasServerIdSelector.selectServerId(configuration);
0
/** A {@link BigQuerySourceBase} for reading BigQuery tables. */
static <T> BigQueryTableSource<T> create(
    SerializableFunction<SchemaAndRecord, T> parseFn) {
TableReference table = setDefaultProjectIfAbsent(
    options.as(BigQueryOptions.class),
    BigQueryIO.JSON_FACTORY.fromString(jsonTable.get(), TableReference.class));
Table tableRef = bqServices.getDatasetService(options.as(BigQueryOptions.class)).getTable(table);
1
import com.fasterxml.jackson.databind.Module;
import org.apache.beam.sdk.util.common.ReflectHelpers;
out.writeUTF(createMapper().writeValueAsString(pipelineOptions));
this.pipelineOptions = createMapper().readValue(s, PipelineOptions.class)
/**
 * Use an {@link ObjectMapper} configured with any {@link Module}s in the class path allowing
 * for user specified configuration injection into the ObjectMapper. This supports user custom
 * types on {@link PipelineOptions}.
 */
private static ObjectMapper createMapper() {
  return new ObjectMapper().registerModules(
      ObjectMapper.findModules(ReflectHelpers.findClassLoader()));
}
0
package org.apache.beam.sdk.io;
import org.apache.beam.sdk.coders.Coder;
import org.apache.beam.sdk.coders.Coder.Context;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.apache.beam.sdk.coders.VoidCoder;
import org.apache.beam.sdk.io.Read.Bounded;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.runners.DirectPipelineRunner;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.util.IOChannelUtils;
import org.apache.beam.sdk.util.MimeTypes;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
import org.apache.beam.sdk.values.PInput;
read = org.apache.beam.sdk.io.Read.from(
read = org.apache.beam.sdk.io.Read.from(
read = org.apache.beam.sdk.io.Read.from(
read = org.apache.beam.sdk.io.Read.from(
return input.apply("Write", org.apache.beam.sdk.io.Write.to(
 * A {@link org.apache.beam.sdk.io.FileBasedSource.FileBasedReader FileBasedReader}
 * A {@link org.apache.beam.sdk.io.FileBasedSink.FileBasedWriteOperation
 * A {@link org.apache.beam.sdk.io.FileBasedSink.FileBasedWriter FileBasedWriter}
0
import com.fasterxml.jackson.databind.node.ObjectNode;
public void testStatusOfActiveServerIsReturned() {
ObjectNode entity = (ObjectNode) response.getEntity();
assertEquals(entity.get("Status").asText(), "ACTIVE");
public void testResourceGetsValueFromServiceState() {
ObjectNode entity = (ObjectNode) response.getEntity();
assertEquals(entity.get("Status").asText(), "PASSIVE");
0
public ServletConfiguration(final Servlet servlet)
public ServletConfiguration(final ServletConfig config)
protected Object getPropertyInternal(final String key)
final Enumeration<String> en = config.getInitParameterNames();
0
if ("true".equals(System.getProperty("org.apache.accumulo.test.functional.useCredProviderForIT"))) { cfg.setUseCredentialProvider(true); }
0
package com.twitter.mesos.scheduler.httphandlers;

import java.util.Map;

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;

import com.google.common.collect.ImmutableMap;
import com.google.inject.Inject;

import com.twitter.mesos.scheduler.CronJobManager;

/**
 * HTTP interface to dump state of the internal cron scheduler.
 */
@Path("/cron")
public class Cron {
  private final CronJobManager cronManager;

  @Inject
  Cron(CronJobManager cronManager) {
    this.cronManager = cronManager;
  }

  /**
   * Dumps the state of the cron manager.
   *
   * @return An HTTP response containing the cron manager's state.
   */
  @GET
  @Produces(MediaType.APPLICATION_JSON)
  public Response dumpContents() {
    Map<String, Object> response = ImmutableMap.<String, Object>builder()
        .put("scheduled", cronManager.getScheduledJobs())
        .put("pending", cronManager.getPendingRuns())
        .build();

    return Response.ok(response).build();
  }
}
0
import org.apache.cocoon.spring.ResourceUtils;
0
import org.apache.accumulo.server.security.SecurityUtil;
SecurityUtil.serverLogin();
Instance instance = HdfsZooInstance.getInstance();
public SimpleGarbageCollector() {}
0
import org.osgi.converter.Converter;
 * Specify the converter to be used by the code, if an alternative, adapted,
 * converter is to be used.
 * @param converter The converter to use.
 * @return This Deserializing object to allow further invocations on it.
Deserializing<T> with(Converter converter);
0
import static com.google.common.base.Charsets.UTF_8;
log.error(" saw = " + new String(entry.getValue().get()) + " expected = " + new String(value, UTF_8));
0
return nm(row, cf, cq, new Value(value.getBytes(Constants.UTF8)));
0
import java.lang.reflect.Type;
import javax.annotation.Nullable;
 * <p>Implementations of this interface are generated at runtime to map Row fields back into
 * object fields.
/** Sets the specified field on object to value. */
/** Returns the name of the field. */
String name();
/** Returns the field type. */
Class type();
/** If the field is a container type, returns the element type. */
@Nullable
Type elementType();
/** If the field is a map type, returns the key type. */
@Nullable
Type mapKeyType();
/** If the field is a map type, returns the value type. */
@Nullable
Type mapValueType();
0