Columns: Diff (string, lengths 5 to 2k characters); FaultInducingLabel (int64, values 0 or 1).
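Each row below pairs a code-diff excerpt with its binary fault-inducing label. As a minimal sketch of working with this table (assuming the rows have been exported to a CSV file named fault_diffs.csv — the file name, CSV format, and the reading of "2k" as 2000 characters are all hypothetical, not stated by the source), the columns described above can be loaded and sanity-checked like this:

```python
import pandas as pd

# Hypothetical export of the rows shown below; only the two columns
# described in the schema header are read.
df = pd.read_csv("fault_diffs.csv", usecols=["Diff", "FaultInducingLabel"])

# Check the schema: Diff strings of length 5..2000 (assuming "2k" = 2000),
# labels restricted to 0 or 1.
assert df["Diff"].map(len).between(5, 2000).all()
assert set(df["FaultInducingLabel"].unique()) <= {0, 1}

# Class balance: fraction of diffs labeled as fault-inducing.
print(df["FaultInducingLabel"].mean())
```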
package org.apache.accumulo.core.clientImpl; import org.apache.accumulo.core.clientImpl.thrift.ClientService; import org.apache.accumulo.core.clientImpl.thrift.ClientService.Client; import org.apache.accumulo.core.clientImpl.thrift.ThriftSecurityException;
0
import java.util.Comparator;
0
import org.apache.ambari.server.controller.internal.PropertyIdImpl; import org.apache.ambari.server.controller.internal.ResourceImpl;
1
"EC-256-gpg2-public.asc", "EC-521-gpg2-public.asc",
0
public void testPrompt() throws Exception { Context c = new Context(); c.addCommand("echo", this); c.set("USER", "gnodet"); c.set("APPLICATION", "karaf"); //c.set("SCOPE", ""); Object p = c.execute("\"@|bold ${USER}|@${APPLICATION}:@|bold ${SCOPE}|> \""); System.out.println("Prompt: " + p); } public CharSequence echo(Object args[]) { if (args == null) { return ""; } StringBuilder sb = new StringBuilder(); String del = ""; for (Object arg : args) { sb.append(del); if (arg != null) { sb.append(arg); del = " "; } } return sb; }
0
import org.apache.sshd.common.util.security.SecurityUtils;
0
package org.apache.sshd.cli.client; import org.apache.sshd.client.SshClient; public class SshKeyScanMain implements Channel, Callable<Void>, ServerKeyVerifier, SessionListener, SimplifiedLog { public SshKeyScanMain() { public static List<String> parseCommandLineArguments(SshKeyScanMain scanner, String... args) throws IOException { public static <S extends SshKeyScanMain> S setInputStream(S scanner, Collection<String> hosts) throws IOException { public static <S extends SshKeyScanMain> S initializeScanner(S scanner, Collection<String> hosts) throws IOException { try (SshKeyScanMain scanner = new SshKeyScanMain()) {
0
import org.apache.batik.ext.awt.image.GraphicsUtil; import org.apache.batik.ext.awt.image.codec.tiff.TIFFEncodeParam; import org.apache.batik.ext.awt.image.codec.tiff.TIFFField; import org.apache.batik.ext.awt.image.codec.tiff.TIFFImageDecoder; import org.apache.batik.ext.awt.image.codec.tiff.TIFFImageEncoder; import org.apache.batik.ext.awt.image.rendered.FormatRed;
0
io.setDescription( "MaxCombiner interprets Values as Longs and finds their maximum. A variety of encodings (variable length, fixed length, or string) are available");
0
import org.apache.beam.sdk.values.PCollection; import org.apache.beam.vendor.grpc.v1_13_1.com.google.protobuf.ByteString; import org.apache.beam.vendor.grpc.v1_13_1.com.google.protobuf.InvalidProtocolBufferException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; private static final Logger LOG = LoggerFactory.getLogger(CreateExecutableStageNodeFunction.class); .setIsBounded(RunnerApi.IsBounded.Enum.BOUNDED) // This will eventually inform the SDK Harness of the existing timers. if (!parDoPayload.getTimerSpecsMap().isEmpty()) { // Build the necessary components to inform the SDK Harness of the pipeline's // timers. for (Map.Entry<String, RunnerApi.TimerSpec> entry : parDoPayload.getTimerSpecsMap().entrySet()) { timerIds.add(entry.getKey()); }
0
@Override public boolean isReversed() { return false; } @Override public void maybeReverse(boolean mirror) { }
0
/* * $Header: /cvshome/build/ee.foundation/src/java/util/ListResourceBundle.java,v 1.6 2006/03/14 01:20:26 hargrave Exp $ * * (C) Copyright 2001 Sun Microsystems, Inc. * Copyright (c) OSGi Alliance (2001, 2005). All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package java.util; public abstract class ListResourceBundle extends java.util.ResourceBundle { public ListResourceBundle() { } protected abstract java.lang.Object[][] getContents(); public java.util.Enumeration getKeys() { return null; } public final java.lang.Object handleGetObject(java.lang.String var0) { return null; } }
0
@Override @Override
0
// Create extra adapter service dependency upon which our adapter depends on. } else { Assert.fail("wrong call to updated method: count=" + updateCount); }
0
import java.util.Iterator; import org.apache.hc.core5.annotation.NotThreadSafe; import org.apache.hc.core5.http.Header; import org.apache.hc.core5.http.HeaderElement; import org.apache.hc.core5.http.HttpResponse; import org.apache.hc.core5.util.Args; final Iterator<Header> it = response.headerIterator("Allow"); final Header header = it.next();
0
package org.apache.hadoop.metadata.typesystem.types.store; return !name.startsWith(".") && !name.startsWith("_") && !name.equals(ARCHIVE_LOCATION.getName()); public synchronized void delete(String namespace) throws StorageException { Path archivePath = new Path(ARCHIVE_LOCATION, jsonFile.getName() + System.currentTimeMillis());
0
origStage.getRequestContext(), origStage.getClusterHostInfo(), origStage.getCommandParamsStage(), origStage.getHostParamsStage());
0
package org.apache.accumulo.examples.wikisearch.normalizer;
0
import org.apache.hc.core5.annotation.Internal; /** * {@link IOEventHandler} that also exposes {@link HttpConnection} properties. * * @since 5.0 */ @Internal
0
import org.apache.accumulo.core.metadata.schema.TabletsMetadata; return () -> { try { return TabletsMetadata.builder().forTable(tableId).fetchLocation().fetchPrev() .build(context).stream().map(tm -> { Location loc = Location.NONE; if (tm.hasCurrent()) { loc = new Location(new TServerInstance(tm.getLocation())); } return new Pair<>(tm.getExtent(), loc); }).iterator(); } catch (Exception e) { throw new RuntimeException(e); } };
0
List<V> list = listMap.get((K) "A"); ListIterator<V> listIt = listMap.get((K) "B").listIterator(); List<V> list = listMap.get((K) "A"); MultiValuedMap map1 = makeObject(); MultiValuedMap map2 = makeObject(); ListValuedMap map1 = makeObject(); ListValuedMap map2 = makeObject();
0
import org.apache.accumulo.core.client.AccumuloClient; AccumuloClient conn; public synchronized static void configureMetadataTable(AccumuloClient conn, String tableName) {
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
* nor may "Apache" appear in their name, without
0
import java.io.Serializable; public final class ByteArrayBuffer implements Serializable { private static final long serialVersionUID = 4359112959524048036L;
0
import org.apache.beam.sdk.values.Schema; private final Schema schema; private SqlRowCoder(Schema schema) { this.schema = schema; public static SqlTypeCoder of(Schema schema) { return new SqlRowCoder(schema); public Schema getSchema() { return schema; return schema.getRowCoder(); && Objects.equals(this.schema, ((SqlRowCoder) other).schema); return Objects.hashCode(this.schema);
0
if (SecurityUtils.isECCSupported()) {
0
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.webconsole.plugins.ds.internal; import java.io.IOException; import org.osgi.framework.Constants; import org.osgi.framework.InvalidSyntaxException; import org.osgi.service.cm.Configuration; import org.osgi.service.cm.ConfigurationAdmin; public class ConfigurationAdminSupport { public boolean check(final Object obj, final String pid) { final ConfigurationAdmin ca = (ConfigurationAdmin)obj; try { // we use listConfigurations to not create configuration // objects persistently without the user providing actual // configuration String filter = '(' + Constants.SERVICE_PID + '=' + pid + ')'; Configuration[] configs = ca.listConfigurations(filter); if (configs != null && configs.length > 0) { return true; } } catch (InvalidSyntaxException ise) { // should print message } catch (IOException ioe) { // should print message } return false; } }
0
public void process(byte cmd, Buffer buffer) throws Exception { if (cmd == SshConstants.SSH_MSG_USERAUTH_REQUEST) { buffer = session.createBuffer(SshConstants.SSH_MSG_USERAUTH_BANNER, 0); buffer = session.createBuffer(SshConstants.SSH_MSG_USERAUTH_SUCCESS, 0); buffer = session.createBuffer(SshConstants.SSH_MSG_USERAUTH_FAILURE, 0); buffer = session.createBuffer(SshConstants.SSH_MSG_USERAUTH_FAILURE, 0);
0
public abstract class JoinFn<LeftT, RightT, K, OutputT> extends DoFn<KV<K, CoGbkResult>, Pair<K, OutputT>> { TupleTag<LeftT> leftTag, TupleTag<RightT> rightTag) { ProcessContext c, K key, CoGbkResult value,
0
import javax.annotation.Nullable; @Nullable
0
@Test() public void testGetDefaultAgentTaskTimeout() { Properties ambariProperties = new Properties(); Configuration conf = new Configuration(ambariProperties); Assert.assertEquals("900", conf.getDefaultAgentTaskTimeout(false)); Assert.assertEquals("1800", conf.getDefaultAgentTaskTimeout(true)); ambariProperties = new Properties(); ambariProperties.setProperty("agent.task.timeout", "4"); ambariProperties.setProperty("agent.package.install.task.timeout", "82"); conf = new Configuration(ambariProperties); Assert.assertEquals("4", conf.getDefaultAgentTaskTimeout(false)); Assert.assertEquals("82", conf.getDefaultAgentTaskTimeout(true)); }
0
public void deleteClassification(final String guid, final String classificationName) throws AtlasBaseException { deleteClassification(guid, classificationName, null); } @Override @GraphTransaction public void deleteClassification(final String guid, final String classificationName, final String associatedEntityGuid) throws AtlasBaseException { if (StringUtils.isEmpty(classificationName)) { throw new AtlasBaseException(AtlasErrorCode.INVALID_PARAMETERS, "classifications not specified"); // verify authorization only for removal of directly associated classification and not propagated one. if (StringUtils.isEmpty(associatedEntityGuid) || guid.equals(associatedEntityGuid)) { AtlasAuthorizationUtils.verifyAccess(new AtlasEntityAccessRequest(typeRegistry, AtlasPrivilege.ENTITY_REMOVE_CLASSIFICATION, entityHeader, new AtlasClassification(classificationName)), "remove classification: guid=", guid, ", classification=", classificationName); LOG.debug("Deleting classification={} from entity={}", classificationName, guid); entityGraphMapper.deleteClassification(guid, classificationName, associatedEntityGuid);
0
import java.sql.SQLException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import com.google.inject.Inject; import com.google.inject.Injector;
0
public class FileHandle extends Handle {
0
import com.twitter.aurora.gen.TaskConfig; private final Set<TaskConfig> tasks; * Constructs a ParsedConfiguration object and populates the set of {@link TaskConfig}s for ImmutableSet.Builder<TaskConfig> builder = ImmutableSet.builder(); public Set<TaskConfig> getTaskConfigs() {
0
private static final long serialVersionUID = -4099655108069755015L;
0
maxBufferSize = AccumuloConfiguration.getFixedMemoryAsBytes(options.get(MAX_BUFFER_SIZE_OPT)); AccumuloConfiguration.getFixedMemoryAsBytes(option.getValue());
1
import javax.servlet.DispatcherType; import org.apache.felix.http.base.internal.handler.ErrorsMapping; import org.apache.felix.http.base.internal.handler.ServletHandler; import org.apache.felix.http.base.internal.handler.holder.FilterHolder; import org.apache.felix.http.base.internal.runtime.FilterInfo; private final FilterRegistry filterRegistry = new FilterRegistry(); public long getContextServiceId() { return this.serviceId; } public void removeAll() { // TODO - implement } public ServletHolder resolveServletByName(final String name) { return this.servletRegistry.resolveByName(name); } public void removeServlet(@Nonnull final ServletInfo info, final boolean destroy) { this.servletRegistry.removeServlet(info, destroy); } public void addFilter(@Nonnull final FilterHolder holder) this.filterRegistry.addFilter(holder); public void removeFilter(@Nonnull final FilterInfo info, final boolean destroy) { this.filterRegistry.removeFilter(info, destroy); } public FilterHolder[] getFilterHolders(final ServletHolder servletHolder, DispatcherType dispatcherType, String requestURI) { return this.filterRegistry.getFilterHolders(servletHolder, dispatcherType, requestURI); } public void removeErrorPage(ServletInfo servletInfo) { // TODO Auto-generated method stub } public void addErrorPage(ServletHandler handler, String[] errorPages) { // TODO Auto-generated method stub } public ErrorsMapping getErrorsMapping() { return new ErrorsMapping(); }
0
import java.util.HashSet; import java.util.Set; private Long createTime; private Set<String> changedConfigTypes = new HashSet<>(); public ConfigsUpdateEvent(ServiceConfigEntity configs, String configGroupName, List<String> hostNames, Set<String> changedConfigTypes) { this.createTime = configs.getCreateTimestamp(); this.changedConfigTypes = changedConfigTypes; public Long getCreateTime() { return createTime; public void setCreateTime(Long createTime) { this.createTime = createTime; public Set<String> getChangedConfigTypes() { return changedConfigTypes; } public void setChangedConfigTypes(Set<String> changedConfigTypes) { this.changedConfigTypes = changedConfigTypes; }
1
import java.util.List; final List<PublicSuffixList> lists = new PublicSuffixListParser().parseByType( return new PublicSuffixMatcher(lists);
0
import org.apache.commons.vfs.FileSystemManager; import org.apache.commons.vfs.FilesCache; private final FilesCache files; /** * FileSystemManager which requested this filesystem */ private final FileSystemManager manager; protected AbstractFileSystem(final FileSystemManager manager, final FileName rootName, this.manager = manager; this.files = manager.getFilesCache(); files.clear(this); files.putFile(file); // files.put(file.getName(), file); return files.getFile(this, name); // return (FileObject) files.get(name); // [email protected] ==> use getFile FileObject file = getFile(name); // FileObject file = (FileObject) files.get(name); // [email protected] ==> use putFile putFile(file); // files.put(name, file); /** * Return the FileSystemOptions used to instantiate this filesystem */ * Return the FileSystemManager used to instantiate this filesystem */ public FileSystemManager getFileSystemManager() { return manager; } /**
0
protected Map<CacheKey, WrapDynaClass> initialValue() { return new WeakHashMap<>(); } final Map cache = CLASSLOADER_CACHE.get(); return propertiesMap.get(name); return properties; return descriptorsMap.get(name); final PropertyUtilsBean propUtils = pu != null ? pu : PropertyUtilsBean.getInstance(); return dynaClass;
0
/** * Gets the described dependency. * @return the dependency. */
0
/* * Copyright 1999-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cocoon.forms.samples; /** * Description of Sex. * @version $Id$ */ public class Sex { public static final Sex MALE = new Sex("M"); public static final Sex FEMALE = new Sex("F"); private String code; private Sex(String code) { this.code = code; } public String toString() { // Will probably have some i18n support here switch(code.charAt(0)) { case 'M' : return this.getClass().getName() + ".MALE"; case 'F' : return this.getClass().getName() + ".FEMALE"; default : return "unknown"; // Should never happen } } }
0
import org.apache.beam.model.fnexecution.v1.BeamFnApi; import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionRequest; import org.apache.beam.model.fnexecution.v1.BeamFnApi.InstructionResponse; import org.apache.beam.model.fnexecution.v1.BeamFnApi.LogControl; import org.apache.beam.model.fnexecution.v1.BeamFnControlGrpc; import org.apache.beam.model.fnexecution.v1.BeamFnLoggingGrpc;
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//jxpath/src/java/org/apache/commons/jxpath/ri/compiler/ExpressionPath.java,v 1.8 2003/03/11 00:59:22 dmitri Exp $ * $Revision: 1.8 $ * $Date: 2003/03/11 00:59:22 $ * Copyright (c) 1999-2003 The Apache Software Foundation. All rights * @version $Revision: 1.8 $ $Date: 2003/03/11 00:59:22 $
0
private static void removeContent(Node node) { while (node.hasChildNodes()) { node.removeChild(node.getFirstChild());
0
* <p>Allows for {@link BeamFnApi.RegisterRequest}s to occur in parallel with subsequent requests * that may lookup registered values by blocking lookups until registration occurs. BeamFnApi.InstructionResponse.Builder response = BeamFnApi.InstructionResponse.newBuilder() .setRegister(RegisterResponse.getDefaultInstance()); for (BeamFnApi.ProcessBundleDescriptor processBundleDescriptor : registerRequest.getProcessBundleDescriptorList()) { LOG.debug( "Registering {} with type {}", for (Map.Entry<String, RunnerApi.Coder> entry : processBundleDescriptor.getCodersMap().entrySet()) { LOG.debug("Registering {} with type {}", entry.getKey(), entry.getValue().getClass());
1
return new TestSuite(LocaleConvertUtilsTestCase.class);
0
if (!isZKServerRunning()) {
0
StringBuilder args = new StringBuilder("new int[] { ");
0
import org.apache.beam.runners.core.construction.TransformInputs; return (InputT) Iterables.getOnlyElement( TransformInputs.nonAdditionalInputs(getCurrentTransform(transform)));
0
import org.apache.samza.application.SamzaApplication; import org.apache.samza.application.descriptors.ApplicationDescriptor; import org.apache.samza.application.descriptors.ApplicationDescriptorImpl; import org.apache.samza.application.descriptors.ApplicationDescriptorUtil; import org.apache.samza.context.ExternalContext; import org.apache.samza.runtime.ApplicationRunner; import org.apache.samza.runtime.ContainerLaunchUtil; import org.apache.samza.util.SamzaUncaughtExceptionHandler; public class BeamContainerRunner implements ApplicationRunner { private final ApplicationDescriptorImpl<? extends ApplicationDescriptor> appDesc; public BeamContainerRunner(SamzaApplication app, Config config) { this.appDesc = ApplicationDescriptorUtil.getAppDescriptor(app, config); } @Override public void run(ExternalContext externalContext) { Thread.setDefaultUncaughtExceptionHandler( new SamzaUncaughtExceptionHandler( () -> { LOG.info("Exiting process now."); System.exit(1); })); ContainerLaunchUtil.run( appDesc, System.getenv(ShellCommandConfig.ENV_CONTAINER_ID()), ContainerCfgFactory.jobModel); } @Override public void kill() { // Do nothing. Yarn will kill the container. public ApplicationStatus status() { // The container is running during the life span of this object. // Container run is synchronous // so calling waitForFinish() after run() should return immediately // Container run is synchronous // so calling waitForFinish() after run() should return immediately
0
import java.util.Optional; Optional.empty());
0
import java.util.List; private List<RepositorySetting> repoSettings; processRepoSettings(); //todo: /** * Parse stack repo info stored in the blueprint_settings table * @return set of repositories * */ private void processRepoSettings(){ repoSettings = new ArrayList<RepositorySetting>(); if (setting != null){ Set<HashMap<String, String>> settingValue = setting.getSettingValue(Setting.SETTING_NAME_REPOSITORY_SETTINGS); for (Map<String, String> setting : settingValue) { RepositorySetting rs = parseRepositorySetting(setting); repoSettings.add(rs); } } } private RepositorySetting parseRepositorySetting(Map<String, String> setting){ RepositorySetting result = new RepositorySetting(); result.setOperatingSystem(setting.get(RepositorySetting.OPERATING_SYSTEM)); result.setOverrideStrategy(setting.get(RepositorySetting.OVERRIDE_STRATEGY)); result.setRepoId(setting.get(RepositorySetting.REPO_ID)); result.setBaseUrl(setting.get(RepositorySetting.BASE_URL)); return result; } public List<RepositorySetting> getRepositorySettings(){ return repoSettings; }
0
import java.io.FileNotFoundException; FileStatus[] files = null; try { files = fs.listStatus(instanceDirectory); } catch (FileNotFoundException ex) { // ignored }
0
} else if ( HTML_BODY.name().equals(m) ) { } else if ( JSON.name().equals(m) ) { } else if ( ZIP_FILE_TEXT.name().equals(m) ) { } else if ( ZIP_FILE_JSON.name().equals(m) ) {
0
@SuppressWarnings("all") public enum _Fields implements org.apache.thrift.TFieldIdEnum {
0
case INTEGER: intCoder.encode(value.getInteger(idx), outStream, context.nested()); break; case SMALLINT: case TINYINT: intCoder.encode((int) value.getShort(idx), outStream, context.nested()); break; case DOUBLE: doubleCoder.encode(value.getDouble(idx), outStream, context.nested()); break; case FLOAT: doubleCoder.encode(Double.parseDouble( String.valueOf(value.getFloat(idx))), outStream, context.nested()); break; case BIGINT: longCoder.encode(value.getLong(idx), outStream, context.nested()); break; case VARCHAR: stringCoder.encode(value.getString(idx), outStream, context.nested()); break; default: throw new UnsupportedDataTypeException(value.getDataType().getFieldsType().get(idx)); case INTEGER: record.addField(idx, intCoder.decode(inStream, context.nested())); break; case SMALLINT: record.addField(idx, intCoder.decode(inStream, context.nested()).shortValue()); break; case TINYINT: record.addField(idx, intCoder.decode(inStream, context.nested()).byteValue()); break; case DOUBLE: record.addField(idx, doubleCoder.decode(inStream, context.nested())); break; case FLOAT: record.addField(idx, doubleCoder.decode(inStream, context.nested()).floatValue()); break; case BIGINT: record.addField(idx, longCoder.decode(inStream, context.nested())); break; case VARCHAR: record.addField(idx, stringCoder.decode(inStream, context.nested())); break; default: throw new UnsupportedDataTypeException(type.getFieldsType().get(idx));
0
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sshd.common.channel.exception; import java.io.IOException; /** * @author <a href="mailto:[email protected]">Apache MINA SSHD Project</a> */ public class SshChannelException extends IOException { private static final long serialVersionUID = 7355720478400167933L; private final int channelId; public SshChannelException(int channelId, String message) { this(channelId, message, null); } public SshChannelException(int channelId, Throwable cause) { this(channelId, cause.getMessage(), cause); } public SshChannelException(int channelId, String message, Throwable cause) { super(message, cause); this.channelId = channelId; } public int getChannelId() { return channelId; } }
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
import org.apache.accumulo.core.util.MetadataTable; Scanner scanner = conn.createScanner(MetadataTable.NAME, Authorizations.EMPTY); MetadataTable.PREV_ROW_COLUMN.fetch(scanner); scanner.fetchColumnFamily(MetadataTable.LAST_LOCATION_COLUMN_FAMILY); scanner.fetchColumnFamily(MetadataTable.CURRENT_LOCATION_COLUMN_FAMILY); scanner.fetchColumnFamily(MetadataTable.FUTURE_LOCATION_COLUMN_FAMILY); if (key.getColumnFamily().equals(MetadataTable.LAST_LOCATION_COLUMN_FAMILY)) { if (key.getColumnFamily().equals(MetadataTable.CURRENT_LOCATION_COLUMN_FAMILY) || key.getColumnFamily().equals(MetadataTable.FUTURE_LOCATION_COLUMN_FAMILY)) { if (MetadataTable.PREV_ROW_COLUMN.hasColumns(key)) {
0
import org.w3c.dom.Attr; Attr attr = element.getAttributeNodeNS(null, "Id"); if (attr != null) { element.setIdAttributeNode(attr, true); Attr refAttr = refElem.getAttributeNodeNS(null, "Id"); if (refAttr != null) { refElem.setIdAttributeNode(refAttr, true); this.constructionElement.setIdAttributeNS(null, Constants._ATT_ID, true);
0
package org.ognl.test.objects; import java.util.HashMap; import java.util.Map; /** * */ public class TestImpl extends TestClass { public Map<String, String> getTheMap() { Map<String, String> map = new HashMap(); map.put("key", "value"); return map; } }
0
import com.google.inject.Inject; import org.apache.ambari.server.orm.dao.HostDAO; import org.apache.ambari.server.orm.entities.HostEntity; private HostEntity hostEntity; @Inject @Inject private HostDAO hostDAO; /** * Simple constructor, should be created using the Factory class. * @param hostName Host name * @param role Action to run * @param event Event on the host and component * @param command Type of command * @param hostDAO {@link org.apache.ambari.server.orm.dao.HostDAO} instance being injected */ @AssistedInject public HostRoleCommand(String hostName, Role role, ServiceComponentHostEvent event, RoleCommand command, HostDAO hostDAO, ExecutionCommandDAO executionCommandDAO) { this(hostName, role, event, command, false, hostDAO, executionCommandDAO); } /** * Simple constructor, should be created using the Factory class. * @param hostName Host name * @param role Action to run * @param event Event on the host and component * @param command Type of command * @param retryAllowed Whether the command can be repeated * @param hostDAO {@link org.apache.ambari.server.orm.dao.HostDAO} instance being injected */ @AssistedInject public HostRoleCommand(String hostName, Role role, ServiceComponentHostEvent event, RoleCommand command, boolean retryAllowed, HostDAO hostDAO, ExecutionCommandDAO executionCommandDAO) { this.hostDAO = hostDAO; this.executionCommandDAO = executionCommandDAO; this.hostEntity = this.hostDAO.findByName(hostName); public HostRoleCommand(@Assisted HostRoleCommandEntity hostRoleCommandEntity, HostDAO hostDAO, ExecutionCommandDAO executionCommandDAO) { this.hostDAO = hostDAO; this.executionCommandDAO = executionCommandDAO; this.hostEntity = hostRoleCommandEntity.getHostEntity(); hostRoleCommandEntity.setHostEntity(hostEntity); return hostEntity != null ? hostEntity.getHostName() : null;
0
* Autogenerated by Thrift Compiler (0.11.0)
0
/** * Parses a name=value specification, where the = and value are optional. * * @param buffer the buffer holding the name-value pair to parse * * @return the name-value pair, where the value is <code>null</code> * if no value is specified * * @throws ParseException in case of a parse error */ NameValuePair parseNameValuePair(CharArrayBuffer buffer, int indexFrom, int indexTo) throws ParseException ;
0
protected final AtomicInteger failedCollectorConnectionsCounter = new AtomicInteger(0); private final int RETRY_COUNT_BEFORE_COLLECTOR_FAILOVER = 3;
0
hdfs.addServiceComponent(Role.HDFS_CLIENT.name()); hdfs.addServiceComponent(Role.NAMENODE.name()); hdfs.addServiceComponent(Role.DATANODE.name()); mapred.addServiceComponent(Role.RESOURCEMANAGER.name()); hdfs.addServiceComponent(Role.HDFS_CLIENT.name()); hdfs.addServiceComponent(Role.NAMENODE.name()); hdfs.addServiceComponent(Role.DATANODE.name()); hdfs.addServiceComponent(Role.HDFS_CLIENT.name()); hdfs.addServiceComponent(Role.NAMENODE.name()); hdfs.addServiceComponent(Role.DATANODE.name()); hive.addServiceComponent(Role.HIVE_SERVER.name()); hdfs.addServiceComponent(Role.HDFS_CLIENT.name()); mapReduce.addServiceComponent(Role.MAPREDUCE_CLIENT.name()); hdfs.addServiceComponent(Role.HDFS_CLIENT.name()); hdfs.addServiceComponent(Role.NAMENODE.name()); hdfs.addServiceComponent(Role.DATANODE.name()); mapred.addServiceComponent(Role.RESOURCEMANAGER.name()); hdfs.addServiceComponent(Role.HDFS_CLIENT.name()); hdfs.addServiceComponent(Role.NAMENODE.name()); hdfs.addServiceComponent(Role.DATANODE.name()); mapred.addServiceComponent(Role.RESOURCEMANAGER.name());
0
private long startTime; private long expiryTime; private final String host; public HostRoleCommand(String host, Role role, RoleCommand cmd) { this.host = host; public Role getRole() { return role; } public HostRoleStatus getStatus() { return status; } public long getStartTime() { return startTime; } public long getExpiryTime() { return expiryTime; } public void setExpiryTime(long t) { expiryTime = t; } public String getHostName() { return this.host; }
0
/** * Test for GroupByNullKey. */ /** * DoFn extracting user and timestamp. */
0
import org.apache.hc.core5.annotation.NotThreadSafe; import org.apache.hc.core5.util.Args;
1
* Adds a cinclude tag for a FrameLayout's source to the resulting stream. * * <h2>Example XML:</h2> * <pre> * &lt;xy:z src="coplet://copletID"/&gt; * </pre> * * <h2>Applicable to:</h2> * <ul> * <li>{@link org.apache.cocoon.portal.layout.impl.FrameLayout}</li> * </ul> * * <h2>Parameters</h2> * <table><tbody> * <tr><th>aspect-name</th><td></td><td></td><td>String</td><td><code>"frame"</code></td></tr> * <tr><th>store</th><td></td><td>req</td><td>String</td><td><code>null</code></td></tr> * </tbody></table> * @version CVS $Id: FrameAspect.java,v 1.8 2004/04/25 20:09:34 haul Exp $
0
* Cache resource backed by a byte array on the heap. * * @since 4.1 class HeapResource implements Resource { private static final long serialVersionUID = -2078599905620463394L; private final byte[] b; public HeapResource(final byte[] b) { super(); this.b = b; byte[] getByteArray() { return this.b; public InputStream getInputStream() { return new ByteArrayInputStream(this.b); public long length() { return this.b.length; public void dispose() {
0
import org.junit.rules.ExpectedException; @Rule public transient ExpectedException thrown = ExpectedException.none(); public void testMissingFieldName() { thrown.expect(IllegalArgumentException.class); pipeline .apply(Create.of(new POJO("pass", 52, 2))) .apply(Filter.<POJO>create().whereFieldName("missing", f -> true)); pipeline.run(); } @Test @Category(NeedsRunner.class) public void testMissingFieldIndex() { thrown.expect(IllegalArgumentException.class); pipeline .apply(Create.of(new POJO("pass", 52, 2))) .apply(Filter.<POJO>create().whereFieldId(23, f -> true)); pipeline.run(); } @Test @Category(NeedsRunner.class)
0
final Map<Resource.Type, String> mapIds = new HashMap<>();
0
import java.text.ParseException; import java.text.SimpleDateFormat; private final SimpleDateFormat ISO8601_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX"); return ISO8601_DATE_FORMAT.format(f.getTime()); try { Calendar cc = Calendar.getInstance(); cc.setTime(ISO8601_DATE_FORMAT.parse(f)); return cc; } catch (ParseException e) { throw new ConversionException("Cannot convert " + f + " to Date", e); } public String apply(Date d) { return ISO8601_DATE_FORMAT.format(d); try { return ISO8601_DATE_FORMAT.parse(f); } catch (ParseException e) { throw new ConversionException("Cannot convert " + f + " to Date", e); }
0
EvaluationResult res = (EvaluationResult) pipeline.run();
0
Configuration conf, CryptoService cryptoService) throws IOException { long fileLength, Configuration conf, CryptoService cryptoService) throws IOException { InputStreamType fin, Configuration conf, CryptoService cryptoService) throws IOException {
0
public ResultSetDynaClass(final ResultSet resultSet) throws SQLException { public ResultSetDynaClass(final ResultSet resultSet, final boolean lowerCase) public ResultSetDynaClass(final ResultSet resultSet, final boolean lowerCase, final boolean useColumnLabel) public Object getObjectFromResultSet(final String name) throws SQLException { protected Class<?> loadClass(final String className) throws SQLException { catch (final Exception e) {
0
import static org.apache.beam.sdk.io.elasticsearch.ElasticsearchIOTestCommon.getEsIndex; READ(getEsIndex()), WRITE(getEsIndex() + System.currentTimeMillis()), WRITE_PARTIAL(getEsIndex() + "_partial_" + System.currentTimeMillis());
0
final StandardFileSystemManager mngr = new StandardFileSystemManager(); public void subBuildStarted(final BuildEvent buildEvent) public void subBuildFinished(final BuildEvent buildEvent) public void buildFinished(final BuildEvent event) public void buildStarted(final BuildEvent event) public void messageLogged(final BuildEvent event) public void targetFinished(final BuildEvent event) public void targetStarted(final BuildEvent event) public void taskFinished(final BuildEvent event) public void taskStarted(final BuildEvent event) public void debug(final Object o, final Throwable throwable) public void error(final Object o) public void error(final Object o, final Throwable throwable) public void fatal(final Object o) public void fatal(final Object o, final Throwable throwable) public void info(final Object o) public void info(final Object o, final Throwable throwable) public void trace(final Object o) public void trace(final Object o, final Throwable throwable) public void warn(final Object o) public void warn(final Object o, final Throwable throwable)
1
import com.google.cloud.dataflow.sdk.transforms.windowing.WindowFn; /** * For internal use only. */ public WindowFn getWindowFnInternal();
0
&& (GenericUtils.safeCompare(this.getHostName(), that.getHostName(), false) == 0); return GenericUtils.hashCode(getHostName(), Boolean.FALSE) + getPort();
0
protected void setupMethod(final HttpMethod method) throws FileSystemException, URIException
0
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import org.junit.Test; public class AggregatorConfigurationTest { @Test @Test @SuppressWarnings("deprecation") org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig ac3 = new org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig(colf, "com.foo.SuperAgg"); org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig ac4 = org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig.decodeColumns( encodedCols, "com.foo.SuperAgg"); @SuppressWarnings("deprecation") org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig ac = new org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig(colf, colq, "com.foo.SuperAgg"); org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig ac2 = org.apache.accumulo.core.iterators.conf.PerColumnIteratorConfig.decodeColumns( encodedCols, "com.foo.SuperAgg");
0
String projectId, String datasetId, String location, String description, Long defaultTableExpirationMs /* ignored */)
0
import org.apache.http.nio.NHttpClientHandler;
0
import org.apache.felix.systemready.osgi.examples.CompWithoutService;
0
package aQute.libg.clauses; import java.util.*; import aQute.libg.log.*; import aQute.libg.qtokens.*; public class Clauses extends LinkedHashMap<String,Map<String,String>>{ private static final long serialVersionUID = 1L; /** * Standard OSGi header parser. This parser can handle the format clauses * ::= clause ( ',' clause ) + clause ::= name ( ';' name ) (';' key '=' * value ) * * This is mapped to a Map { name => Map { attr|directive => value } } * * @param value * @return * @throws MojoExecutionException */ static public Clauses parse(String value, Logger logger) { if (value == null || value.trim().length() == 0) return new Clauses(); Clauses result = new Clauses(); QuotedTokenizer qt = new QuotedTokenizer(value, ";=,"); char del; do { boolean hadAttribute = false; Clause clause = new Clause(); List<String> aliases = new ArrayList<String>(); aliases.add(qt.nextToken()); del = qt.getSeparator(); while (del == ';') { String adname = qt.nextToken(); if ((del = qt.getSeparator()) != '=') { if (hadAttribute) throw new IllegalArgumentException( "Header contains name field after attribute or directive: " + adname + " from " + value); aliases.add(adname); } else { String advalue = qt.nextToken(); clause.put(adname, advalue); del = qt.getSeparator(); hadAttribute = true; } } for (Iterator<String> i = aliases.iterator(); i.hasNext();) { String packageName = i.next(); if (result.containsKey(packageName)) { if (logger != null) logger .warning("Duplicate package name in header: " + packageName + ". Multiple package names in one clause not supported in Bnd."); } else result.put(packageName, clause); } } while (del == ','); return result; } }
0
import org.apache.accumulo.tserver.metrics.TabletServerScanMetricsKeys; import org.apache.accumulo.tserver.metrics.TabletServerUpdateMetricsKeys; metricsFactory = new TabletServerMetricsFactory(); scanMetrics.add(TabletServerScanMetricsKeys.SCAN, t2 - ss.startTime); scanMetrics.add(TabletServerScanMetricsKeys.RESULT_SIZE, ss.entriesReturned); updateMetrics.add(TabletServerUpdateMetricsKeys.PERMISSION_ERRORS, 0); updateMetrics.add(TabletServerUpdateMetricsKeys.UNKNOWN_TABLET_ERRORS, 0); updateMetrics.add(TabletServerUpdateMetricsKeys.PERMISSION_ERRORS, 0); updateMetrics.add(TabletServerUpdateMetricsKeys.UNKNOWN_TABLET_ERRORS, 0); updateMetrics.add(TabletServerUpdateMetricsKeys.PERMISSION_ERRORS, 0); updateMetrics.add(TabletServerUpdateMetricsKeys.MUTATION_ARRAY_SIZE, mutations.size()); updateMetrics.add(TabletServerUpdateMetricsKeys.CONSTRAINT_VIOLATIONS, 0); updateMetrics.add(TabletServerUpdateMetricsKeys.WALOG_WRITE_TIME, time); updateMetrics.add(TabletServerUpdateMetricsKeys.COMMIT_TIME, (long) ((time) / (double) size)); updateMetrics.add(TabletServerUpdateMetricsKeys.COMMIT_PREP, (long) ((time) / (double) size));
0
import java.nio.charset.Charset; private static final Charset UTF8 = Charset.forName("UTF-8"); log.trace("zookeeper contained " + zPath + " " + (data == null ? null : new String(data, UTF8))); log.trace("putting " + zPath + " " + (data == null ? null : new String(data, UTF8)) + " in cache");
0
package org.apache.http.nio.testserver;
0
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p/> * http://www.apache.org/licenses/LICENSE-2.0 * <p/> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.view.hive20.resources.uploads; import org.apache.ambari.view.hive20.resources.uploads.query.TableInfo; /** * used as input in REST call */ class TableInput extends TableInfo { public Boolean isFirstRowHeader = Boolean.FALSE; public TableInput() { } public Boolean getIsFirstRowHeader() { return isFirstRowHeader; } public void setIsFirstRowHeader(Boolean isFirstRowHeader) { this.isFirstRowHeader = isFirstRowHeader; } public void validate(){ if( null == this.getHiveFileType()){ throw new IllegalArgumentException("fileType parameter cannot be null."); } if( null == this.getTableName()){ throw new IllegalArgumentException("tableName parameter cannot be null."); } if( null == this.getDatabaseName()){ throw new IllegalArgumentException("databaseName parameter cannot be null."); } } }
0
* @author <a href="mailto:[email protected]">Thomas DeWeese</a>
0
Optional.absent());
0
import org.apache.accumulo.server.zookeeper.IZooReaderWriter; IZooReaderWriter zoo = ZooReaderWriter.getInstance();
0
public void finishSpecifyingOutput( String transformName, PInput input, PTransform<?, ?> transform) { // All component PCollections will already have been finished. Update their names if // appropriate. : pcollectionMap.entrySet()) { if (pc.getName().equals(PValueBase.defaultName(transformName))) { pc.setName(String.format("%s.%s", transformName, tag.getOutName(i))); }
0
/** {@link org.apache.commons.logging} logging facility */ static org.apache.commons.logging.Log log = org.apache.commons.logging.LogFactory.getLog(CreateMerlinsExampleSixteen.class.getName());
0