Columns: Diff (string, length 5 to 2k) and FaultInducingLabel (int64, values 0 or 1)
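The two columns pair a flattened code diff with a binary label. A minimal sketch of how such rows could be loaded and counted, assuming the table is exported as a JSON Lines file named diffs.jsonl with one {"Diff": ..., "FaultInducingLabel": ...} object per line; the filename, the JSONL layout, and the reading of 1 as "fault-inducing" are assumptions for illustration, not part of the dataset:

```python
import json

def load_rows(path="diffs.jsonl"):
    """Read (diff_text, label) pairs from a hypothetical JSONL export of the table."""
    rows = []
    with open(path, encoding="utf-8") as fh:
        for line in fh:
            record = json.loads(line)
            diff_text = record["Diff"]                 # string column, roughly 5 to 2k characters
            label = int(record["FaultInducingLabel"])  # int64 column, 0 or 1 (presumably 1 = fault-inducing)
            rows.append((diff_text, label))
    return rows

if __name__ == "__main__":
    rows = load_rows()
    positives = sum(label for _, label in rows)
    print(f"{len(rows)} rows, {positives} labeled 1")
```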
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.aurora.benchmark.fakes; import com.google.common.base.Function; import com.google.common.base.Optional; import org.apache.aurora.scheduler.HostOffer; import org.apache.aurora.scheduler.async.OfferManager; import org.apache.aurora.scheduler.base.TaskGroupKey; import org.apache.aurora.scheduler.events.PubsubEvent; import org.apache.aurora.scheduler.state.TaskAssigner; import org.apache.mesos.Protos; public class FakeOfferManager implements OfferManager { @Override public void addOffer(HostOffer offer) { // no-op } @Override public void cancelOffer(Protos.OfferID offer) { // no-op } @Override public boolean launchFirst( Function<HostOffer, TaskAssigner.Assignment> acceptor, TaskGroupKey groupKey) throws LaunchException { return false; } @Override public void hostAttributesChanged(PubsubEvent.HostAttributesChanged change) { // no-op } @Override public Iterable<HostOffer> getOffers() { return null; } @Override public Optional<HostOffer> getOffer(Protos.SlaveID slaveId) { return Optional.absent(); } }
0
for (final String name : names) { } catch (final CloneNotSupportedException ex) { final BasicHttpParams clone = (BasicHttpParams) super.clone(); for (final Map.Entry<String, Object> me : this.parameters.entrySet()) {
0
LOG.trace("Putting monitor to sleep for " + threadWakeupInterval + " " + "milliseconds"); Thread.sleep(threadWakeupInterval); LOG.trace("Generated " + cmds.size() + " status commands for host: " + hostname);
0
this.heartbeatMonitor = new HeartbeatMonitor(fsm, aq, am, 60000, ambariMetaInfo);
0
package org.apache.commons.cli2.bug; import junit.framework.TestCase; import org.apache.commons.cli2.*; import org.apache.commons.cli2.builder.ArgumentBuilder; import org.apache.commons.cli2.builder.DefaultOptionBuilder; import org.apache.commons.cli2.builder.GroupBuilder; import org.apache.commons.cli2.commandline.Parser; import org.apache.commons.cli2.validation.FileValidator; /** * @author brianegge */ public class BugCLI122Test extends TestCase { public void testArgumentWhichStartsWithDash() throws OptionException { Argument wdArg = new ArgumentBuilder() .withName("anything") .withMaximum(1) .withMinimum(1) .withInitialSeparator('=') .create(); Option wdOpt = new DefaultOptionBuilder().withArgument(wdArg) .withDescription("anything, foo or -foo") .withLongName("argument") .withShortName("a") .create(); Group group = new GroupBuilder().withOption(wdOpt).create(); Parser p = new Parser(); p.setGroup(group); CommandLine normal = p.parse (new String[]{"-a", "foo"}); assertNotNull(normal); assertEquals(normal.getValue(wdOpt), "foo"); CommandLine withDash = p.parse (new String[]{"--argument", "\"-foo\""}); assertNotNull(withDash); assertEquals("-foo", withDash.getValue(wdOpt)); CommandLine withDashAndEquals = p.parse (new String[]{"--argument=-foo"}); assertNotNull(withDashAndEquals); assertEquals("-foo", withDashAndEquals.getValue(wdOpt)); } }
0
m_factoryName = cm.getAttribute("name"); if (m_isPublic) { // Exposition of the factory service m_sr = m_context.registerService(new String[] { Factory.class.getName() }, this, getProperties()); }
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/PriorityQueue.java,v 1.7 2003/08/31 17:26:44 scolebourne Exp $ * any, must include the following acknowledgement: * Alternately, this acknowledgement may appear in the software itself, * if and wherever such third-party acknowledgements normally appear. * @version $Revision: 1.7 $ $Date: 2003/08/31 17:26:44 $
0
import java.util.Map; ServerCnxn cnxn = pendingRevalidations.remove(sessionId); Map<Long, Integer> touchTable = zk.getTouchSnapshot();
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
0
private interface Options extends PipelineOptions {
0
import junit.framework.Assert;
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/decorators/Attic/TransformedSet.java,v 1.3 2003/11/16 00:39:37 scolebourne Exp $ * @version $Revision: 1.3 $ $Date: 2003/11/16 00:39:37 $ * @deprecated TO BE REMOVED BEFORE v3.0
0
* FIXME - Remove dependency to Xerces (XPathAPI) * * @version $Id: History.java,v 1.3 2004/03/01 21:00:27 cziegeler Exp $
0
private final RegexValidator regexValidator; private final int minLength; private final int maxLength; private final CheckDigit checkdigit; if (regex != null && regex.length() > 0) { this.regexValidator = new RegexValidator(regex); } else { this.regexValidator = null; } this.minLength = minLength; this.maxLength = maxLength; this.checkdigit = checkdigit; this.regexValidator = regexValidator; this.minLength = minLength; this.maxLength = maxLength; this.checkdigit = checkdigit;
0
* <p>Implementation of <strong>org.apache.commons.digester.Digester</strong>
0
import org.slf4j.Logger; import org.slf4j.LoggerFactory; private static final Logger LOG = LoggerFactory.getLogger(FileTxnSnapLog.class);
0
import org.apache.beam.sdk.fn.channel.ManagedChannelFactory;
0
package com.twitter.mesos.scheduler.events; import com.google.common.eventbus.EventBus; import com.google.inject.Binder; import com.google.inject.PrivateModule; import com.google.inject.Provides; import com.google.inject.Singleton; import com.twitter.common.base.Closure; import com.twitter.common.inject.ProviderMethodModule; import com.twitter.mesos.scheduler.SchedulingFilter; import com.twitter.mesos.scheduler.events.NotifyingSchedulingFilter.Delegate; import com.twitter.mesos.scheduler.events.TaskPubsubEvent.EventSubscriber; /** * Binding module for plumbing event notifications. */ public final class TaskEventModule extends ProviderMethodModule { private final EventBus taskEventBus = new EventBus("TaskEvents"); private TaskEventModule() { // Must be constructed through factory. } /** * Binds a task event module. * * @param binder Binder to bind against. * @param filterClass Delegate scheduling filter implementation class. */ public static void bind(Binder binder, final Class<? extends SchedulingFilter> filterClass) { binder.install(new PrivateModule() { @Override protected void configure() { bind(SchedulingFilter.class).annotatedWith(Delegate.class).to(filterClass); bind(SchedulingFilter.class).to(NotifyingSchedulingFilter.class); bind(NotifyingSchedulingFilter.class).in(Singleton.class); expose(SchedulingFilter.class); } }); binder.install(new TaskEventModule()); } @Provides @Singleton Closure<TaskPubsubEvent> provideEventSink() { return new Closure<TaskPubsubEvent>() { @Override public void execute(TaskPubsubEvent event) { taskEventBus.post(event); } }; } @Provides @Singleton Closure<EventSubscriber> provideSubecriberSink() { return new Closure<EventSubscriber>() { @Override public void execute(EventSubscriber subscriber) { taskEventBus.register(subscriber); } }; } }
0
import org.apache.http.conn.SecureSocketFactory;
0
* ArrayList<String> list = new ArrayList<String>(); return list.toArray(new String[list.size()]);
0
package org.apache.bcel.data;
1
* Gets the summary of the task or {@code null}. * * @return the task summary or {@code null}. */ public String getSummary() { return summary; } /**
0
import org.apache.accumulo.core.file.blockfile.cache.BlockCacheManager; import org.apache.accumulo.core.file.blockfile.cache.CacheType; import org.apache.accumulo.core.file.blockfile.cache.lru.LruBlockCache; import org.apache.accumulo.core.file.blockfile.cache.lru.LruBlockCacheManager; private BlockCacheManager manager; DefaultConfiguration dc = DefaultConfiguration.getInstance(); ConfigurationCopy cc = new ConfigurationCopy(dc); cc.set(Property.TSERV_CACHE_MANAGER_IMPL, LruBlockCacheManager.class.getName()); try { manager = BlockCacheManager.getInstance(cc); } catch (Exception e) { throw new RuntimeException("Error creating BlockCacheManager", e); } cc.set(Property.TSERV_DEFAULT_BLOCKSIZE, Long.toString(100000)); cc.set(Property.TSERV_DATACACHE_SIZE, Long.toString(100000000)); cc.set(Property.TSERV_INDEXCACHE_SIZE, Long.toString(100000000)); manager.start(cc); LruBlockCache indexCache = (LruBlockCache) manager.getBlockCache(CacheType.INDEX); LruBlockCache dataCache = (LruBlockCache) manager.getBlockCache(CacheType.DATA); if (null != manager) { manager.stop(); }
0
/* $Id$ * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.digester3.annotations.failingtests; import static java.lang.String.format; import java.lang.annotation.Annotation; import java.lang.reflect.AnnotatedElement; import org.apache.commons.digester3.annotations.AnnotationHandler; import org.apache.commons.digester3.annotations.AnnotationHandlerFactory; public class FailingDigesterLoaderHandlerFactory implements AnnotationHandlerFactory { public <H extends AnnotationHandler<? extends Annotation, ? extends AnnotatedElement>> H newInstance( Class<H> type ) throws Exception { throw new Exception( format( "Impossible to create '%s' instances", type.getName() ) ); } }
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/test/org/apache/commons/collections/primitives/Attic/TestCharListIterator.java,v 1.2 2003/08/31 17:28:41 scolebourne Exp $ * any, must include the following acknowledgement: * Alternately, this acknowledgement may appear in the software itself, * if and wherever such third-party acknowledgements normally appear. * @version $Revision: 1.2 $ $Date: 2003/08/31 17:28:41 $
0
import org.apache.commons.lang.ObjectUtils; if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID)))) { throw new AmbariException("Upgrade pack can't be changed for repository version which is " + final String upgradePackage = propertyMap.get(REPOSITORY_VERSION_UPGRADE_PACK_PROPERTY_ID).toString(); entity.setUpgradePackage(upgradePackage); } if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID)))) { final Object operatingSystems = propertyMap.get(SUBRESOURCE_OPERATING_SYSTEMS_PROPERTY_ID); final String operatingSystemsJson = gson.toJson(operatingSystems); try { repositoryVersionHelper.parseOperatingSystems(operatingSystemsJson); } catch (Exception ex) { throw new AmbariException("Json structure for operating systems is incorrect", ex); } entity.setOperatingSystems(operatingSystemsJson); if (StringUtils.isNotBlank(ObjectUtils.toString(propertyMap.get(REPOSITORY_VERSION_DISPLAY_NAME_PROPERTY_ID)))) {
0
CryptoModuleParameters encryptSecretKey(CryptoModuleParameters params); CryptoModuleParameters decryptSecretKey(CryptoModuleParameters params);
0
/* * Copyright 2013 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
0
protected int estimateElementsLen(final HeaderElement[] elems) { boolean quote, protected int estimateHeaderElementLen(final HeaderElement elem) { protected int estimateParametersLen(final NameValuePair[] nvps) { final NameValuePair nvp, final boolean quote) { protected int estimateNameValuePairLen(final NameValuePair nvp) {
0
* Map of themes, single value currently */ private Map<String, ThemeModule> themeModules = new HashMap<String, ThemeModule>(); /** protected boolean valid = true; populateThemeModules(); mergeThemes(parentModule, allStacks, commonServices); private void populateThemeModules() { ThemeModule themeModule = new ThemeModule(serviceDirectory.getThemeFile()); if (themeModule.isValid()) { serviceInfo.setThemeInfo(themeModule.getModuleInfo()); themeModules.put(themeModule.getId(), themeModule); } //lets not fail if theme contain errors } /** * Merge theme modules. */ private void mergeThemes(ServiceModule parent, Map<String, StackModule> allStacks, Map<String, ServiceModule> commonServices) throws AmbariException { mergeChildModules(allStacks, commonServices, themeModules, parent.themeModules); }
0
public class EnableBackgroundResolver implements RelativeValueResolver { return SVGValueConstants.CSS_ENABLE_BACKGROUND_PROPERTY; return new CSSOMReadOnlyValue(SVGValueConstants.ACCUMULATE_VALUE);
0
/* * Copyright 2001-2004 The Apache Software Foundation. * Licensed under the Apache License, Version 2.0 (the "License") * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * @version $Id: HierarchicalConfiguration.java,v 1.3 2004/02/27 17:41:35 epugh Exp $
0
/** * A single key/value pair inside a Configuration. */
0
package org.apache.batik.anim.dom; import org.apache.batik.dom.svg.SVGPathSegConstants;
0
package org.apache.commons.vfs2.impl; import org.apache.commons.vfs2.FileObject; import org.apache.commons.vfs2.FileSelector; import org.apache.commons.vfs2.FileSystemException; import org.apache.commons.vfs2.Selectors; import org.apache.commons.vfs2.VfsLog; import org.apache.commons.vfs2.provider.AbstractVfsComponent; import org.apache.commons.vfs2.provider.FileReplicator; import org.apache.commons.vfs2.provider.TemporaryFileStore; import org.apache.commons.vfs2.provider.UriParser; import org.apache.commons.vfs2.util.Messages;
1
Set<String> applicationTags = yarnApp.getApplicationTags(); if (applicationTags != null && applicationTags.size() > 0) { for (String tag : applicationTags) { int index = tag.indexOf(':'); if (index > 0 && index < tag.length() - 1) { String key = tag.substring(0, index).trim(); String value = tag.substring(index + 1).trim(); if ("name".equals(key)) app.setType(value); else if ("version".equals(key)) app.setAppVersion(value); else if ("description".equals(key)) app.setDescription(value); } } }
0
import org.apache.beam.vendor.guava.v20_0.com.google.common.collect.ImmutableList;
0
throw (RuntimeSshException) new RuntimeSshException("Failed to get the session.", (Throwable) v);
0
() -> storage.write((NoResult.Quiet) storeProvider -> {
0
import com.google.cloud.dataflow.sdk.util.DefaultTrigger; import com.google.cloud.dataflow.sdk.util.WindowingStrategy; SerializableUtils.serializeToByteArray( new DoFnInfo(fn, WindowingStrategy.globalDefault()))); normalParDoFn.fnFactory.createDoFnInfo().getWindowingStrategy().getWindowFn(), Assert.assertThat( normalParDoFn.fnFactory.createDoFnInfo().getWindowingStrategy().getTrigger(), new IsInstanceOf(DefaultTrigger.class));
0
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.felix.ipojo.online.manipulator; import static org.apache.felix.ipojo.manipulator.util.Streams.close; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.net.URL; /** * User: guillaume * Date: 04/03/2014 * Time: 15:33 */ public class Files { public static void dump(URL url, File target) throws IOException { dump(url.openStream(), target); } public static void dump(InputStream stream, File target) throws IOException { BufferedInputStream in = new BufferedInputStream(stream); FileOutputStream file = new FileOutputStream(target); BufferedOutputStream out = new BufferedOutputStream(file); int i; while ((i = in.read()) != -1) { out.write(i); } out.flush(); close(in, out); } }
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//jxpath/src/java/org/apache/commons/jxpath/ri/compiler/LocationPath.java,v 1.4 2002/04/24 04:05:38 dmitri Exp $ * $Revision: 1.4 $ * $Date: 2002/04/24 04:05:38 $ * @version $Revision: 1.4 $ $Date: 2002/04/24 04:05:38 $
0
import org.apache.beam.sdk.schemas.SchemaCoder; import org.apache.beam.sdk.transforms.SerializableFunctions; SchemaCoder.of( PAYLOAD_SCHEMA, SerializableFunctions.identity(), SerializableFunctions.identity()),
0
import java.util.UUID; import org.apache.ambari.server.state.alert.SourceType; long clusterId = injector.getInstance(OrmTestHelper.class).createCluster( "cluster" + System.currentTimeMillis()); Cluster cluster = clusters.getClusterById(clusterId); List<AlertDefinitionEntity> definitions = dao.findAll(clusterId); // find all enabled for the cluster should find 6 (the ones from HDFS; // it will not find the agent alert since it's not bound to the cluster) definitions = dao.findAllEnabled(cluster.getClusterId()); assertEquals(6, definitions.size()); // create new definition AlertDefinitionEntity entity = new AlertDefinitionEntity(); entity.setClusterId(clusterId); entity.setDefinitionName("bad_hdfs_alert"); entity.setLabel("Bad HDFS Alert"); entity.setDescription("A way to fake a component being removed"); entity.setEnabled(true); entity.setHash(UUID.randomUUID().toString()); entity.setScheduleInterval(1); entity.setServiceName("HDFS"); entity.setComponentName("BAD_COMPONENT"); entity.setSourceType(SourceType.METRIC); entity.setSource("{\"type\" : \"METRIC\"}"); dao.create(entity); // verify the new definition is found (6 HDFS + 1 new one) definitions = dao.findAllEnabled(cluster.getClusterId()); assertEquals(7, definitions.size()); // reconcile, which should disable our bad definition metaInfo.reconcileAlertDefinitions(clusters); // find all enabled for the cluster should find 6 definitions = dao.findAllEnabled(cluster.getClusterId()); assertEquals(6, definitions.size()); // find all should find 6 HDFS + 1 disabled + 1 agent alert definitions = dao.findAll(); assertEquals(8, definitions.size()); entity = dao.findById(entity.getDefinitionId()); assertFalse(entity.getEnabled());
0
import org.apache.accumulo.core.util.CachedConfiguration; import org.apache.accumulo.server.fs.ViewFSUtils; // if path points to a viewfs path, then resolve to underlying filesystem if (ViewFSUtils.isViewFS(ns)) { Path newSource = ViewFSUtils.resolvePath(ns, source); if (!newSource.equals(source) && newSource.toUri().getScheme() != null) { ns = newSource.getFileSystem(CachedConfiguration.getInstance()); source = newSource; } } throw new IllegalStateException("Don't know how to recover a lease for " + ns.getClass().getName());
0
import org.apache.zookeeper.ZooKeeper; /** * this is only for testing purposes. * should never be useed otherwise */ public static boolean failCreate = false; if (lastSlash == -1 || path.indexOf('\0') != -1 || failCreate) { LOG.warn("Invalid path " + path + " with session " + Long.toHexString(request.sessionId)); throw new KeeperException.BadArgumentsException(); } try { ZooKeeper.validatePath(path); } catch(IllegalArgumentException ie) { LOG.warn("Invalid path " + path + " with session " + Long.toHexString(request.sessionId));
0
protected class CanvasSVGListener extends ExtendedSVGListener {
1
import java.io.File; import java.net.URISyntaxException; import javax.imageio.IIOException; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; Path path; File tempFile = new File(CUTFILE_KEY); if (tempFile.exists()) { path = new Path(CUTFILE_KEY); } else { path = new Path(cutFileName); } if (path == null) throw new FileNotFoundException("Cut point file not found in distributed cache"); TreeSet<Text> cutPoints = new TreeSet<>(); FileSystem fs = FileSystem.get(conf); FSDataInputStream inputStream = fs.open(path); try (Scanner in = new Scanner(inputStream)) { while (in.hasNextLine()) { cutPoints.add(new Text(Base64.getDecoder().decode(in.nextLine()))); cutPointArray = cutPoints.toArray(new Text[cutPoints.size()]); throw new IIOException("Cutpoint array not properly created from file" + path.getName()); URI uri; try { uri = new URI(file + "#" + CUTFILE_KEY); } catch (URISyntaxException e) { throw new IllegalStateException( "Unable to add split file \"" + CUTFILE_KEY + "\" to distributed cache."); }
0
import org.apache.beam.sdk.io.DynamicFileDestinations; import org.apache.beam.sdk.transforms.SerializableFunctions; StreamingShardedWriteFactory<Object, Void, Object> factory = WriteFiles<Object, Void, Object> original = WriteFiles.to(new TestSink(tmpFolder.toString()), SerializableFunctions.identity()); AppliedPTransform<PCollection<Object>, PDone, WriteFiles<Object, Void, Object>> originalApplication = AppliedPTransform.of( "writefiles", objs.expand(), Collections.<TupleTag<?>, PValue>emptyMap(), original, p); WriteFiles<Object, Void, Object> replacement = (WriteFiles<Object, Void, Object>) factory.getReplacementTransform(originalApplication).getTransform(); private static class TestSink extends FileBasedSink<Object, Void> { super( StaticValueProvider.of(FileSystems.matchNewResource(tmpFolder, true)), DynamicFileDestinations.constant(null)); public WriteOperation<Object, Void> createWriteOperation() {
0
/** * A specialized instance which is configured to suppress the special {@code class} * properties of Java beans. Unintended access to the property {@code class} (which is * common to all Java objects) can be a security risk because it also allows access to * the class loader. Adding this instance as {@code BeanIntrospector} to an instance * of {@code PropertyUtilsBean} suppresses the {@code class} property; it can then no * longer be accessed. */ public static final SuppressPropertiesBeanIntrospector SUPPRESS_CLASS = new SuppressPropertiesBeanIntrospector(Collections.singleton("class"));
0
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * This package contains implementations of the * {@link org.apache.commons.collections.Trie Trie} interface. * <p> * The implementations are in the form of direct implementations and decorators. * A decorator wraps another implementation of the interface to add some * specific additional functionality. * <p> * The following implementations are provided in the package: * <ul> * <li>PatriciaTrie - an implementation of a PATRICIA trie * </ul> * <p> * The following decorators are provided: * <ul> * <li>Synchronized - synchronizes method access for multi-threaded environments * <li>Unmodifiable - ensures the collection cannot be altered * </ul> * * @version $Id$ */ package org.apache.commons.collections.trie;
0
import org.apache.accumulo.core.client.AccumuloException; import org.apache.accumulo.core.client.AccumuloSecurityException; import org.apache.accumulo.core.client.TableExistsException; import org.apache.accumulo.core.client.TableNotFoundException; connector = ensureTraceTableExists(conf); int port = conf.getPort(Property.TRACE_PORT); final ServerSocket sock = ServerSocketChannel.open().socket(); sock.setReuseAddress(true); sock.bind(new InetSocketAddress(hostname, port)); final TServerTransport transport = new TServerSocket(sock); TThreadPoolServer.Args options = new TThreadPoolServer.Args(transport); options.processor(new Processor<Iface>(new Receiver())); server = new TThreadPoolServer(options); registerInZooKeeper(sock.getInetAddress().getHostAddress() + ":" + sock.getLocalPort(), conf.get(Property.TRACE_ZK_PATH)); writer = new AtomicReference<>(this.connector.createBatchWriter(table, new BatchWriterConfig().setMaxLatency(BATCH_WRITER_MAX_LATENCY, TimeUnit.SECONDS))); } /** * Exceptions thrown out of here should be things that cause service failure (e.g. misconfigurations that aren't likely to change on retry). * * @return a working Connection that can be reused * @throws ClassNotFoundException * if TRACE_TOKEN_TYPE is set to a class that we can't load. * @throws InstantiationException * if we fail to create an instance of TRACE_TOKEN_TYPE. * @throws IllegalAccessException * if the class pointed to by TRACE_TOKEN_TYPE is private. * @throws AccumuloSecurityException * if the trace user has the wrong permissions */ private Connector ensureTraceTableExists(final AccumuloConfiguration conf) throws AccumuloSecurityException, ClassNotFoundException, InstantiationException, IllegalAccessException { } catch (AccumuloException | TableExistsException | TableNotFoundException | IOException | RuntimeException ex) { return connector;
0
* Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at
0
import java.util.Optional; assertEquals(Optional.empty(), select());
0
pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder); pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder); pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder); pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder); pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, myIntegerCoder); pipeline.getCoderRegistry().registerCoderForClass(MyInteger.class, MyIntegerCoder.of());
0
if (nextLineTabStop >= width) { // stops infinite loop happening throw new IllegalStateException("Total width is less than the width of the argument and indent " + "- no room for the description"); }
0
import org.apache.beam.runners.core.KeyedWorkItem; import org.apache.beam.runners.core.KeyedWorkItems;
0
private GroupByKeyIterator<String, Integer, GlobalWindow> createGbkIterator() { ItemFactory(
0
public InetAddress[] resolve(final String host) throws UnknownHostException {
0
return ASTArrayAccess.evaluateExpr(obj, val, getUberspect());
0
import org.apache.ambari.server.orm.entities.RepoOsEntity; RepoOsEntity osEntity = repoVersionHelper.getOSEntityForHost(host, repoVersion);
0
package org.apache.felix.moduleloader;
1
import org.apache.atlas.TestModules; @Guice(modules = {TestModules.TestOnlyModule.class}) // AtlasGraphProvider.cleanup();
0
protected void map(Key key, Value value, Context context) { protected void cleanup(Context context) {
0
import org.apache.hc.core5.http.impl.io.DefaultBHttpClientConnection; import org.apache.hc.core5.http.io.HttpRequestHandler; import org.apache.hc.core5.http.testserver.io.HttpClient; import org.apache.hc.core5.http.testserver.io.HttpServer;
1
import org.apache.avalon.framework.activity.Startable; Startable, public void start() throws Exception { public void stop( ) {
0
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zookeeper.server.util; import org.apache.log4j.Logger; public class Profiler { private static final Logger LOG = Logger.getLogger(Profiler.class); public interface Operation<T> { public T execute() throws Exception; } public static <T> T profile(Operation<T> op, long timeout, String message) throws Exception { long start = System.currentTimeMillis(); T res = op.execute(); long end = System.currentTimeMillis(); if (end - start > timeout) { LOG.warn("Elapsed "+(end - start) + " ms: " + message); } return res; } }
0
Set<ByteBuffer> splits = new HashSet<ByteBuffer>(); splits.add(ByteBuffer.wrap("a".getBytes())); splits.add(ByteBuffer.wrap("c".getBytes())); splits.add(ByteBuffer.wrap("z".getBytes())); tpc.proxy().tableOperations_merge(userpass, testtable, ByteBuffer.wrap("b".getBytes()), ByteBuffer.wrap("d".getBytes())); List<ByteBuffer> tableSplits = tpc.proxy().tableOperations_getSplits(userpass, testtable, 10); for (ByteBuffer split : tableSplits) Set<ByteBuffer> splits = new HashSet<ByteBuffer>(); splits.add(ByteBuffer.wrap("a".getBytes())); splits.add(ByteBuffer.wrap("b".getBytes())); splits.add(ByteBuffer.wrap("z".getBytes())); List<ByteBuffer> tableSplits = tpc.proxy().tableOperations_getSplits(userpass, testtable, 10); for (ByteBuffer split : tableSplits) assertEquals(tpc.proxy().tableOperations_getMaxRow(userpass, testtable, auths, null, true, null, true), ByteBuffer.wrap("9".getBytes())); tpc.proxy().tableOperations_deleteRows(userpass,testtable,ByteBuffer.wrap("51".getBytes()), ByteBuffer.wrap("99".getBytes())); assertEquals(tpc.proxy().tableOperations_getMaxRow(userpass, testtable, auths, null, true, null, true), ByteBuffer.wrap("5".getBytes()));
0
public static ZooKeeperConfig create(Iterable<InetSocketAddress> servers) {
0
/** * Returns the document descriptor associated with the latest created * document. * @return null if no document or descriptor was previously generated. */ DocumentDescriptor getDocumentDescriptor();
0
private HttpCache mockCache; HttpCache cache = new BasicHttpCache(MAX_ENTRIES);
0
* nor may "Apache" appear in their name, without import org.apache.commons.net.io.SocketInputStream;
0
import org.apache.beam.sdk.runners.PTransformOverrideFactory.ReplacementOutput; import org.apache.beam.sdk.values.PValue; WriteFilesResult<Void> originalResult = objs.apply(original); WriteFilesResult<Void> replacementResult = objs.apply(replacement); Map<PValue, ReplacementOutput> res = factory .mapOutputs(originalResult.expand(), replacementResult); assertEquals(1, res.size()); assertEquals( originalResult.getPerDestinationOutputFilenames(), res.get(replacementResult.getPerDestinationOutputFilenames()).getOriginal().getValue()); return new WriteOperation<Void, Object>(this) { @Override public Writer<Void, Object> createWriter() { throw new UnsupportedOperationException(); } };
0
} catch (NumberFormatException nfEx ) { (ctx, filterElement, nfEx, ERR_ATTRIBUTE_VALUE_MALFORMED, new Object[] {SVG_RADIUS_ATTRIBUTE, s, nfEx });
0
private static final Runnable DUMMY_RUNNABLE = new Runnable() { public void run() {}; }; // for now just put some non-null value in there so we can never // get a race condition when two threads enter this section after // one another (causing sheduleNext() to be invoked twice below) m_active = DUMMY_RUNNABLE;
0
/** Apex {@link PipelineRunner} for testing. */ return delegate.run(pipeline).getApexDAG(); @SuppressWarnings("Finally")
0
* @author <A HREF="mailto:[email protected]">M. Dahm</A>
0
public interface MetadataService {
0
package org.apache.felix.sigil.eclipse.ui.internal.editors.project;
0
public void testOptionalAttributesInMetaData() throws IOException, XmlPullParserException { String name = "myattribute"; String value = "working"; String localization = "test"; String empty = "<MetaData " + name + "=\"" + value + "\" localization=\"" + localization + "\" />"; MetaData mti = read( empty ); assertEquals( localization, mti.getLocalePrefix() ); assertNull( mti.getObjectClassDefinitions() ); assertNotNull( mti.getOptionalAttributes() ); assertEquals( 1, mti.getOptionalAttributes().size() ); assertEquals( value, mti.getOptionalAttributes().get(name) ); }
0
IZooReaderWriter zk = ZooReaderWriter.getInstance();
0
// !!! a) if we are reverting, that can only happen via PATCH or MAINT // b) if orchestration is a revertible type (on upgrade) summary.isSwitchBits = m_isRevert || m_orchestration.isRevertable(); /** * MAINT or PATCH upgrades are meant to just be switching the bits and no other * incompatible changes. */ @SerializedName("isSwitchBits") public boolean isSwitchBits = false;
0
log.warn("Could not find class named '" + className + "' in testClassLoad.", e);
0
import org.apache.flink.runtime.checkpoint.OperatorSubtaskState; OperatorSubtaskState snapshot = testHarness.snapshot(0, 0);
0
import cz.seznam.euphoria.core.annotation.audience.Audience; import cz.seznam.euphoria.core.executor.graph.DAG; @Audience(Audience.Type.CLIENT)
0
package org.apache.xml.security.test.signature; import junit.framework.Test; import junit.framework.TestSuite; public class AllTests { public static Test suite() { TestSuite suite = new TestSuite( "Test for org.apache.xml.security.test.signature"); //$JUnit-BEGIN$ suite.addTest(CreateSignatureTest.suite()); suite.addTestSuite(X509DataTest.class); suite.addTest(XMLSignatureInputTest.suite()); suite.addTest(UnknownAlgoSignatureTest.suite()); //$JUnit-END$ return suite; } }
0
import org.apache.flink.streaming.api.functions.sink.SinkFunction; top.addSink(new SinkFunction<Tuple3<Long, String, Double>>() { @Override public void invoke(Tuple3<Long, String, Double> value, SinkFunction.Context context) throws Exception { Date now = new Date(); Date stamp = new Date(value.f0); System.out.println(now + ": " + stamp + ", " + value.f1 + ", " + value.f2); } private static class TopAllWindow implements AllWindowFunction< Tuple2<String, Double>, Tuple3<Long, String, Double>, TimeWindow> { private static class Joiner implements FlatJoinFunction<Tuple2< String, Integer>, Tuple2<String, Integer>, Tuple2<String, Double>> {
0
* any, must include the following acknowledgement: * Alternately, this acknowledgement may appear in the software itself, * if and wherever such third-party acknowledgements normally appear.
0
package org.apache.xml.security.test.algorithms; import junit.framework.Test; import junit.framework.TestSuite; public class AllTests { public static Test suite() { TestSuite suite = new TestSuite( "Test for org.apache.xml.security.test.algorithms"); //$JUnit-BEGIN$ suite.addTest(SignatureAlgorithmTest.suite()); //$JUnit-END$ return suite; } }
0
import org.apache.hc.core5.http.nio.SessionInputBuffer;
1
package org.apache.felix.sigil.eclipse.property; import org.apache.felix.sigil.eclipse.SigilCore;
0
import static org.apache.hadoop.yarn.server.applicationhistoryservice.metrics.timeline.PhoenixTransactSQL.DefaultCondition; Condition condition = new DefaultCondition(null, null, null, null, startTime, Condition condition = new DefaultCondition(null, null, null, null, startTime,
0
import static org.junit.Assert.assertEquals; import org.apache.accumulo.tracer.thrift.RemoteSpan; import org.apache.htrace.HTraceConfiguration; import org.apache.htrace.Span; import org.apache.htrace.impl.MilliSpan; import org.junit.Test; protected String getSpanKey(Map<String,String> data) {
0
return (getState().isRemovableState());
0
IResourceAggregate.build(new ResourceAggregate(3, 3, 3)), quotaInfo.getProdConsumption()); IResourceAggregate.build(new ResourceAggregate(2, 2, 2)), quotaInfo.getNonProdConsumption()); assertEquals(ResourceAggregates.none(), quotaInfo.getProdConsumption()); assertEquals(ResourceAggregates.none(), quotaInfo.getNonProdConsumption());
0
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/functors/FunctorUtils.java,v 1.3 2003/11/23 23:25:33 scolebourne Exp $ import org.apache.commons.collections.Transformer; * @version $Revision: 1.3 $ $Date: 2003/11/23 23:25:33 $ /** * Copy method * * @param transformers the transformers to copy */ static Transformer[] copy(Transformer[] transformers) { if (transformers == null) { return null; } return (Transformer[]) transformers.clone(); } /** * Validate method * * @param transformers the transformers to validate */ static void validate(Transformer[] transformers) { if (transformers == null) { throw new IllegalArgumentException("The transformer array must not be null"); } for (int i = 0; i < transformers.length; i++) { if (transformers[i] == null) { throw new IllegalArgumentException( "The transformer array must not contain a null transformer, index " + i + " was null"); } } }
0
printConfigurationInventory( pw, PrinterMode.TEXT, handler ); printConfigurationInventory( pw, PrinterMode.JSON, handler ); if ( handler.supports(PrinterMode.HTML_FRAGMENT) ) { handler.print(PrinterMode.HTML_FRAGMENT, pw, false); } else if ( handler.supports(PrinterMode.TEXT) ) { pw.enableFilter( true ); handler.print(PrinterMode.TEXT, pw, false); pw.enableFilter( false ); handler.print(PrinterMode.JSON, pw, false); pw.print("<button type=\"button\" class=\"downloadZip\" style=\"float: right; margin-right: 30px; margin-top: 5px;\">Download As Zip</button>"); if ( handler.supports(PrinterMode.HTML_FRAGMENT) ) { handler.print(PrinterMode.HTML_FRAGMENT, pw, false); handler.print(PrinterMode.TEXT, pw, false); handler.print(mode, this, false); if ( mode == PrinterMode.TEXT ) { final ZipEntry entry = new ZipEntry( handler.getName().concat(".txt") ); zip.putNextEntry( entry ); handler.print(mode, this, false); flush(); zip.closeEntry(); handler.addAttachments(handler.getName().concat("/"), this.zip); } else if ( mode == PrinterMode.JSON ) { final String name = "json/".concat(handler.getName()).concat(".json"); handler.print(PrinterMode.JSON, this, true); if ( !handler.supports(PrinterMode.TEXT) ) { handler.addAttachments(handler.getName().concat("/"), this.zip);
0
import org.apache.aurora.scheduler.storage.entities.ITaskEvent; private static ITaskEvent getSecondToLatestEvent(IScheduledTask task) { return task.getTaskEvents().get(task.getTaskEvents().size() - 2); } updateStats(stats, getSecondToLatestEvent(task).getStatus());
0
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ambari.view.hive.client; public interface ColumnDescription { public abstract String getName(); public abstract void setName(String name); public abstract String getType(); public abstract void setType(String type); public abstract int getPosition(); public abstract void setPosition(int position); }
0