Diff
stringlengths 5
2k
| FaultInducingLabel
int64 0
1
|
---|---|
/*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
/**
* Commands for HTTP transports based on asynchronous, event driven I/O model.
*/
package org.apache.hc.core5.http.nio.command; | 0 |
import com.google.common.base.Predicates;
import com.twitter.common.zookeeper.Candidate;
import com.twitter.common.zookeeper.Group;
if (options.zooKeeperEndpoints == null) {
LOG.info("ZooKeeper endpoints not specified, ZooKeeper interaction disabled.");
return null;
} else {
return new ZooKeeperClient(Amount.of(options.zooKeeperSessionTimeoutSecs, Time.SECONDS),
ImmutableSet.copyOf(options.zooKeeperEndpoints));
}
if (zkClient == null) {
LOG.info("No ZooKeeper client, service registration disabled.");
return null;
} else {
return new ServerSet(zkClient, ZooDefs.Ids.OPEN_ACL_UNSAFE, options.nexusSchedulerNameSpec);
}
// TODO(wfarner): Make this more durable - should connect to new master if the
// candidacy changes.
LOG.info("Fetching elected nexus master.");
Group masterGroup = new Group(zkClient, ZooDefs.Ids.OPEN_ACL_UNSAFE,
options.nexusMasterNameSpec);
masterGroup.setGroupNodeNameFilter(Predicates.<String>alwaysTrue());
final Candidate masterCandidate = new Candidate(masterGroup);
masterCandidate.watchLeader(new Candidate.LeaderChangeListener() {
@Override public void onLeaderChange(String leaderId) {
LOG.info("Received notification of nexus master group change: " + leaderId);
}
});
nexusMaster = masterCandidate.getLeaderId();
LOG.info("Elected master id: " + nexusMaster);
} catch (Group.WatchException e) {
LOG.log(Level.SEVERE, "Failed to watch master server set for leader changes.", e); | 0 |
import java.lang.reflect.InvocationTargetException;
protected void fireStartedEvent() throws InterruptedException {
final SVGDocumentLoaderEvent ev =
new SVGDocumentLoaderEvent(this, null);
SVGDocumentLoaderListener dl =
(SVGDocumentLoaderListener)dll[i];
try {
EventQueue.invokeAndWait(new Runnable() {
public void run() {
for (int i = 0; i < dll.length; i++) {
SVGDocumentLoaderListener dl =
(SVGDocumentLoaderListener)dll[i];
dl.documentLoadingStarted(ev);
}
}
});
} catch (InvocationTargetException e) {
}
final SVGDocumentLoaderEvent ev =
new SVGDocumentLoaderEvent(this, doc);
SVGDocumentLoaderListener dl =
(SVGDocumentLoaderListener)dll[i];
final SVGDocumentLoaderEvent ev =
new SVGDocumentLoaderEvent(this, null);
SVGDocumentLoaderListener dl =
(SVGDocumentLoaderListener)dll[i];
final SVGDocumentLoaderEvent ev =
new SVGDocumentLoaderEvent(this, null);
SVGDocumentLoaderListener dl =
(SVGDocumentLoaderListener)dll[i]; | 0 |
import org.apache.accumulo.minicluster.impl.MiniAccumuloConfigImpl;
import org.apache.hadoop.conf.Configuration;
@Override
public void configureMiniCluster(MiniAccumuloConfigImpl cfg, Configuration hadoopCoreSite) {
cfg.setNumTservers(1);
}
ClientConfiguration clientConf = cluster.getClientConfig();
AccumuloInputFormat.setConnectorInfo(job, getAdminPrincipal(), getAdminToken()); | 0 |
* Tests for {@link SideInputContainer}.
public class SideInputContainerTest {
private EvaluationContext context;
private SideInputContainer container;
container = SideInputContainer.create( | 0 |
@Deprecated | 0 |
return TypesUtil.getTypesDef(ImmutableList.of(enumTypeDefinition), | 0 |
* @version $Id$ | 0 |
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. | 0 |
private static final WeakHashMap<Text,WeakReference<Text>> tableIds = new WeakHashMap<Text,WeakReference<Text>>(); | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 1 |
import org.apache.accumulo.core.security.thrift.TCredentials;
import org.apache.accumulo.core.security.tokens.AuthenticationToken;
* @throws AccumuloSecurityException
public static void setConnectorInfo(Job job, String principal, AuthenticationToken token) throws AccumuloSecurityException {
* @see #setConnectorInfo(Job, String, AuthenticationToken)
* @see #setConnectorInfo(Job, String, AuthenticationToken)
scanner = new OfflineScanner(instance, new TCredentials(principal, tokenClass, ByteBuffer.wrap(token), instance.getInstanceID()), Tables.getTableId(
instance, getInputTableName(attempt)), authorizations);
InputConfigurator.setConnectorInfo(CLASS, conf, user, new PasswordToken(passwd)); | 1 |
import java.util.List;
import javax.persistence.EntityManager;
import javax.persistence.NoResultException;
import javax.persistence.TypedQuery;
import org.apache.ambari.server.orm.RequiresSession;
import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntity;
import org.apache.ambari.server.orm.entities.RequestScheduleBatchRequestEntityPK;
| 0 |
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//collections/src/java/org/apache/commons/collections/primitives/adapters/Attic/AbstractByteListList.java,v 1.3 2003/11/07 20:09:15 rwaldhoff Exp $
*
* @deprecated This code has been moved to Jakarta Commons Primitives (http://jakarta.apache.org/commons/primitives/)
*
* @version $Revision: 1.3 $ $Date: 2003/11/07 20:09:15 $ | 0 |
/** {@link AutoService} registrar for the {@link LocalFileSystem}. */ | 1 |
/** Tests for {@link TransformHierarchy}. */
Entry<TupleTag<?>, PValue> replacementLongs =
Iterables.getOnlyElement(replacementOutput.expand().entrySet());
* Tests that visiting the {@link TransformHierarchy} after replacing nodes does not visit any of
* the original nodes or inaccessible values but does visit all of the replacement nodes, new
* inaccessible replacement values, and the original output values.
Final Graph:
Upstream -> Upstream.out -> Composite -> (ReplacementParDo -> OriginalParDo.out)
*/
hierarchy.pushNode("creates_one_and_two", PBegin.in(pipeline), producer);
pipeline, WindowingStrategy.globalDefault(), IsBounded.BOUNDED, StringUtf8Coder.of());
pipeline, WindowingStrategy.globalDefault(), IsBounded.UNBOUNDED, VarIntCoder.of()); | 1 |
import org.slf4j.LoggerFactory;
LoggerFactory.getLogger(FinishCreateNamespace.class).debug("Created table " + namespaceInfo.namespaceId + " " + namespaceInfo.namespaceName);
LoggerFactory.getLogger(FinishCreateNamespace.class).error(e.getMessage(), e); | 0 |
import java.lang.reflect.Constructor;
import org.apache.commons.logging.Log; | 0 |
cal.set(Calendar.MILLISECOND, 0);
cal.set(Calendar.SECOND, 0);
cal.set(Calendar.MINUTE, 0);
cal.set(Calendar.HOUR_OF_DAY, 0);
cal.set(Calendar.YEAR, Integer.parseInt(yr));
cal.set(Calendar.YEAR, year);
cal.set(Calendar.HOUR_OF_DAY, Integer.parseInt(hr));
cal.set(Calendar.MINUTE, Integer.parseInt(min));
cal.set(Calendar.MONTH, month);
cal.set(Calendar.DATE, Integer.parseInt(da)); | 0 |
package org.apache.felix.sigil.ui.eclipse.ui.util;
import org.apache.felix.sigil.eclipse.SigilCore;
import org.apache.felix.sigil.eclipse.model.repository.IRepositoryModel;
import org.apache.felix.sigil.model.IModelElement;
import org.apache.felix.sigil.model.eclipse.ISigilBundle;
import org.apache.felix.sigil.model.osgi.IBundleModelElement;
import org.apache.felix.sigil.model.osgi.IPackageExport;
import org.apache.felix.sigil.model.osgi.IPackageImport;
import org.apache.felix.sigil.model.osgi.IRequiredBundle;
import org.apache.felix.sigil.repository.IBundleRepository;
import org.apache.felix.sigil.ui.eclipse.ui.SigilUI; | 0 |
/** @since 2.2 */
/** @since 2.2 */
/** @since 2.2 */
/** @since 2.2 */
/** @since 2.2 */
/** @since 2.2 */
/** @since 2.2 */ | 0 |
public void testIsAttributeMapsEqual() {
AmbariManagementControllerImpl controllerImpl = null;
if (controller instanceof AmbariManagementControllerImpl){
controllerImpl = (AmbariManagementControllerImpl)controller;
}
Map<String, Map<String, String>> requestConfigAttributes = new HashMap<String, Map<String,String>>();
Map<String, Map<String, String>> clusterConfigAttributes = new HashMap<String, Map<String,String>>();
Assert.assertTrue(controllerImpl.isAttributeMapsEqual(requestConfigAttributes, clusterConfigAttributes));
requestConfigAttributes.put("final", new HashMap<String, String>());
requestConfigAttributes.get("final").put("c", "true");
clusterConfigAttributes.put("final", new HashMap<String, String>());
clusterConfigAttributes.get("final").put("c", "true");
Assert.assertTrue(controllerImpl.isAttributeMapsEqual(requestConfigAttributes, clusterConfigAttributes));
clusterConfigAttributes.put("final2", new HashMap<String, String>());
clusterConfigAttributes.get("final2").put("a", "true");
Assert.assertFalse(controllerImpl.isAttributeMapsEqual(requestConfigAttributes, clusterConfigAttributes));
requestConfigAttributes.put("final2", new HashMap<String, String>());
requestConfigAttributes.get("final2").put("a", "false");
Assert.assertFalse(controllerImpl.isAttributeMapsEqual(requestConfigAttributes, clusterConfigAttributes));
}
@Test | 0 |
* ResponseConnControl is responsible for adding <code>Connection</code> header
* to the outgoing responses, which is essential for managing persistence of
* <code>HTTP/1.0</code> connections. This interceptor is recommended for
*
public void process(final HttpResponse response, final HttpContext context)
if (entity.getContentLength() < 0 &&
| 0 |
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamSqlRelUtils;
return BeamSqlRelUtils.toPCollection(input.getPipeline(), sqlEnv.parseQuery(queryString())); | 0 |
* @version CVS $Id: EP_Draft.java,v 1.2 2003/03/11 19:05:01 vgritsenko Exp $ | 0 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.sdk.util.state;
import java.io.Serializable;
import org.apache.beam.sdk.annotations.Experimental;
import org.apache.beam.sdk.annotations.Experimental.Kind;
import org.apache.beam.sdk.util.TimerInternals;
/**
* A factory for providing {@link TimerInternals} for a particular key.
*
* <p>Because it will generally be embedded in a {@link org.apache.beam.sdk.transforms.DoFn DoFn},
* albeit at execution time, it is marked {@link Serializable}.
*/
@Experimental(Kind.STATE)
public interface TimerInternalsFactory<K> {
/** Returns {@link TimerInternals} for the provided key. */
TimerInternals timerInternalsForKey(K key);
} | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 0 |
import org.apache.accumulo.server.util.Halt;
String msg = "error removing tablet server lock";
log.fatal(msg, e);
Halt.halt(msg, -1); | 0 |
* @version $Id: JexlTest.java,v 1.27 2003/02/18 20:22:41 geirm Exp $
jc.getVars().put("set", (new HashMap()).keySet());
e = ExpressionFactory.createExpression("empty set");
o = e.evaluate(jc);
assertTrue("8 : o incorrect", o.equals(Boolean.TRUE));
fail("this test should have thrown an exception" );
}
public List getCheeseList()
public boolean isSimple()
public int square(int value)
* Asserts that the given expression returns the given value when applied to the
protected void assertExpression(JexlContext jc, String expression, Object expected) throws Exception
jt.testEmpty(); | 0 |
GremlinQueryResult queryResult = evaluate(dslQuery);
return queryResult.toJson();
}
public GremlinQueryResult evaluate(String dslQuery) throws DiscoveryException {
LOG.info("Executing dsl query={}", dslQuery);
return evaluate(expression); | 0 |
Collections.singletonMap("negate", "default false keeps k/v that pass"
+ " accept method, true rejects k/v that pass accept method"), | 0 |
private Tailer tailer;
if (tailer != null) {
tailer.stop();
Thread.sleep(100);
}
tailer = new Tailer(file, listener, delay, false);
tailer=null;
tailer = Tailer.create(file, listener, delay, false);
tailer=null;
tailer = new Tailer(file, listener, delay, false);
tailer=null; | 0 |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Autogenerated by Thrift Compiler (0.9.0)
*
* DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
* @generated
*/
package org.apache.accumulo.proxy.thrift;
import java.util.Map;
import java.util.HashMap;
import org.apache.thrift.TEnum;
@SuppressWarnings("all") public enum PCompactionType implements org.apache.thrift.TEnum {
MINOR(0),
MERGE(1),
MAJOR(2),
FULL(3);
private final int value;
private PCompactionType(int value) {
this.value = value;
}
/**
* Get the integer value of this enum value, as defined in the Thrift IDL.
*/
public int getValue() {
return value;
}
/**
* Find a the enum type by its integer value, as defined in the Thrift IDL.
* @return null if the value is not found.
*/
public static PCompactionType findByValue(int value) {
switch (value) {
case 0:
return MINOR;
case 1:
return MERGE;
case 2:
return MAJOR;
case 3:
return FULL;
default:
return null;
}
}
} | 0 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.dsls.sql.schema;
import org.apache.beam.sdk.Pipeline;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.PDone;
/**
* This interface defines a Beam Sql Table.
*/
public interface BeamSqlTable {
/**
* In Beam SQL, there's no difference between a batch query and a streaming
* query. {@link BeamIOType} is used to validate the sources.
*/
BeamIOType getSourceType();
/**
* create a {@code PCollection<BeamSqlRow>} from source.
*
*/
PCollection<BeamSqlRow> buildIOReader(Pipeline pipeline);
/**
* create a {@code IO.write()} instance to write to target.
*
*/
PTransform<? super PCollection<BeamSqlRow>, PDone> buildIOWriter();
/**
* Get the schema info of the table.
*/
BeamSqlRecordType getRecordType();
} | 0 |
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
private static final Logger LOG = LoggerFactory.getLogger(MultiTransactionTest.class);
LOG.error("STACKTRACE: ", e);
LOG.info("RESULT==> {}", r);
LOG.info("ERROR RESULT: {} ERR=>{}", er, KeeperException.Code.get(er.getErr())); | 0 |
import com.sun.javadoc.*;
import org.cyberneko.html.parsers.SAXParser;
parseHTML(commentText);
// cm.characters(commentText.toCharArray(), 0, commentText.length());
protected void parseHTML(String text) throws SAXException {
SAXParser parser = new SAXParser();
parser.setProperty(
"http://cyberneko.org/html/properties/names/elems",
"lower"
);
parser.setProperty(
"http://cyberneko.org/html/properties/names/attrs",
"lower"
);
parser.setContentHandler(
new DefaultHandler() {
public void startElement(String namespaceURI, String localName, String qName, Attributes atts) throws SAXException {
if ( validDocElementName( localName ) ) {
cm.startElement(namespaceURI, localName, qName, atts);
}
}
public void endElement(String namespaceURI, String localName, String qName) throws SAXException {
if ( validDocElementName( localName ) ) {
cm.endElement(namespaceURI, localName, qName);
}
}
public void characters(char[] ch, int start, int length) throws SAXException {
cm.characters(ch, start, length);
}
}
);
try {
parser.parse( new InputSource(new StringReader( text )) );
}
catch (IOException e) {
System.err.println( "This should never happen!" + e );
}
}
/**
* @return true if the given name is a valid HTML markup element.
*/
protected boolean validDocElementName(String name) {
return ! name.equalsIgnoreCase( "html" ) && ! name.equalsIgnoreCase( "body" );
}
| 0 |
if(element == null){
return false;
}
return Constants.SignatureSpecNS.equals(element.getNamespaceURI()) && element.getLocalName().equals(localName);
if(element == null){
return false;
}
return EncryptionConstants.EncryptionSpecNS.equals(element.getNamespaceURI()) && element.getLocalName().equals(localName);
if (!namespaceNs.equals(currentAttr.getNamespaceURI()))
if (Constants.SignatureSpecNS.equals(sibling.getNamespaceURI()) && sibling.getLocalName().equals(nodeName)) {
if (EncryptionConstants.EncryptionSpecNS.equals(sibling.getNamespaceURI()) && sibling.getLocalName().equals(nodeName)) {
if (sibling.getNamespaceURI() != null && sibling.getNamespaceURI().equals(uri) && sibling.getLocalName().equals(nodeName)) {
if (sibling.getNamespaceURI() != null && sibling.getNamespaceURI().equals(uri) && sibling.getLocalName().equals(nodeName)) { | 0 |
public static void main(String[] args) {
if (args.length > 0) {
if (args[0].equals("success")) System.exit(0);
if (args[0].equals("throw")) throw new RuntimeException("This is an exception");
System.exit(-1);
} | 1 |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
/**
* Represents a behavior used during upgrade for property
*/
@XmlAccessorType(XmlAccessType.FIELD)
public class PropertyUpgradeBehavior {
@XmlAttribute(name="add", required = true)
private boolean add;
@XmlAttribute(name="delete", required = true)
private boolean delete;
@XmlAttribute(name="change", required = true)
private boolean change;
public PropertyUpgradeBehavior() {}
public PropertyUpgradeBehavior(boolean add, boolean delete, boolean change) {
this.add = add;
this.delete = delete;
this.change = change;
}
public void setAdd( boolean add )
{
this.add = add;
}
public void setDelete( boolean delete )
{
this.delete = delete;
}
public void setChange( boolean change )
{
this.change = change;
}
public boolean isAdd() {
return add;
}
public boolean isDelete() {
return delete;
}
public boolean isChange() {
return change;
}
} | 0 |
import javax.ws.rs.core.Response;
* @response.representation.200.doc Successful.
* @response.representation.200.mediaType application/json
* @response.representation.404.doc Node does not exist
* @response.representation.200.example
*
@DefaultValue("") @QueryParam("alive") String alive) throws Exception {
List<Node> list;
list = Nodes.getInstance().getNodesByState(allocated, alive);
if (list.isEmpty()) {
throw new WebApplicationException(Response.Status.NO_CONTENT);
}
return list;
* @response.representation.200.doc Successful.
* @response.representation.200.mediaType application/json
* @response.representation.404.doc Node does not exist
* @response.representation.200.example
* | 0 |
import org.apache.accumulo.tserver.tablet.CommitSession;
return commitSession.getUseWAL(); | 1 |
* <p>A Sink is written to by getting a SinkWriter and adding values to | 0 |
cocoonMap.put("settings", this.settings); | 0 |
*
* @since 1.6 | 0 |
private static final Schema SIMPLE_SCHEMA =
Schema.builder()
.addStringField("field0")
.addStringField("field1")
.addInt32Field("field2")
.addInt32Field("field3")
.build();
private static final Schema NESTED_SCHEMA1 =
Schema.builder().addStringField("field0").addRowField("field1", SIMPLE_SCHEMA).build();
private static Schema NESTED_SCHEMA2 =
Schema.builder().addStringField("field0").addRowField("field1", NESTED_SCHEMA1).build();
private static Schema NESTED_ARRAY_SCHEMA =
Schema.builder()
.addStringField("field0")
.addArrayField("field1", FieldType.row(SIMPLE_SCHEMA))
.build();
private static Schema NESTED_MAP_SCHEMA =
Schema.builder()
.addStringField("field0")
.addMapField("field1", FieldType.STRING, FieldType.row(SIMPLE_SCHEMA))
.build();
.withNestedField("field1", FieldAccessDescriptor.withAllFields());
FieldAccessDescriptor level2 =
FieldAccessDescriptor.withFieldNames("field1").withNestedField("field1", level1);
FieldAccessDescriptor level3 =
FieldAccessDescriptor.withFieldNames("field1").withNestedField("field1", level2);
FieldAccessDescriptor level2 =
FieldAccessDescriptor.withFieldNames("field1").withNestedField("field1", level1);
FieldAccessDescriptor level2 =
FieldAccessDescriptor.withFieldNames("field1").withNestedField("field1", level1); | 0 |
} catch (final URISyntaxException usx) {
final List<Proxy> proxies = psel.select(targetURI);
final Proxy p = chooseProxy(proxies, target, request, context);
final Proxy p = proxies.get(i); | 0 |
package org.apache.beam.sdk.extensions.sql.impl.rule;
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamJoinRel;
import org.apache.beam.sdk.extensions.sql.impl.rel.BeamLogicalConvention; | 0 |
import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.Status;
import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.StatusRuntimeException;
import org.apache.beam.vendor.grpc.v1p21p0.io.grpc.stub.StreamObserver; | 0 |
private static final String INTERNAL_TYPENAME = "__internal";
private boolean isInternalType = false;
if (INTERNAL_TYPENAME.equals(superTypeName)) {
isInternalType = true;
}
public boolean isInternalType() {
return isInternalType;
}
| 1 |
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//digester/src/test/org/apache/commons/digester/Employee.java,v 1.3 2002/01/23 22:38:01 sanders Exp $
* $Revision: 1.3 $
* $Date: 2002/01/23 22:38:01 $
| 0 |
*
*
} else
*
*
*
* | 0 |
Map<String, Class<? extends Predicate>> map = new HashMap<>(); | 0 |
package org.apache.beam.runners.flink.translation.wrappers.streaming.state;
import org.apache.flink.runtime.state.KeyedStateBackend;
* {@link StateInternals} that uses a Flink {@link KeyedStateBackend} to manage state.
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
private Coder<K> keyCoder;
public FlinkStateInternals(KeyedStateBackend<ByteBuffer> flinkStateBackend, Coder<K> keyCoder) {
ByteBuffer keyBytes = flinkStateBackend.getCurrentKey();
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
KeyedStateBackend<ByteBuffer> flinkStateBackend,
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
KeyedStateBackend<ByteBuffer> flinkStateBackend,
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
KeyedStateBackend<ByteBuffer> flinkStateBackend,
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
KeyedStateBackend<ByteBuffer> flinkStateBackend,
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
KeyedStateBackend<ByteBuffer> flinkStateBackend,
private final KeyedStateBackend<ByteBuffer> flinkStateBackend;
KeyedStateBackend<ByteBuffer> flinkStateBackend, | 0 |
/*
* Copyright (C) 2015 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.cloud.dataflow.examples.common;
import com.google.cloud.dataflow.sdk.options.DataflowPipelineOptions;
import com.google.cloud.dataflow.sdk.options.Default;
import com.google.cloud.dataflow.sdk.options.Description;
/**
* Options which can be used to configure the Dataflow examples.
*/
public interface DataflowExampleOptions extends DataflowPipelineOptions {
@Description("Whether to keep jobs running on the Dataflow service after local process exit")
@Default.Boolean(false)
boolean getKeepJobsRunning();
void setKeepJobsRunning(boolean keepJobsRunning);
} | 0 |
* Check for emptiness of various types: Number, Collection, Array, Map, String.
* @param object the object to check the emptiness of
* @return the boolean or null if there is no arithmetic solution | 0 |
} catch (Exception e) {
LOG.error("Error loading deferred resources", e); | 0 |
import org.apache.http.protocol.HttpCoreContext;
Assert.assertSame(request, this.context.getAttribute(HttpCoreContext.HTTP_REQUEST));
Assert.assertSame(this.conn, this.context.getAttribute(HttpCoreContext.HTTP_CONNECTION));
Assert.assertSame(response, this.context.getAttribute(HttpCoreContext.HTTP_RESPONSE)); | 0 |
statuses = new HashSet<>(); | 0 |
<T> void invokeUpdatedMethod( DependencyManager<S, T> dependencyManager, RefPair<T> ref, int trackingCount ) | 0 |
* @version $Revision$ $Date$ | 0 |
databaseName.toLowerCase(), clusterName);
HiveDataTypes.HIVE_TABLE.getName(), tableName.toLowerCase(), dbName.toLowerCase(), clusterName);
tableType, tableName.toLowerCase(), tableType, dbType, dbName.toLowerCase(), dbType, clusterName); | 0 |
class CopyOnAccessInMemoryStateInternals<K> implements StateInternals { | 0 |
if (DEBUG) {
} | 0 |
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
private static final Logger LOG = LoggerFactory.getLogger(ParDoMultiEvaluatorFactory.class);
private final LoadingCache<AppliedPTransform<?, ?, BoundMulti<?, ?>>, DoFnLifecycleManager>
fnClones = CacheBuilder.newBuilder()
.build(new CacheLoader<AppliedPTransform<?, ?, BoundMulti<?, ?>>, DoFnLifecycleManager>() {
@Override
public DoFnLifecycleManager load(AppliedPTransform<?, ?, BoundMulti<?, ?>> key)
throws Exception {
return DoFnLifecycleManager.of(key.getTransform().getFn());
}
});
EvaluationContext evaluationContext) throws Exception {
public void cleanup() throws Exception {
for (DoFnLifecycleManager lifecycleManager : fnClones.asMap().values()) {
lifecycleManager.removeAll();
}
EvaluationContext evaluationContext) throws Exception {
DoFnLifecycleManager fnLocal = fnClones.getUnchecked((AppliedPTransform) application);
@SuppressWarnings({"unchecked", "rawtypes"})
(OldDoFn) fnLocal.get(),
return DoFnLifecycleManagerRemovingTransformEvaluator.wrapping(parDoEvaluator, fnLocal);
try {
fnLocal.remove();
} catch (Exception removalException) {
LOG.error("Exception encountered while cleaning up in ParDo evaluator construction",
removalException);
e.addSuppressed(removalException);
} | 0 |
* or more contributor license agreements. See the NOTICE file
* regarding copyright ownership. The ASF licenses this file
* with the License. You may obtain a copy of the License at
* KIND, either express or implied. See the License for the | 0 |
package org.apache.bcel.generic;
import org.apache.bcel.Const; | 1 |
public final class ASTMapLiteral extends JexlNode {
protected boolean isConstant(boolean literal) {
return constant;
/** {@inheritDoc} */
@Override
public void jjtClose() {
constant = true;
if (children != null) {
for (int c = 0; c < children.length && constant; ++c) {
JexlNode child = children[c];
if (child instanceof ASTMapEntry) {
constant = child.isConstant(true);
} else if (!child.isConstant()) {
constant = false;
} | 1 |
* @return
* @param kanonce
* @return | 1 |
public void testEncodedTypeDescriptor() { | 0 |
String path = ZooUtil.getRoot(client.instanceOperations().getInstanceID())
+ Constants.ZTABLES + "/" + tableId + "/merge"; | 0 |
exec("notable", true); | 0 |
padding = buffer[off + 9] & 0xff; | 0 |
* @version $Id: UnixFTPEntryParser.java,v 1.20 2004/11/23 12:52:20 rwinston Exp $
cal.set(Calendar.MILLISECOND, 0); | 0 |
return 0; | 0 |
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
public void writeTo(final OutputStream out, int mode) throws IOException {
InputStream in = new ByteArrayInputStream(this.content);
byte[] tmp = new byte[4096];
int l;
while ((l = in.read(tmp)) != -1) {
out.write(tmp, 0, l);
}
out.flush();
public String getCharset() {
return this.charset.name();
public String getMediaType() {
return "text";
}
public String getSubType() {
return "plain";
}
public Map<?, ?> getContentTypeParameters() {
Map<Object, Object> map = new HashMap<Object, Object>();
map.put("charset", this.charset.name());
return map;
}
| 0 |
AccumuloVFSClassLoader.printClassPath(s -> {
try {
reader.print(s);
} catch (IOException ex) {
throw new RuntimeException(ex);
}, true); | 0 |
* @version CVS $Id: Processor.java,v 1.5 2003/08/16 13:30:04 sylvain Exp $
/**
* Get the root processor parent of this processor.
*
 * @since 2.1.1
*/
Processor getRootProcessor(); | 0 |
|| (!transforms.stream() | 0 |
* @version $Id$
* @throws java.util.NoSuchElementException if the iteration is finished | 0 |
import java.io.IOException;
public EmbeddedServer(int port, String path) throws IOException {
public static EmbeddedServer newServer(int port, String path, boolean secure) throws IOException {
protected Connector getConnector(int port) throws IOException { | 0 |
/* ====================================================================
* Copyright (c) 2001-2004 The Apache Software Foundation. All rights
* @version $Revision: 1.6 $ $Date: 2004/01/14 21:34:26 $ | 0 |
/**
* Copyright 2013 Apache Software Foundation | 0 |
OutputTimeFn<BoundedWindow> outputTimeFn =
OutputTimeFns.outputAtEarliestInputTimestamp(); | 0 |
return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,?,?,'','job','','','Unknown',?,'','Worksheet')";
return "INSERT INTO ds_savedquery_" + id + " values (?,?,?,?,?,?)";
/** Builds the SQL that reads the current value of the ds_savedquery_&lt;id&gt; sequence. */
  @Override
  protected String getSqlSequenceNoFromAmbariSequence(int id) {
    final String sequenceName = "ds_savedquery_" + id + "_id_seq";
    return "select sequence_value from ambari_sequences where sequence_name ='" + sequenceName + "'";
  }
@Override
protected String getSqlUpdateSequenceNo(int id) {
return "update ambari_sequences set sequence_value=? where sequence_name='ds_savedquery_"+id+"_id_seq'";
} | 0 |
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at | 1 |
ValidationResourceProvider provider = spy(new ValidationResourceProvider(ambariManagementController)); | 0 |
package com.twitter.mesos.scheduler.zk;
import java.lang.annotation.Retention;
import java.lang.annotation.Target;
import com.google.inject.BindingAnnotation;
import static java.lang.annotation.ElementType.FIELD;
import static java.lang.annotation.ElementType.METHOD;
import static java.lang.annotation.ElementType.PARAMETER;
import static java.lang.annotation.RetentionPolicy.RUNTIME;
/**
* Binding annotation for zookeeper clients and endpoints.
*/
@BindingAnnotation
@Target({FIELD, PARAMETER, METHOD}) @Retention(RUNTIME)
public @interface ZooKeeper {} | 0 |
} catch (URISyntaxException | IOException | IllegalArgumentException e) { | 0 |
import org.osgi.converter.Converter;
import org.osgi.converter.StandardConverter;
import org.osgi.converter.TypeReference; | 0 |
import org.apache.commons.lang.StringUtils;
private static final String AMBARI_SERVER_CONFIGURATIONS_PROPERTY = "ambari-server-properties";
populateAmbariServerInfo(root);
protected void populateAmbariServerInfo(ObjectNode root) throws StackAdvisorException {
Map<String, String> serverProperties = metaInfo.getAmbariServerProperties();
if (serverProperties != null && !serverProperties.isEmpty()) {
JsonNode serverPropertiesNode = mapper.convertValue(serverProperties, JsonNode.class);
root.put(AMBARI_SERVER_CONFIGURATIONS_PROPERTY, serverPropertiesNode);
}
}
| 0 |
import org.apache.beam.vendor.guava.v20_0.com.google.common.collect.ImmutableList;
import org.apache.beam.vendor.guava.v20_0.com.google.common.collect.ImmutableSet;
import org.apache.beam.vendor.guava.v20_0.com.google.common.collect.Sets;
import org.apache.beam.vendor.guava.v20_0.com.google.common.graph.MutableNetwork;
import org.apache.beam.vendor.guava.v20_0.com.google.common.graph.NetworkBuilder; | 0 |
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.transforms.Sum;
@Description("Whether to send output to checksum Transform.")
@Default.Boolean(true)
Boolean getOutputToChecksum();
void setOutputToChecksum(Boolean value);
@Description("Expected result of the checksum transform.")
Long getExpectedChecksum();
void setExpectedChecksum(Long value);
public static void runAutocompletePipeline(Options options) throws IOException {
if (options.getOutputToChecksum()) {
PCollection<Long> checksum =
toWrite
.apply(
ParDo.of(
new DoFn<KV<String, List<CompletionCandidate>>, Long>() {
@ProcessElement
public void process(ProcessContext c) {
KV<String, List<CompletionCandidate>> elm = c.element();
Long listHash =
c.element().getValue().stream().mapToLong(cc -> cc.hashCode()).sum();
c.output(Long.valueOf(elm.getKey().hashCode()) + listHash);
}
}))
.apply(Sum.longsGlobally());
PAssert.that(checksum).containsInAnyOrder(options.getExpectedChecksum());
}
public static void main(String[] args) throws IOException {
Options options = PipelineOptionsFactory.fromArgs(args).withValidation().as(Options.class);
runAutocompletePipeline(options);
} | 0 |
return new String[] { recursiveTest }; | 0 |
case ALERT_EXECUTION_COMMAND: {
response.addAlertExecutionCommand((AlertExecutionCommand) ac);
break;
} | 0 |
// FIXME make this configurable through the FlinkExecutor | 0 |
*
* @deprecated (4.4) use {@link org.apache.http.ssl.PrivateKeyStrategy}.
@Deprecated | 0 |
final Configuration conf = new BaseConfiguration();
final ConfigurationLookup lookup = new ConfigurationLookup(conf);
final Configuration conf = new BaseConfiguration();
final ConfigurationLookup lookup = new ConfigurationLookup(conf);
final BaseConfiguration conf = new BaseConfiguration();
final ConfigurationLookup lookup = new ConfigurationLookup(conf);
final Configuration conf = new BaseConfiguration();
final ConfigurationLookup lookup = new ConfigurationLookup(conf);
final Collection<?> col = (Collection<?>) lookup.lookup(VAR);
final Iterator<?> it = col.iterator(); | 0 |
import org.apache.accumulo.core.client.sample.SamplerConfiguration; | 0 |
/** The source of this expression (see {@link UnifiedJEXL.Expression#prepare}). */
* @throws ParseException (only for nested & composite)
* @throws UnifiedJEXL.Exception if an error occurs and the {@link JexlEngine} is not silent
* @throws UnifiedJEXL.Exception if an error occurs and the {@link JexlEngine} is not silent
 * @throws ParseException if an error occurs during parsing
 * @throws ParseException if an error occurs during parsing
 * @return an exception containing an explicit error message
 * @throws ParseException if an error occurs during parsing
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import org.apache.avalon.excalibur.logger.LoggerLoggerManager;
* @version CVS $Id: SitemapLanguage.java,v 1.6 2003/09/17 01:13:44 joerg Exp $ | 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.