package rule
import (
"fmt"
"github.com/mgechev/revive/lint"
)
// ImportsBlacklistRule disallows importing the blacklisted packages.
type ImportsBlacklistRule struct {
blacklist map[string]bool
}
// Apply applies the rule to given file.
func (r *ImportsBlacklistRule) Apply(file *lint.File, arguments lint.Arguments) []lint.Failure {
var failures []lint.Failure
if file.IsTest() {
return failures // skip, test file
}
if r.blacklist == nil {
r.blacklist = make(map[string]bool, len(arguments))
for _, arg := range arguments {
argStr, ok := arg.(string)
if !ok {
panic(fmt.Sprintf("Invalid argument to the imports-blacklist rule. Expecting a string, got %T", arg))
}
// we add quotes if not present, because the value of the import path AST node is quoted when parsed
if len(argStr) > 2 && argStr[0] != '"' && argStr[len(argStr)-1] != '"' {
argStr = fmt.Sprintf(`%q`, argStr)
}
r.blacklist[argStr] = true
}
}
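// flag any import whose (already quoted) path appears in the blacklist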
for _, is := range file.AST.Imports {
path := is.Path
if path != nil && r.blacklist[path.Value] {
failures = append(failures, lint.Failure{
Confidence: 1,
Failure: "should not use the following blacklisted import: " + path.Value,
Node: is,
Category: "imports",
})
}
}
return failures
}
// Name returns the rule name.
func (r *ImportsBlacklistRule) Name() string {
return "imports-blacklist"
}
| Go |
#!/bin/bash
# Copyright 2014 The Kubernetes Authors All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
## Contains configuration values for interacting with the Vagrant cluster
# Number of minions in the cluster
NUM_MINIONS=${NUM_MINIONS-"1"}
export NUM_MINIONS
# The IP of the master
export MASTER_IP="10.245.1.2"
export KUBE_MASTER_IP="10.245.1.2"
export INSTANCE_PREFIX="kubernetes"
export MASTER_NAME="${INSTANCE_PREFIX}-master"
# Map out the IPs, names and container subnets of each minion
export MINION_IP_BASE="10.245.1."
MINION_CONTAINER_SUBNET_BASE="10.246"
MASTER_CONTAINER_NETMASK="255.255.255.0"
MASTER_CONTAINER_ADDR="${MINION_CONTAINER_SUBNET_BASE}.0.1"
MASTER_CONTAINER_SUBNET="${MINION_CONTAINER_SUBNET_BASE}.0.1/24"
CONTAINER_SUBNET="${MINION_CONTAINER_SUBNET_BASE}.0.0/16"
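# Minion IPs start at 10.245.1.3; 10.245.1.2 is used by the master (see MASTER_IP above).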
for ((i=0; i < NUM_MINIONS; i++)) do
MINION_IPS[$i]="${MINION_IP_BASE}$((i+3))"
MINION_NAMES[$i]="${INSTANCE_PREFIX}-minion-$((i+1))"
MINION_CONTAINER_SUBNETS[$i]="${MINION_CONTAINER_SUBNET_BASE}.$((i+1)).1/24"
MINION_CONTAINER_ADDRS[$i]="${MINION_CONTAINER_SUBNET_BASE}.$((i+1)).1"
MINION_CONTAINER_NETMASKS[$i]="255.255.255.0"
VAGRANT_MINION_NAMES[$i]="minion-$((i+1))"
done
PORTAL_NET=10.247.0.0/16
# Since this isn't exposed on the network, default to a simple user/passwd
MASTER_USER=vagrant
MASTER_PASSWD=vagrant
# Admission Controllers to invoke prior to persisting objects in cluster
ADMISSION_CONTROL=NamespaceLifecycle,NamespaceAutoProvision,LimitRanger,SecurityContextDeny,ResourceQuota
# Optional: Install node monitoring.
ENABLE_NODE_MONITORING=true
# Optional: Enable node logging.
ENABLE_NODE_LOGGING=false
LOGGING_DESTINATION=elasticsearch
# Optional: When set to true, Elasticsearch and Kibana will be set up as part of the cluster bring-up.
ENABLE_CLUSTER_LOGGING=false
ELASTICSEARCH_LOGGING_REPLICAS=1
# Optional: When set to true, Heapster, InfluxDB and Grafana will be set up as part of the cluster bring-up.
ENABLE_CLUSTER_MONITORING="${KUBE_ENABLE_CLUSTER_MONITORING:-true}"
# Extra options to set on the Docker command line. This is useful for setting
# --insecure-registry for local registries.
DOCKER_OPTS=""
# Optional: Install cluster DNS.
ENABLE_CLUSTER_DNS=true
DNS_SERVER_IP="10.247.0.10"
DNS_DOMAIN="kubernetes.local"
DNS_REPLICAS=1
# Optional: Enable setting flags for kube-apiserver to turn on behavior in active-dev
#RUNTIME_CONFIG=""
RUNTIME_CONFIG="api/v1beta3"
| Shell |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef __TESTS_ENVIRONMENT_HPP__
#define __TESTS_ENVIRONMENT_HPP__
#include <gtest/gtest.h>
namespace mesos {
namespace internal {
namespace tests {
// Used to set up our particular test environment.
class Environment : public ::testing::Environment {
public:
virtual void SetUp();
virtual void TearDown();
};
} // namespace tests {
} // namespace internal {
} // namespace mesos {
#endif // __TESTS_ENVIRONMENT_HPP__
| C++ |
<?php
/**
* Template operations
*
* [WeEngine System] Copyright (c) 2013 WE7.CC
*/
defined('IN_IA') or exit('Access Denied');
/**
* Import global variables and display the template page content directly.
* @var int
*/
define('TEMPLATE_DISPLAY', 0);
/**
* Import global variables and return the template page content as a string.
* @var int
*/
define('TEMPLATE_FETCH', 1);
/**
* Return the include path of the compiled template file.
* @var int
*/
define('TEMPLATE_INCLUDEPATH', 2);
/**
* Cache the output, @todo not implemented yet
* @var int
*/
define('TEMPLATE_CACHE', 3);
function template($filename, $flag = TEMPLATE_DISPLAY) {
global $_W;
$source = "{$_W['template']['source']}/{$_W['template']['current']}/{$filename}.html";
// exit($source);
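// fall back to the default template set when the current one does not provide this file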
if(!is_file($source)) {
$source = "{$_W['template']['source']}/default/{$filename}.html";
}
if(!is_file($source)) {
exit("Error: template source '{$filename}' is not exist!");
}
$compile = "{$_W['template']['compile']}/{$_W['template']['current']}/{$filename}.tpl.php";
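// recompile when in development mode, when no compiled copy exists, or when the source is newer than the compiled file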
if (DEVELOPMENT || !is_file($compile) || filemtime($source) > filemtime($compile)) {
template_compile($source, $compile);
}
switch ($flag) {
case TEMPLATE_DISPLAY:
default:
extract($GLOBALS, EXTR_SKIP);
include $compile;
break;
case TEMPLATE_FETCH:
extract($GLOBALS, EXTR_SKIP);
ob_start();
ob_clean();
include $compile;
$contents = ob_get_contents();
ob_clean();
return $contents;
break;
case TEMPLATE_INCLUDEPATH:
return $compile;
break;
case TEMPLATE_CACHE:
exit('Not supported yet');
break;
}
}
function template_compile($from, $to) {
$path = dirname($to);
if (!is_dir($path))
mkdirs($path);
$content = template_parse(file_get_contents($from));
file_put_contents($to, $content);
}
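// Translate template markup ({template}, {php}, {if}/{else}/{loop}, {$var}, {CONSTANT}) into executable PHP.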
function template_parse($str) {
$str = preg_replace('/<!--{(.+?)}-->/s', '{$1}', $str);
$str = preg_replace('/{template\s+(.+?)}/', '<?php include template($1, TEMPLATE_INCLUDEPATH);?>', $str);
$str = preg_replace('/{php\s+(.+?)}/', '<?php $1?>', $str);
$str = preg_replace('/{if\s+(.+?)}/', '<?php if($1) { ?>', $str);
$str = preg_replace('/{else}/', '<?php } else { ?>', $str);
$str = preg_replace('/{else ?if\s+(.+?)}/', '<?php } else if($1) { ?>', $str);
$str = preg_replace('/{\/if}/', '<?php } ?>', $str);
$str = preg_replace('/{loop\s+(\S+)\s+(\S+)}/', '<?php if(is_array($1)) { foreach($1 as $2) { ?>', $str);
$str = preg_replace('/{loop\s+(\S+)\s+(\S+)\s+(\S+)}/', '<?php if(is_array($1)) { foreach($1 as $2 => $3) { ?>', $str);
$str = preg_replace('/{\/loop}/', '<?php } } ?>', $str);
$str = preg_replace('/{(\$[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff]*)}/', '<?php echo $1;?>', $str);
$str = preg_replace('/{(\$[a-zA-Z_\x7f-\xff][a-zA-Z0-9_\x7f-\xff\[\]\'\"\$]*)}/', '<?php echo $1;?>', $str);
$str = preg_replace('/<\?php([^\?]+)\?>/es', "template_addquote('<?php$1?>')", $str);
$str = preg_replace('/{([A-Z_\x7f-\xff][A-Z0-9_\x7f-\xff]*)}/s', '<?php echo $1;?>', $str);
$str = str_replace('{##', '{', $str);
$str = str_replace('##}', '}', $str);
$str = "<?php defined('IN_IA') or exit('Access Denied');?>" . $str;
return $str;
}
function template_addquote($code) {
$code = preg_replace('/\[([a-zA-Z0-9_\-\.\x7f-\xff]+)\]/s', "['$1']", $code);
return str_replace('\\\"', '\"', $code);
}
| PHP |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.diff.impl.settings;
import com.intellij.icons.AllIcons;
import com.intellij.idea.ActionsBundle;
import com.intellij.openapi.actionSystem.ActionGroup;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.AnActionEvent;
import com.intellij.openapi.actionSystem.ToggleAction;
import com.intellij.openapi.editor.Editor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collection;
/**
* The "gear" action allowing to configure merge tool visual preferences, such as displaying whitespaces, line numbers and soft wraps.
*
* @see DiffMergeSettings
*/
public class DiffMergeSettingsAction extends ActionGroup {
@NotNull private final Collection<Editor> myEditors;
@NotNull private final DiffMergeSettings mySettings;
public DiffMergeSettingsAction(@NotNull Collection<Editor> editors, @NotNull DiffMergeSettings settings) {
super("Settings", null, AllIcons.General.GearPlain);
setPopup(true);
myEditors = editors;
mySettings = settings;
}
@NotNull
@Override
public AnAction[] getChildren(@Nullable AnActionEvent e) {
return new AnAction[] {
new DiffMergeToggleAction("EditorToggleShowWhitespaces", DiffMergeEditorSetting.WHITESPACES, myEditors, mySettings),
new DiffMergeToggleAction("EditorToggleShowLineNumbers", DiffMergeEditorSetting.LINE_NUMBERS, myEditors, mySettings),
new DiffMergeToggleAction("EditorToggleShowIndentLines", DiffMergeEditorSetting.INDENT_LINES, myEditors, mySettings),
new DiffMergeToggleAction("EditorToggleUseSoftWraps", DiffMergeEditorSetting.SOFT_WRAPS, myEditors, mySettings)
};
}
private static class DiffMergeToggleAction extends ToggleAction {
@NotNull private final DiffMergeEditorSetting mySetting;
@NotNull private final Collection<Editor> myEditors;
@NotNull private final DiffMergeSettings mySettings;
private DiffMergeToggleAction(@NotNull String actionId, @NotNull DiffMergeEditorSetting setting, @NotNull Collection<Editor> editors,
@NotNull DiffMergeSettings settings) {
super(ActionsBundle.actionText(actionId), ActionsBundle.actionDescription(actionId), null);
mySetting = setting;
myEditors = editors;
mySettings = settings;
}
@Override
public boolean isSelected(@NotNull AnActionEvent e) {
return getPreference(mySetting);
}
@Override
public void setSelected(@NotNull AnActionEvent e, boolean state) {
setPreference(mySetting, state);
for (Editor editor : myEditors) {
mySetting.apply(editor, state);
}
}
private void setPreference(DiffMergeEditorSetting preference, boolean state) {
mySettings.setPreference(preference, state);
}
private boolean getPreference(DiffMergeEditorSetting preference) {
return mySettings.getPreference(preference);
}
}
}
| Java |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2019 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.test.response.cookies;
import java.util.Arrays;
import java.util.Comparator;
import javax.servlet.ServletException;
import io.undertow.servlet.api.ServletInfo;
import io.undertow.servlet.test.util.DeploymentUtils;
import io.undertow.testutils.DefaultServer;
import io.undertow.testutils.HttpClientUtils;
import io.undertow.testutils.TestHttpClient;
import io.undertow.util.StatusCodes;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.client.methods.HttpGet;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
* Test for response.addCookie
*
* @author Flavia Rainone
*/
@RunWith(DefaultServer.class)
public class ResponseCookiesTestCase {
@BeforeClass
public static void setup() throws ServletException {
DeploymentUtils.setupServlet(
new ServletInfo("add-cookies", AddCookiesServlet.class)
.addMapping("/add-cookies"),
new ServletInfo("duplicate-cookies", DuplicateCookiesServlet.class)
.addMapping("/duplicate-cookies"),
new ServletInfo("overwrite-cookies", OverwriteCookiesServlet.class)
.addMapping("/overwrite-cookies"),
new ServletInfo("jsessionid-cookies", JSessionIDCookiesServlet.class)
.addMapping("/jsessionid-cookies"));
}
@Test
public void addCookies() throws Exception {
final TestHttpClient client = new TestHttpClient();
try {
final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/add-cookies");
final HttpResponse result = client.execute(get);
assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
final String response = HttpClientUtils.readResponse(result);
assertEquals("Served at: /servletContext", response);
final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
assertEquals(2, setCookieHeaders.length);
assertEquals("test1=test1; path=/test", setCookieHeaders[0].getValue());
assertEquals("test2=test2", setCookieHeaders[1].getValue());
} finally {
client.getConnectionManager().shutdown();
}
}
@Test
public void duplicateCookies() throws Exception {
final TestHttpClient client = new TestHttpClient();
try {
final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/duplicate-cookies");
final HttpResponse result = client.execute(get);
assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
final String response = HttpClientUtils.readResponse(result);
assertEquals("Served at: /servletContext", response);
final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
assertEquals(7, setCookieHeaders.length);
Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString));
assertEquals("test1=test1; path=/test1_1", setCookieHeaders[0].getValue());
assertEquals("test1=test1; path=/test1_2", setCookieHeaders[1].getValue());
assertEquals("test2=test2; path=/test2", setCookieHeaders[2].getValue());
assertEquals("test2=test2; path=/test2; domain=www.domain2.com", setCookieHeaders[3].getValue());
assertEquals("test3=test3", setCookieHeaders[4].getValue());
assertEquals("test3=test3; domain=www.domain3-1.com", setCookieHeaders[5].getValue());
assertEquals("test3=test3; domain=www.domain3-2.com", setCookieHeaders[6].getValue());
} finally {
client.getConnectionManager().shutdown();
}
}
@Test
public void overwriteCookies() throws Exception {
final TestHttpClient client = new TestHttpClient();
try {
final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/overwrite-cookies");
final HttpResponse result = client.execute(get);
assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
final String response = HttpClientUtils.readResponse(result);
assertEquals("Served at: /servletContext", response);
final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
assertEquals(5, setCookieHeaders.length);
Arrays.sort(setCookieHeaders, Comparator.comparing(Object::toString));
assertTrue("Header " + setCookieHeaders[0] + "didn't match expected regex",
setCookieHeaders[0].getValue().matches("JSESSIONID=.*; path=/servletContext"));
assertEquals("test=test10; domain=www.domain.com", setCookieHeaders[1].getValue());
assertEquals("test=test2; path=/test", setCookieHeaders[2].getValue());
assertEquals("test=test5", setCookieHeaders[3].getValue());
assertEquals("test=test8; path=/test; domain=www.domain.com", setCookieHeaders[4].getValue());
} finally {
client.getConnectionManager().shutdown();
}
}
@Test
public void jsessionIdCookies() throws Exception {
final TestHttpClient client = new TestHttpClient();
try {
final HttpGet get = new HttpGet(DefaultServer.getDefaultServerURL() + "/servletContext/jsessionid-cookies");
final HttpResponse result = client.execute(get);
assertEquals(StatusCodes.OK, result.getStatusLine().getStatusCode());
final String response = HttpClientUtils.readResponse(result);
assertEquals("Served at: /servletContext", response);
final Header[] setCookieHeaders = result.getHeaders("Set-Cookie");
assertEquals(3, setCookieHeaders.length);
assertTrue("Header " + setCookieHeaders[0] + "didn't start with expected prefix",
setCookieHeaders[0].getValue().startsWith("JSESSIONID=_bug_fix; path=/path3; Max-Age=500; Expires="));
assertTrue("Header " + setCookieHeaders[1] + "didn't start with expected prefix",
setCookieHeaders[1].getValue().startsWith("JSESSIONID=_bug_fix; path=/path4; Max-Age=1000; Expires="));
assertTrue("Header " + setCookieHeaders[2] + "didn't match expected regex",
setCookieHeaders[2].getValue().matches("JSESSIONID=.*; path=/servletContext"));
} finally {
client.getConnectionManager().shutdown();
}
}
}
| Java |
// Licensed to the Software Freedom Conservancy (SFC) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The SFC licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.openqa.selenium.grid.node.local;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import org.junit.Test;
import org.openqa.selenium.Capabilities;
import org.openqa.selenium.ImmutableCapabilities;
import org.openqa.selenium.events.local.GuavaEventBus;
import org.openqa.selenium.grid.data.CreateSessionRequest;
import org.openqa.selenium.grid.data.CreateSessionResponse;
import org.openqa.selenium.grid.data.Session;
import org.openqa.selenium.grid.node.Node;
import org.openqa.selenium.grid.testing.TestSessionFactory;
import org.openqa.selenium.json.Json;
import org.openqa.selenium.remote.ErrorCodes;
import org.openqa.selenium.remote.http.HttpRequest;
import org.openqa.selenium.remote.tracing.DefaultTestTracer;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Map;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.openqa.selenium.json.Json.MAP_TYPE;
import static org.openqa.selenium.remote.Dialect.OSS;
import static org.openqa.selenium.remote.Dialect.W3C;
import static org.openqa.selenium.remote.http.Contents.utf8String;
import static org.openqa.selenium.remote.http.HttpMethod.POST;
public class CreateSessionTest {
private final Json json = new Json();
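// Capabilities ("stereotype") that each local node in these tests is configured to supply.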
private final Capabilities stereotype = new ImmutableCapabilities("cheese", "brie");
@Test
public void shouldAcceptAW3CPayload() throws URISyntaxException {
String payload = json.toJson(ImmutableMap.of(
"capabilities", ImmutableMap.of(
"alwaysMatch", ImmutableMap.of("cheese", "brie"))));
HttpRequest request = new HttpRequest(POST, "/session");
request.setContent(utf8String(payload));
URI uri = new URI("http://example.com");
Node node = LocalNode.builder(
DefaultTestTracer.createTracer(),
new GuavaEventBus(),
uri,
uri,
null)
.add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
.build();
CreateSessionResponse sessionResponse = node.newSession(
new CreateSessionRequest(
ImmutableSet.of(W3C),
stereotype,
ImmutableMap.of()))
.orElseThrow(() -> new AssertionError("Unable to create session"));
Map<String, Object> all = json.toType(
new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
MAP_TYPE);
// Ensure that there's no status field (as this is used by the protocol handshake to determine
// whether the session is using the JWP or the W3C dialect).
assertThat(all.containsKey("status")).isFalse();
// Now check the fields required by the spec
Map<?, ?> value = (Map<?, ?>) all.get("value");
assertThat(value.get("sessionId")).isInstanceOf(String.class);
assertThat(value.get("capabilities")).isInstanceOf(Map.class);
}
@Test
public void shouldOnlyAcceptAJWPPayloadIfConfiguredTo() {
// TODO: implement shouldOnlyAcceptAJWPPayloadIfConfiguredTo test
}
@Test
public void ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured() {
// TODO: implement ifOnlyW3CPayloadSentAndRemoteEndIsJWPOnlyFailSessionCreationIfJWPNotConfigured test
}
@Test
public void ifOnlyJWPPayloadSentResponseShouldBeJWPOnlyIfJWPConfigured()
throws URISyntaxException {
String payload = json.toJson(ImmutableMap.of(
"desiredCapabilities", ImmutableMap.of("cheese", "brie")));
HttpRequest request = new HttpRequest(POST, "/session");
request.setContent(utf8String(payload));
URI uri = new URI("http://example.com");
Node node = LocalNode.builder(
DefaultTestTracer.createTracer(),
new GuavaEventBus(),
uri,
uri,
null)
.add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
.build();
CreateSessionResponse sessionResponse = node.newSession(
new CreateSessionRequest(
ImmutableSet.of(OSS),
stereotype,
ImmutableMap.of()))
.orElseThrow(() -> new AssertionError("Unable to create session"));
Map<String, Object> all = json.toType(
new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
MAP_TYPE);
// The status field is used by local ends to determine whether or not the session is a JWP one.
assertThat(all.get("status")).matches(obj -> ((Number) obj).intValue() == ErrorCodes.SUCCESS);
// The session id is a top level field
assertThat(all.get("sessionId")).isInstanceOf(String.class);
// And the value should contain the capabilities.
assertThat(all.get("value")).isInstanceOf(Map.class);
}
@Test
public void shouldPreferUsingTheW3CProtocol() throws URISyntaxException {
String payload = json.toJson(ImmutableMap.of(
"desiredCapabilities", ImmutableMap.of(
"cheese", "brie"),
"capabilities", ImmutableMap.of(
"alwaysMatch", ImmutableMap.of("cheese", "brie"))));
HttpRequest request = new HttpRequest(POST, "/session");
request.setContent(utf8String(payload));
URI uri = new URI("http://example.com");
Node node = LocalNode.builder(
DefaultTestTracer.createTracer(),
new GuavaEventBus(),
uri,
uri,
null)
.add(stereotype, new TestSessionFactory((id, caps) -> new Session(id, uri, caps)))
.build();
CreateSessionResponse sessionResponse = node.newSession(
new CreateSessionRequest(
ImmutableSet.of(W3C),
stereotype,
ImmutableMap.of()))
.orElseThrow(() -> new AssertionError("Unable to create session"));
Map<String, Object> all = json.toType(
new String(sessionResponse.getDownstreamEncodedResponse(), UTF_8),
MAP_TYPE);
// Ensure that there's no status field (as this is used by the protocol handshake to determine
// whether the session is using the JWP or the W3C dialect).
assertThat(all.containsKey("status")).isFalse();
// Now check the fields required by the spec
Map<?, ?> value = (Map<?, ?>) all.get("value");
assertThat(value.get("sessionId")).isInstanceOf(String.class);
assertThat(value.get("capabilities")).isInstanceOf(Map.class);
}
@Test
public void sessionDataShouldBeCorrectRegardlessOfPayloadProtocol() {
// TODO: implement sessionDataShouldBeCorrectRegardlessOfPayloadProtocol test
}
@Test
public void shouldSupportProtocolConversion() {
// TODO: implement shouldSupportProtocolConversion test
}
}
| Java |
package frc.team5333.lib;
import java.util.HashMap;
/**
* A static class that contains all kinds of Launch data for the robot,
* such as network ports, current state and more
*
* @author Jaci
*/
public class RobotData {
/**
* A blackboard containing objects that are common throughout the
* program, along with their String Identifier
*/
public static HashMap<String, Object> blackboard = new HashMap<String, Object>();
}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openshift.build_configs;
import java.util.Map;
import io.fabric8.kubernetes.client.Watch;
import io.fabric8.kubernetes.client.dsl.FilterWatchListMultiDeletable;
import io.fabric8.kubernetes.client.dsl.NonNamespaceOperation;
import io.fabric8.openshift.api.model.Build;
import io.fabric8.openshift.api.model.BuildConfig;
import io.fabric8.openshift.api.model.BuildConfigList;
import io.fabric8.openshift.api.model.DoneableBuildConfig;
import io.fabric8.openshift.client.OpenShiftClient;
import io.fabric8.openshift.client.dsl.BuildConfigResource;
import org.apache.camel.Exchange;
import org.apache.camel.component.kubernetes.AbstractKubernetesEndpoint;
import org.apache.camel.component.kubernetes.KubernetesConstants;
import org.apache.camel.component.kubernetes.KubernetesOperations;
import org.apache.camel.support.DefaultProducer;
import org.apache.camel.support.MessageHelper;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class OpenshiftBuildConfigsProducer extends DefaultProducer {
private static final Logger LOG = LoggerFactory.getLogger(OpenshiftBuildConfigsProducer.class);
public OpenshiftBuildConfigsProducer(AbstractKubernetesEndpoint endpoint) {
super(endpoint);
}
@Override
public AbstractKubernetesEndpoint getEndpoint() {
return (AbstractKubernetesEndpoint) super.getEndpoint();
}
@Override
public void process(Exchange exchange) throws Exception {
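// Use the operation configured on the endpoint; fall back to the KUBERNETES_OPERATION header when none is set.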
String operation;
if (ObjectHelper.isEmpty(getEndpoint().getKubernetesConfiguration().getOperation())) {
operation = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_OPERATION, String.class);
} else {
operation = getEndpoint().getKubernetesConfiguration().getOperation();
}
switch (operation) {
case KubernetesOperations.LIST_BUILD_CONFIGS:
doList(exchange, operation);
break;
case KubernetesOperations.LIST_BUILD_CONFIGS_BY_LABELS_OPERATION:
doListBuildConfigsByLabels(exchange, operation);
break;
case KubernetesOperations.GET_BUILD_CONFIG_OPERATION:
doGetBuildConfig(exchange, operation);
break;
default:
throw new IllegalArgumentException("Unsupported operation " + operation);
}
}
protected void doList(Exchange exchange, String operation) throws Exception {
BuildConfigList buildConfigsList
= getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace().list();
exchange.getOut().setBody(buildConfigsList.getItems());
}
protected void doListBuildConfigsByLabels(Exchange exchange, String operation) throws Exception {
BuildConfigList buildConfigsList = null;
Map<String, String> labels = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIGS_LABELS, Map.class);
String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (!ObjectHelper.isEmpty(namespaceName)) {
NonNamespaceOperation<BuildConfig, BuildConfigList, DoneableBuildConfig, BuildConfigResource<BuildConfig, DoneableBuildConfig, Void, Build>> buildConfigs;
buildConfigs = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs()
.inNamespace(namespaceName);
for (Map.Entry<String, String> entry : labels.entrySet()) {
buildConfigs.withLabel(entry.getKey(), entry.getValue());
}
buildConfigsList = buildConfigs.list();
} else {
FilterWatchListMultiDeletable<BuildConfig, BuildConfigList, Boolean, Watch> buildConfigs
= getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inAnyNamespace();
for (Map.Entry<String, String> entry : labels.entrySet()) {
buildConfigs.withLabel(entry.getKey(), entry.getValue());
}
buildConfigsList = buildConfigs.list();
}
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
exchange.getOut().setBody(buildConfigsList.getItems());
}
protected void doGetBuildConfig(Exchange exchange, String operation) throws Exception {
BuildConfig buildConfig = null;
String buildConfigName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_BUILD_CONFIG_NAME, String.class);
String namespaceName = exchange.getIn().getHeader(KubernetesConstants.KUBERNETES_NAMESPACE_NAME, String.class);
if (ObjectHelper.isEmpty(buildConfigName)) {
LOG.error("Get a specific Build Config require specify a Build Config name");
throw new IllegalArgumentException("Get a specific Build Config require specify a Build Config name");
}
if (ObjectHelper.isEmpty(namespaceName)) {
LOG.error("Get a specific Build Config require specify a namespace name");
throw new IllegalArgumentException("Get a specific Build Config require specify a namespace name");
}
buildConfig = getEndpoint().getKubernetesClient().adapt(OpenShiftClient.class).buildConfigs().inNamespace(namespaceName)
.withName(buildConfigName).get();
MessageHelper.copyHeaders(exchange.getIn(), exchange.getOut(), true);
exchange.getOut().setBody(buildConfig);
}
}
| Java |
function ChangeTo(to) {
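// Toggle the admin comment area between the editable box ("text") and an ajax-rendered preview ("links").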
if (to == "text") {
$('#admincommentlinks').ghide();
$('#admincomment').gshow();
resize('admincomment');
var buttons = document.getElementsByName('admincommentbutton');
for (var i = 0; i < buttons.length; i++) {
buttons[i].setAttribute('onclick',"ChangeTo('links'); return false;");
}
} else if (to == "links") {
ajax.post("ajax.php?action=preview","form", function(response) {
$('#admincommentlinks').raw().innerHTML = response;
$('#admincomment').ghide();
$('#admincommentlinks').gshow();
var buttons = document.getElementsByName('admincommentbutton');
for (var i = 0; i < buttons.length; i++) {
buttons[i].setAttribute('onclick',"ChangeTo('text'); return false;");
}
})
}
}
function UncheckIfDisabled(checkbox) {
if (checkbox.disabled) {
checkbox.checked = false;
}
}
function AlterParanoia() {
// The required ratio is almost deducible from the downloaded amount, the seeding count and the snatched count,
// so we "warn" the user by automatically checking the required ratio box when they are
// revealing that information elsewhere
if (!$('input[name=p_ratio]').raw()) {
return;
}
var showDownload = $('input[name=p_downloaded]').raw().checked || ($('input[name=p_uploaded]').raw().checked && $('input[name=p_ratio]').raw().checked);
if (($('input[name=p_seeding_c]').raw().checked) && ($('input[name=p_snatched_c]').raw().checked) && showDownload) {
$('input[type=checkbox][name=p_requiredratio]').raw().checked = true;
$('input[type=checkbox][name=p_requiredratio]').raw().disabled = true;
} else {
$('input[type=checkbox][name=p_requiredratio]').raw().disabled = false;
}
$('input[name=p_torrentcomments_l]').raw().disabled = !$('input[name=p_torrentcomments_c]').raw().checked;
$('input[name=p_collagecontribs_l]').raw().disabled = !$('input[name=p_collagecontribs_c]').raw().checked;
$('input[name=p_requestsfilled_list]').raw().disabled = !($('input[name=p_requestsfilled_count]').raw().checked && $('input[name=p_requestsfilled_bounty]').raw().checked);
$('input[name=p_requestsvoted_list]').raw().disabled = !($('input[name=p_requestsvoted_count]').raw().checked && $('input[name=p_requestsvoted_bounty]').raw().checked);
$('input[name=p_uploads_l]').raw().disabled = !$('input[name=p_uploads_c]').raw().checked;
$('input[name=p_uniquegroups_l]').raw().disabled = !$('input[name=p_uniquegroups_c]').raw().checked;
$('input[name=p_perfectflacs_l]').raw().disabled = !$('input[name=p_perfectflacs_c]').raw().checked;
$('input[name=p_seeding_l]').raw().disabled = !$('input[name=p_seeding_c]').raw().checked;
$('input[name=p_leeching_l]').raw().disabled = !$('input[name=p_leeching_c]').raw().checked;
$('input[name=p_snatched_l]').raw().disabled = !$('input[name=p_snatched_c]').raw().checked;
UncheckIfDisabled($('input[name=p_torrentcomments_l]').raw());
UncheckIfDisabled($('input[name=p_collagecontribs_l]').raw());
UncheckIfDisabled($('input[name=p_requestsfilled_list]').raw());
UncheckIfDisabled($('input[name=p_requestsvoted_list]').raw());
UncheckIfDisabled($('input[name=p_uploads_l]').raw());
UncheckIfDisabled($('input[name=p_uniquegroups_l]').raw());
UncheckIfDisabled($('input[name=p_perfectflacs_l]').raw());
UncheckIfDisabled($('input[name=p_seeding_l]').raw());
UncheckIfDisabled($('input[name=p_leeching_l]').raw());
UncheckIfDisabled($('input[name=p_snatched_l]').raw());
// unique groups, "Perfect" FLACs and artists added are deducible from the list of uploads
if ($('input[name=p_uploads_l]').raw().checked) {
$('input[name=p_uniquegroups_c]').raw().checked = true;
$('input[name=p_uniquegroups_l]').raw().checked = true;
$('input[name=p_uniquegroups_c]').raw().disabled = true;
$('input[name=p_uniquegroups_l]').raw().disabled = true;
$('input[name=p_perfectflacs_c]').raw().checked = true;
$('input[name=p_perfectflacs_l]').raw().checked = true;
$('input[name=p_perfectflacs_c]').raw().disabled = true;
$('input[name=p_perfectflacs_l]').raw().disabled = true;
$('input[type=checkbox][name=p_artistsadded]').raw().checked = true;
$('input[type=checkbox][name=p_artistsadded]').raw().disabled = true;
} else {
$('input[name=p_uniquegroups_c]').raw().disabled = false;
$('input[name=p_uniquegroups_l]').raw().checked = false;
$('input[name=p_uniquegroups_l]').raw().disabled = true;
$('input[name=p_perfectflacs_c]').raw().disabled = false;
$('input[type=checkbox][name=p_artistsadded]').raw().disabled = false;
}
if ($('input[name=p_collagecontribs_l]').raw().checked) {
$('input[name=p_collages_c]').raw().disabled = true;
$('input[name=p_collages_l]').raw().disabled = true;
$('input[name=p_collages_c]').raw().checked = true;
$('input[name=p_collages_l]').raw().checked = true;
} else {
$('input[name=p_collages_c]').raw().disabled = false;
$('input[name=p_collages_l]').raw().disabled = !$('input[name=p_collages_c]').raw().checked;
UncheckIfDisabled($('input[name=p_collages_l]').raw());
}
}
function ParanoiaReset(checkbox, drops) {
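// drops: 0 resets each p_* dropdown to its first option, 1 to its second-to-last option, 2 to its last.
// checkbox: true/false (un)checks every p_* box except p_lastseen; 3 checks all of them except the *_l / *_list ones.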
var selects = $('select');
for (var i = 0; i < selects.results(); i++) {
if (selects.raw(i).name.match(/^p_/)) {
if (drops == 0) {
selects.raw(i).selectedIndex = 0;
} else if (drops == 1) {
selects.raw(i).selectedIndex = selects.raw(i).options.length - 2;
} else if (drops == 2) {
selects.raw(i).selectedIndex = selects.raw(i).options.length - 1;
}
AlterParanoia();
}
}
var checkboxes = $(':checkbox');
for (var i = 0; i < checkboxes.results(); i++) {
if (checkboxes.raw(i).name.match(/^p_/) && (checkboxes.raw(i).name != 'p_lastseen')) {
if (checkbox == 3) {
checkboxes.raw(i).checked = !(checkboxes.raw(i).name.match(/_list$/) || checkboxes.raw(i).name.match(/_l$/));
} else {
checkboxes.raw(i).checked = checkbox;
}
AlterParanoia();
}
}
}
function ParanoiaResetOff() {
ParanoiaReset(true, 0);
}
function ParanoiaResetStats() {
ParanoiaReset(3, 0);
$('input[name=p_collages_l]').raw().checked = false;
}
function ParanoiaResetOn() {
ParanoiaReset(false, 0);
$('input[name=p_collages_c]').raw().checked = false;
$('input[name=p_collages_l]').raw().checked = false;
}
addDOMLoadEvent(AlterParanoia);
function ToggleWarningAdjust(selector) {
if (selector.options[selector.selectedIndex].value == '---') {
$('#ReduceWarningTR').gshow();
$('#ReduceWarning').raw().disabled = false;
} else {
$('#ReduceWarningTR').ghide();
$('#ReduceWarning').raw().disabled = true;
}
}
addDOMLoadEvent(ToggleIdenticons);
function ToggleIdenticons() {
var disableAvatars = $('#disableavatars');
if (disableAvatars.size()) {
var selected = disableAvatars[0].selectedIndex;
if (selected == 2 || selected == 3) {
$('#identicons').gshow();
} else {
$('#identicons').ghide();
}
}
}
function userform_submit() {
if ($('#resetpasskey').is(':checked')) {
if (!confirm('Are you sure you want to reset your passkey?')) {
return false;
}
}
return formVal();
}
function togglePassKey(key) {
if ($('#passkey').raw().innerHTML == 'View') {
$('#passkey').raw().innerHTML = key;
} else {
$('#passkey').raw().innerHTML = 'View';
}
}
function commStats(userid) {
$('.user_commstats').html('Loading...');
ajax.get('ajax.php?action=community_stats&userid=' + userid, function(JSONresponse) {
var response = JSON.parse(JSONresponse) || false;
if (!response || response.status == 'failure') {
$('.user_commstats').html('An error occurred');
return;
}
displayCommStats(response.response);
});
}
function displayCommStats(stats) {
var baseid = '#user_commstats_';
for (x in stats) {
if (stats[x] === false) {
continue;
}
switch (x) {
case 'leeching':
$(baseid + x).html(stats[x]);
break;
case 'seeding':
$(baseid + x).html(stats[x]);
break;
case 'downloaded':
$(baseid + x).html(stats[x]);
break;
case 'snatched':
$(baseid + x).html(stats[x]);
break;
case 'usnatched':
$(baseid + x).html('(' + stats[x] + ')');
break;
case 'udownloaded':
$(baseid + x).html('(' + stats[x] + ')');
break;
case 'seedingperc':
$(baseid + x).html('(' + stats[x] + '%)');
break;
}
}
}
$(document).ready(function() {
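// Fill the change-password field with a random 15-character password when the button is clicked.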
$("#random_password").click(function() {
var length = 15,
charset = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_+-=<>?",
password = "";
for (var i = 0, n = charset.length; i < length; ++i) {
password += charset.charAt(Math.floor(Math.random() * n));
}
$('#change_password').val(password);
});
});
| JavaScript |
#!/bin/bash
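# Reset ZooKeeper (id 1) and Cassandra state, drop old compaction stats, then rebuild with ant.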
rm -r /var/zootmp/*
echo "1" > /var/zootmp/myid
rm -r /var/lib/cassandra/*
rm ./AcaZoo-CompactionStats.dat
ant build
| Shell |
#!/usr/bin/env perl
# $Source: /cvsroot/ensembl/ensembl-personal/genebuilders/ccds/scripts/store_ccds_xrefs.pl,v $
# $Revision: 1.13 $
=pod
=head1 NAME
store_ccds_xrefs.pl
=head1 SYNOPSIS
Make CCDS Xrefs.
=head1 DESCRIPTION
Will store the Ensembl transcript stable_id that matches the ccds structure.
Originally written for Ian Longden. Based on make_enst_to_ccds.pl
=head1 ARGUMENTS
perl store_ccds_xrefs.pl
-ccds_dbname
-ccds_host
-ccds_port
-ccds_user
-ccds_pass
-dbname
-host
-port
-user
-verbose
-species
-path
-write
-delete_old
=head1 EXAMPLE
perl $ENSEMBL_PERSONAL/genebuilders/ccds/scripts/store_ccds_xrefs.pl -ccds_dbname db8_human_vega_61 \
-ccds_host genebuild7 -ccds_port 3306 -ccds_user user -ccds_pass password \
-dbname homo_sapiens_core_61_37f -host ens-staging1 -port 3306 -user ensro -verbose \
-species human -path GRCh37 -write -delete_old
=cut
use warnings;
use strict;
use Getopt::Long;
use Bio::EnsEMBL::DBSQL::DBAdaptor;
use Bio::EnsEMBL::Utils::Exception qw(warning throw);
# db of CCDS structures
my $ccds_host = '';
my $ccds_port = '3306';
my $ccds_user = 'ensro';
my $ccds_pass = undef;
my $ccds_dbname = '';
# db of Ensembl (protein_coding) genes
my $host = 'ens-staging';
my $port = '';
my $user = 'ensro';
my $dbname = '';
my $path = 'GRCh37';
my $species = 'human';
my $verbose;
my $write;
my $delete_old;
&GetOptions( 'ccds_host=s' => \$ccds_host,
'ccds_port=s' => \$ccds_port,
'ccds_user=s' => \$ccds_user,
'ccds_pass=s' => \$ccds_pass,
'ccds_dbname=s' => \$ccds_dbname,
'host=s' => \$host,
'port=s' => \$port,
'user=s' => \$user,
'dbname=s' => \$dbname,
'path=s' => \$path,
'species=s' => \$species,
'verbose' => \$verbose,
'delete_old' => \$delete_old,
'write' => \$write, );
if ( !defined $species ) {
throw("Please define species as human or mouse");
} else {
$species =~ s/\s//g;
if ( $species =~ /^human$/i || $species =~ /^mouse$/i ) {
# we're ok
print "Species is *$species*\n";
} else {
throw("Species must be defined as human or mouse");
}
}
# we want to keep a record of any polymorphic pseudogenes for havana
# let's not write a file until the end though since they are not
# common
my @polymorphic_pseudogene;
# connect to dbs
my $db =
new Bio::EnsEMBL::DBSQL::DBAdaptor( -host => $host,
-user => $user,
-port => $port,
-dbname => $dbname );
my $ccds_db =
new Bio::EnsEMBL::DBSQL::DBAdaptor( -host => $ccds_host,
-user => $ccds_user,
-pass => $ccds_pass,
-port => $ccds_port,
-dbname => $ccds_dbname );
$ccds_db->dnadb($db);
my $ccds_sa = $ccds_db->get_SliceAdaptor;
my $outdea = $ccds_db->get_DBEntryAdaptor;
my $sa = $db->get_SliceAdaptor;
###
# delete old ones if delete_old set
###
if($write and $delete_old){
my $sth = $outdea->prepare('delete ox from xref x, object_xref ox, external_db e where x.xref_id = ox.xref_id and x.external_db_id = e.external_db_id and e.db_name like "Ens_%"');
$sth->execute || die "Could not delete old object_xrefs";
$sth = $outdea->prepare('delete x from xref x, external_db e where x.external_db_id = e.external_db_id and e.db_name like "Ens_%"');
$sth->execute || die "Could not delete old xrefs";
}
# # #
# Loop thru toplevels
# # #
# maybe should use toplevel instead of chromosome?
foreach my $chr ( @{ $ccds_sa->fetch_all('chromosome') } ) {
print "Doing chromosome " . $chr->name . "\n" if ($verbose);
# fetch all CCDS structures on slice
foreach my $ccds_gene ( @{ $chr->get_all_Genes( undef, undef, 1 ) } ) {
# make sure genes are all on chr level
$ccds_gene = $ccds_gene->transform( 'chromosome', $path );
# loop thru all CCDS transcripts
foreach my $ccds_trans ( @{ $ccds_gene->get_all_Transcripts() } ) {
print "=> doing ccds trans "
. $ccds_trans->dbID
. ": start "
. $ccds_trans->start
. " stop "
. $ccds_trans->end
. " strand "
. $ccds_trans->strand . " \n"
if ($verbose);
# find the ccds_id
my $ccds_id;
my @db_entries = @{ $ccds_trans->get_all_DBEntries('CCDS') };
my %xref_hash;
foreach my $dbe (@db_entries) {
print "dbe " . $dbe->display_id . " " . $dbe->dbname . "\n";
}
# store unique CCDS xrefs for the transcript
foreach my $entry (@db_entries) {
$xref_hash{ $entry->display_id() } = 1;
}
# we should not have more than one CCDS id
# associated with a transcript
if ( scalar keys %xref_hash != 1 ) {
foreach my $entry ( keys %xref_hash ) {
print " Dodgy xref : " . $entry . "\n";
}
throw( "Something odd going on: Transcript dbID "
. $ccds_trans->dbID . " has "
. scalar( keys %xref_hash )
. " xrefs" );
} else {
# all is good; CCDS transcript only has 1 CCDS xref
foreach my $entry ( keys %xref_hash ) {
$ccds_id = $entry;
print "=> on ccds $ccds_id\n" if ($verbose);
}
}
# define the genomic location that we're working in
# ie. where the CCDS transcript is
my $chr_name = $ccds_trans->slice->seq_region_name;
my $start = $ccds_trans->start();
my $end = $ccds_trans->end();
# now fetch the slice out of ensembl db
my $slice =
$sa->fetch_by_region( 'chromosome', $chr_name, $start, $end, '1',
$path );
print " Ensembl slice name " . $slice->name . "\n" if ($verbose);
# get ccds coding exons
my @ccds_exons = @{ $ccds_trans->get_all_translateable_Exons() };
print " have " . @ccds_exons . " ccds coding exons\n" if ($verbose);
# get all Ensembl genes overlapping the CCDS regions
foreach my $gene ( @{ $slice->get_all_Genes( undef, undef, 1 ) } ) {
# only look at protein_coding genes
next unless ( $gene->biotype =~ /protein_coding/ || $gene->biotype =~ /polymorphic_pseudogene/);
# debug
# next if $gene->biotype =~ /protein_coding/ ;
# keep a record if it is a polymorphic pseudogene - these will need to be sent to havana
if ($gene->biotype =~ /polymorphic_pseudogene/) {
print STDERR " found a poly pseudo gene\n" if ($verbose);
push @polymorphic_pseudogene, $ccds_id;
}
# make sure ensembl gene also on chr level
print " on ensembl gene " . $gene->display_id . "\n" if ($verbose);
$gene = $gene->transform( 'chromosome', $path );
# loop thru ensembl transcripts
foreach my $trans ( @{ $gene->get_all_Transcripts } ) {
print " on ensembl trans " . $trans->display_id . "\n"
if ($verbose);
# get ensembl coding exons
my @exons = @{ $trans->get_all_translateable_Exons() };
print " have " . @exons . " ensembl coding exons\n" if ($verbose);
# loop thru ensembl coding exons and make sure they all match the ccds
# exons exactly
my $match = 0;
if ( scalar @exons == scalar @ccds_exons ) {
for ( my $i = 0 ; $i < scalar(@exons) ; $i++ ) {
# print " Ensembl start ".$exons[$i]->start." end ".$exons[$i]->end.
# " CCDS start ".$ccds_exons[$i]->start." end ".$ccds_exons[$i]->end."\n";
if ( $ccds_exons[$i]->start == $exons[$i]->start
&& $ccds_exons[$i]->end == $exons[$i]->end
&& $ccds_exons[$i]->strand == $exons[$i]->strand )
{
$match++;
} #else {
# print "no match ".$ccds_exons[$i]->start." != ".$exons[$i]->start." or ".
# $ccds_exons[$i]->end." != ".$exons[$i]->end."\n";
#}
}
if ( $match == scalar @exons ) {
print "MATCH\t" . $trans->stable_id . "\t" . $ccds_id . "\n"
if ($verbose);
store_ensembl_xref( $outdea, $species, $ccds_trans,
$trans->stable_id, $write );
store_ensembl_xref( $outdea,
$species,
$ccds_trans->translation,
$trans->translation->stable_id,
$write );
} else {
print " no match ($match)\t"
. $trans->stable_id . "\t"
. $ccds_id . "\n"
if ($verbose);
}
} ## end if ( scalar @exons == ...)
} ## end foreach my $trans ( @{ $gene...})
} ## end foreach my $gene ( @{ $slice...})
} ## end foreach my $ccds_trans ( @{...})
} ## end foreach my $ccds_gene ( @{ ...})
} ## end foreach my $chr ( @{ $ccds_sa...})
# report polymorphic pseudogenes
if (@polymorphic_pseudogene) {
for my $display_id (@polymorphic_pseudogene) {
print STDERR $display_id." matches a polymorphic pseudogene\n";
}
} else {
print STDERR "Found 0 polymorphic pseudogenes\n";
}
sub store_ensembl_xref {
my ( $dbea, $species, $ccds_trans, $ensembl_trans_stable_id, $write ) = @_;
if ( ref($ccds_trans) eq "Bio::EnsEMBL::Transcript" ) {
my $external_db;
if ( $species =~ /^human$/i ) {
$external_db = 'Ens_Hs_transcript';
} elsif ( $species =~ /^mouse$/i ) {
$external_db = 'Ens_Mm_transcript';
}
# make an xref
my $entry =
new Bio::EnsEMBL::DBEntry( -adaptor => $dbea,
-primary_id => $ensembl_trans_stable_id,
-display_id => $ensembl_trans_stable_id,
-version => 0,
-dbname => $external_db);
# store xref
$dbea->store( $entry, $ccds_trans->dbID, 'Transcript' ) if ($write);
} elsif ( ref($ccds_trans) eq "Bio::EnsEMBL::Translation" ) {
my $external_db;
if ( $species =~ /^human$/i ) {
$external_db = 'Ens_Hs_translation';
} elsif ( $species =~ /^mouse$/i ) {
$external_db = 'Ens_Mm_translation';
}
# make an xref
my $entry =
new Bio::EnsEMBL::DBEntry( -adaptor => $dbea,
-primary_id => $ensembl_trans_stable_id,
-display_id => $ensembl_trans_stable_id,
-version => 0,
-dbname => $external_db);
# store xref
$dbea->store( $entry, $ccds_trans->dbID, 'Translation' ) if ($write);
} else {
throw("Not a Transcript or Translation ");
}
return;
} ## end sub store_ensembl_xref
| Perl |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Reflection.Metadata;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeGen;
using Microsoft.CodeAnalysis.Collections;
using Microsoft.CodeAnalysis.CSharp.Emit;
using Microsoft.CodeAnalysis.CSharp.Symbols;
using Microsoft.CodeAnalysis.CSharp.Syntax;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Emit;
using Microsoft.CodeAnalysis.Symbols;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.CSharp
{
/// <summary>
/// The compilation object is an immutable representation of a single invocation of the
/// compiler. Although immutable, a compilation is also on-demand, and will realize and cache
/// data as necessary. A compilation can produce a new compilation from existing compilation
/// with the application of small deltas. In many cases, it is more efficient than creating a
/// new compilation from scratch, as the new compilation can reuse information from the old
/// compilation.
/// </summary>
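/// <remarks>
/// For example, <c>AddSyntaxTrees</c> or <c>WithAssemblyName</c> return a new compilation that reuses
/// most of the state of the original one rather than rebuilding it from scratch.
/// </remarks>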
public sealed partial class CSharpCompilation : Compilation
{
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
//
// Changes to the public interface of this class should remain synchronized with the VB
// version. Do not make any changes to the public interface without making the corresponding
// change to the VB version.
//
// !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
internal static readonly ParallelOptions DefaultParallelOptions = new ParallelOptions();
private readonly CSharpCompilationOptions _options;
private readonly Lazy<Imports> _globalImports;
private readonly Lazy<AliasSymbol> _globalNamespaceAlias; // alias symbol used to resolve "global::".
private readonly Lazy<ImplicitNamedTypeSymbol> _scriptClass;
private readonly CSharpCompilation _previousSubmission;
// All imports (using directives and extern aliases) in syntax trees in this compilation.
// NOTE: We need to de-dup since the Imports objects that populate the list may be GC'd
// and re-created.
private ConcurrentSet<ImportInfo> _lazyImportInfos;
// Cache the CLS diagnostics for the whole compilation so they aren't computed repeatedly.
// NOTE: Presently, we do not cache the per-tree diagnostics.
private ImmutableArray<Diagnostic> _lazyClsComplianceDiagnostics;
private Conversions _conversions;
internal Conversions Conversions
{
get
{
if (_conversions == null)
{
Interlocked.CompareExchange(ref _conversions, new BuckStopsHereBinder(this).Conversions, null);
}
return _conversions;
}
}
/// <summary>
/// Manages anonymous types declared in this compilation. Unifies types that are structurally equivalent.
/// </summary>
private readonly AnonymousTypeManager _anonymousTypeManager;
private NamespaceSymbol _lazyGlobalNamespace;
internal readonly BuiltInOperators builtInOperators;
/// <summary>
/// The <see cref="SourceAssemblySymbol"/> for this compilation. Do not access directly, use Assembly property
/// instead. This field is lazily initialized by ReferenceManager; ReferenceManager.CacheLockObject must be locked
/// while ReferenceManager "calculates" the value and assigns it, so that several threads do not perform duplicate
/// "calculation" simultaneously.
/// </summary>
private SourceAssemblySymbol _lazyAssemblySymbol;
/// <summary>
/// Holds onto data related to reference binding.
/// The manager is shared among multiple compilations that we expect to have the same result of reference binding.
/// In most cases this can be determined without performing the binding. If the compilation however contains a circular
/// metadata reference (a metadata reference that refers back to the compilation) we need to avoid sharing of the binding results.
/// We do so by creating a new reference manager for such compilation.
/// </summary>
private ReferenceManager _referenceManager;
private readonly SyntaxAndDeclarationManager _syntaxAndDeclarations;
/// <summary>
/// Contains the main method of this assembly, if there is one.
/// </summary>
private EntryPoint _lazyEntryPoint;
/// <summary>
/// The set of trees for which a <see cref="CompilationUnitCompletedEvent"/> has been added to the queue.
/// </summary>
private HashSet<SyntaxTree> _lazyCompilationUnitCompletedTrees;
public override string Language
{
get
{
return LanguageNames.CSharp;
}
}
public override bool IsCaseSensitive
{
get
{
return true;
}
}
/// <summary>
/// The options the compilation was created with.
/// </summary>
public new CSharpCompilationOptions Options
{
get
{
return _options;
}
}
internal AnonymousTypeManager AnonymousTypeManager
{
get
{
return _anonymousTypeManager;
}
}
internal override CommonAnonymousTypeManager CommonAnonymousTypeManager
{
get
{
return AnonymousTypeManager;
}
}
/// <summary>
/// True when the compiler is run in "strict" mode, in which it enforces the language specification
/// in some cases even at the expense of full compatibility. Such differences typically arise when
/// earlier versions of the compiler failed to enforce the full language specification.
/// </summary>
internal bool FeatureStrictEnabled => Feature("strict") != null;
/// <summary>
/// The language version that was used to parse the syntax trees of this compilation.
/// </summary>
public LanguageVersion LanguageVersion
{
get;
}
public override INamedTypeSymbol CreateErrorTypeSymbol(INamespaceOrTypeSymbol container, string name, int arity)
{
return new ExtendedErrorTypeSymbol((NamespaceOrTypeSymbol)container, name, arity, null);
}
#region Constructors and Factories
private static readonly CSharpCompilationOptions s_defaultOptions = new CSharpCompilationOptions(OutputKind.ConsoleApplication);
private static readonly CSharpCompilationOptions s_defaultSubmissionOptions = new CSharpCompilationOptions(OutputKind.DynamicallyLinkedLibrary);
/// <summary>
/// Creates a new compilation from scratch. Methods such as AddSyntaxTrees or AddReferences
/// on the returned object will allow to continue building up the Compilation incrementally.
/// </summary>
/// <param name="assemblyName">Simple assembly name.</param>
/// <param name="syntaxTrees">The syntax trees with the source code for the new compilation.</param>
/// <param name="references">The references for the new compilation.</param>
/// <param name="options">The compiler options to use.</param>
/// <returns>A new compilation.</returns>
public static CSharpCompilation Create(
string assemblyName,
IEnumerable<SyntaxTree> syntaxTrees = null,
IEnumerable<MetadataReference> references = null,
CSharpCompilationOptions options = null)
{
return Create(
assemblyName,
options ?? s_defaultOptions,
syntaxTrees,
references,
previousSubmission: null,
returnType: null,
hostObjectType: null,
isSubmission: false);
}
/// <summary>
/// Creates a new compilation that can be used in scripting.
/// </summary>
public static CSharpCompilation CreateSubmission(
string assemblyName,
SyntaxTree syntaxTree = null,
IEnumerable<MetadataReference> references = null,
CSharpCompilationOptions options = null,
Compilation previousSubmission = null,
Type returnType = null,
Type hostObjectType = null)
{
CheckSubmissionOptions(options);
return Create(
assemblyName,
options ?? s_defaultSubmissionOptions,
(syntaxTree != null) ? new[] { syntaxTree } : SpecializedCollections.EmptyEnumerable<SyntaxTree>(),
references,
(CSharpCompilation)previousSubmission,
returnType,
hostObjectType,
isSubmission: true);
}
private static CSharpCompilation Create(
string assemblyName,
CSharpCompilationOptions options,
IEnumerable<SyntaxTree> syntaxTrees,
IEnumerable<MetadataReference> references,
CSharpCompilation previousSubmission,
Type returnType,
Type hostObjectType,
bool isSubmission)
{
Debug.Assert(options != null);
CheckAssemblyName(assemblyName);
var validatedReferences = ValidateReferences<CSharpCompilationReference>(references);
ValidateSubmissionParameters(previousSubmission, returnType, ref hostObjectType);
var compilation = new CSharpCompilation(
assemblyName,
options,
validatedReferences,
previousSubmission,
returnType,
hostObjectType,
isSubmission,
referenceManager: null,
reuseReferenceManager: false,
syntaxAndDeclarations: new SyntaxAndDeclarationManager(
ImmutableArray<SyntaxTree>.Empty,
options.ScriptClassName,
options.SourceReferenceResolver,
CSharp.MessageProvider.Instance,
isSubmission,
state: null));
if (syntaxTrees != null)
{
compilation = compilation.AddSyntaxTrees(syntaxTrees);
}
Debug.Assert((object)compilation._lazyAssemblySymbol == null);
return compilation;
}
private CSharpCompilation(
string assemblyName,
CSharpCompilationOptions options,
ImmutableArray<MetadataReference> references,
CSharpCompilation previousSubmission,
Type submissionReturnType,
Type hostObjectType,
bool isSubmission,
ReferenceManager referenceManager,
bool reuseReferenceManager,
SyntaxAndDeclarationManager syntaxAndDeclarations,
AsyncQueue<CompilationEvent> eventQueue = null)
: base(assemblyName, references, SyntaxTreeCommonFeatures(syntaxAndDeclarations.ExternalSyntaxTrees), submissionReturnType, hostObjectType, isSubmission, eventQueue)
{
_wellKnownMemberSignatureComparer = new WellKnownMembersSignatureComparer(this);
_options = options;
this.builtInOperators = new BuiltInOperators(this);
_scriptClass = new Lazy<ImplicitNamedTypeSymbol>(BindScriptClass);
_globalImports = new Lazy<Imports>(BindGlobalUsings);
_globalNamespaceAlias = new Lazy<AliasSymbol>(CreateGlobalNamespaceAlias);
_anonymousTypeManager = new AnonymousTypeManager(this);
this.LanguageVersion = CommonLanguageVersion(syntaxAndDeclarations.ExternalSyntaxTrees);
if (isSubmission)
{
Debug.Assert(previousSubmission == null || previousSubmission.HostObjectType == hostObjectType);
_previousSubmission = previousSubmission;
}
else
{
Debug.Assert(previousSubmission == null && submissionReturnType == null && hostObjectType == null);
}
if (reuseReferenceManager)
{
referenceManager.AssertCanReuseForCompilation(this);
_referenceManager = referenceManager;
}
else
{
_referenceManager = new ReferenceManager(
MakeSourceAssemblySimpleName(),
this.Options.AssemblyIdentityComparer,
observedMetadata: referenceManager?.ObservedMetadata);
}
_syntaxAndDeclarations = syntaxAndDeclarations;
Debug.Assert((object)_lazyAssemblySymbol == null);
if (EventQueue != null) EventQueue.Enqueue(new CompilationStartedEvent(this));
}
internal override void ValidateDebugEntryPoint(IMethodSymbol debugEntryPoint, DiagnosticBag diagnostics)
{
Debug.Assert(debugEntryPoint != null);
// Debug entry point has to be a method definition from this compilation.
var methodSymbol = debugEntryPoint as MethodSymbol;
if (methodSymbol?.DeclaringCompilation != this || !methodSymbol.IsDefinition)
{
diagnostics.Add(ErrorCode.ERR_DebugEntryPointNotSourceMethodDefinition, Location.None);
}
}
private static LanguageVersion CommonLanguageVersion(ImmutableArray<SyntaxTree> syntaxTrees)
{
LanguageVersion? result = null;
foreach (var tree in syntaxTrees)
{
var version = ((CSharpParseOptions)tree.Options).LanguageVersion;
if (result == null)
{
result = version;
}
else if (result != version)
{
throw new ArgumentException(CodeAnalysisResources.InconsistentLanguageVersions, nameof(syntaxTrees));
}
}
return result ?? CSharpParseOptions.Default.LanguageVersion;
}
/// <summary>
/// Create a duplicate of this compilation with different symbol instances.
/// </summary>
public new CSharpCompilation Clone()
{
return new CSharpCompilation(
this.AssemblyName,
_options,
this.ExternalReferences,
_previousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
_referenceManager,
reuseReferenceManager: true,
syntaxAndDeclarations: _syntaxAndDeclarations);
}
private CSharpCompilation Update(
ReferenceManager referenceManager,
bool reuseReferenceManager,
SyntaxAndDeclarationManager syntaxAndDeclarations)
{
return new CSharpCompilation(
this.AssemblyName,
_options,
this.ExternalReferences,
_previousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
referenceManager,
reuseReferenceManager,
syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation with the specified name.
/// </summary>
public new CSharpCompilation WithAssemblyName(string assemblyName)
{
CheckAssemblyName(assemblyName);
// Can't reuse references since the source assembly name changed and the referenced symbols might
// have an internals-visible-to relationship with this compilation, or they might have had a circular
// reference to this compilation.
return new CSharpCompilation(
assemblyName,
_options,
this.ExternalReferences,
_previousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
_referenceManager,
reuseReferenceManager: assemblyName == this.AssemblyName,
syntaxAndDeclarations: _syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation with the specified references.
/// </summary>
/// <remarks>
/// The new <see cref="CSharpCompilation"/> will query the given <see cref="MetadataReference"/> for the underlying
/// metadata as soon as they are needed.
///
/// The new compilation uses whatever metadata is currently being provided by the <see cref="MetadataReference"/>.
/// For example, if the current compilation references a metadata file that has changed since the compilation was created,
/// the new compilation will use the updated version, while the current compilation keeps using the previous one (it doesn't change).
/// </remarks>
public new CSharpCompilation WithReferences(IEnumerable<MetadataReference> references)
{
// References might have changed, don't reuse reference manager.
// Don't even reuse observed metadata - let the manager query for the metadata again.
return new CSharpCompilation(
this.AssemblyName,
_options,
ValidateReferences<CSharpCompilationReference>(references),
_previousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
referenceManager: null,
reuseReferenceManager: false,
syntaxAndDeclarations: _syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation with the specified references.
/// </summary>
public new CSharpCompilation WithReferences(params MetadataReference[] references)
{
return this.WithReferences((IEnumerable<MetadataReference>)references);
}
/// <summary>
/// Creates a new compilation with the specified compilation options.
/// </summary>
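/// <example>
/// An illustrative sketch (<c>compilation</c> is an assumed existing <see cref="CSharpCompilation"/>):
/// <code>
/// var libraryCompilation = compilation.WithOptions(
///     compilation.Options.WithOutputKind(OutputKind.DynamicallyLinkedLibrary));
/// </code>
/// </example>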
public CSharpCompilation WithOptions(CSharpCompilationOptions options)
{
var oldOptions = this.Options;
bool reuseReferenceManager = oldOptions.CanReuseCompilationReferenceManager(options);
bool reuseSyntaxAndDeclarationManager = oldOptions.ScriptClassName == options.ScriptClassName &&
oldOptions.SourceReferenceResolver == options.SourceReferenceResolver;
return new CSharpCompilation(
this.AssemblyName,
options,
this.ExternalReferences,
_previousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
_referenceManager,
reuseReferenceManager,
reuseSyntaxAndDeclarationManager ?
_syntaxAndDeclarations :
new SyntaxAndDeclarationManager(
_syntaxAndDeclarations.ExternalSyntaxTrees,
options.ScriptClassName,
options.SourceReferenceResolver,
_syntaxAndDeclarations.MessageProvider,
_syntaxAndDeclarations.IsSubmission,
state: null));
}
/// <summary>
/// Returns a new compilation with the given compilation set as the previous submission.
/// </summary>
internal CSharpCompilation WithPreviousSubmission(CSharpCompilation newPreviousSubmission)
{
if (!this.IsSubmission)
{
throw new InvalidOperationException(CSharpResources.CannotHavePreviousSubmission);
}
// Reference binding doesn't depend on previous submission so we can reuse it.
return new CSharpCompilation(
this.AssemblyName,
_options,
this.ExternalReferences,
newPreviousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
_referenceManager,
reuseReferenceManager: true,
syntaxAndDeclarations: _syntaxAndDeclarations);
}
/// <summary>
/// Returns a new compilation with a given event queue.
/// </summary>
internal override Compilation WithEventQueue(AsyncQueue<CompilationEvent> eventQueue)
{
return new CSharpCompilation(
this.AssemblyName,
_options,
this.ExternalReferences,
_previousSubmission,
this.SubmissionReturnType,
this.HostObjectType,
this.IsSubmission,
_referenceManager,
reuseReferenceManager: true,
syntaxAndDeclarations: _syntaxAndDeclarations,
eventQueue: eventQueue);
}
#endregion
#region Submission
internal new CSharpCompilation PreviousSubmission
{
get { return _previousSubmission; }
}
// TODO (tomat): consider moving this method to SemanticModel
/// <summary>
/// Returns the type of the submission return value.
/// </summary>
/// <returns>
/// The type of the last expression of the submission.
/// Null if the type of the last expression is unknown (null).
/// Void type if the type of the last expression statement is void or
/// the submission ends with a declaration or statement that is not an expression statement.
/// </returns>
/// <remarks>
/// Note that the return type is System.Void for both compilations "System.Console.WriteLine();" and "System.Console.WriteLine()",
/// and <paramref name="hasValue"/> is <c>False</c> for the former and <c>True</c> for the latter.
/// </remarks>
/// <param name="hasValue">True if the submission has a value, i.e. if it ends with a statement that is an expression statement.</param>
/// <exception cref="InvalidOperationException">The compilation doesn't represent a submission (<see cref="Compilation.IsSubmission"/> returns false).</exception>
internal new TypeSymbol GetSubmissionResultType(out bool hasValue)
{
if (!IsSubmission)
{
throw new InvalidOperationException(CSharpResources.ThisCompilationNotInteractive);
}
hasValue = false;
// A submission may be empty or consist of a single script file.
var tree = _syntaxAndDeclarations.ExternalSyntaxTrees.SingleOrDefault();
if (tree == null || tree.Options.Kind != SourceCodeKind.Interactive)
{
return GetSpecialType(SpecialType.System_Void);
}
var lastStatement = (GlobalStatementSyntax)tree.GetCompilationUnitRoot().Members.LastOrDefault(decl => decl.Kind() == SyntaxKind.GlobalStatement);
if (lastStatement == null || lastStatement.Statement.Kind() != SyntaxKind.ExpressionStatement)
{
return GetSpecialType(SpecialType.System_Void);
}
var expressionStatement = (ExpressionStatementSyntax)lastStatement.Statement;
if (!expressionStatement.SemicolonToken.IsMissing)
{
return GetSpecialType(SpecialType.System_Void);
}
var model = GetSemanticModel(tree);
hasValue = true;
var expression = expressionStatement.Expression;
var info = model.GetTypeInfo(expression);
return (TypeSymbol)info.ConvertedType;
}
#endregion
#region Syntax Trees (maintain an ordered list)
/// <summary>
/// The syntax trees (parsed from source code) that this compilation was created with.
/// </summary>
public new ImmutableArray<SyntaxTree> SyntaxTrees
{
get { return _syntaxAndDeclarations.GetLazyState().SyntaxTrees; }
}
/// <summary>
/// Returns true if this compilation contains the specified tree. False otherwise.
/// </summary>
public new bool ContainsSyntaxTree(SyntaxTree syntaxTree)
{
var cstree = syntaxTree as CSharpSyntaxTree;
return cstree != null && _syntaxAndDeclarations.GetLazyState().RootNamespaces.ContainsKey(cstree);
}
/// <summary>
/// Creates a new compilation with additional syntax trees.
/// </summary>
public new CSharpCompilation AddSyntaxTrees(params SyntaxTree[] trees)
{
return AddSyntaxTrees((IEnumerable<SyntaxTree>)trees);
}
/// <summary>
/// Creates a new compilation with additional syntax trees.
/// </summary>
public new CSharpCompilation AddSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
if (trees == null)
{
throw new ArgumentNullException(nameof(trees));
}
if (trees.IsEmpty())
{
return this;
}
// This HashSet is needed so that we don't allow adding the same tree twice
// with a single call to AddSyntaxTrees. Rather than using a separate HashSet,
// ReplaceSyntaxTrees can just check against ExternalSyntaxTrees, because we
// only allow replacing a single tree at a time.
var externalSyntaxTrees = PooledHashSet<SyntaxTree>.GetInstance();
var syntaxAndDeclarations = _syntaxAndDeclarations;
externalSyntaxTrees.AddAll(syntaxAndDeclarations.ExternalSyntaxTrees);
bool reuseReferenceManager = true;
int i = 0;
foreach (var tree in trees.Cast<CSharpSyntaxTree>())
{
if (tree == null)
{
throw new ArgumentNullException($"{nameof(trees)}[{i}]");
}
if (!tree.HasCompilationUnitRoot)
{
throw new ArgumentException(CSharpResources.TreeMustHaveARootNodeWith, $"{nameof(trees)}[{i}]");
}
if (externalSyntaxTrees.Contains(tree))
{
throw new ArgumentException(CSharpResources.SyntaxTreeAlreadyPresent, $"{nameof(trees)}[{i}]");
}
if (this.IsSubmission && tree.Options.Kind == SourceCodeKind.Regular)
{
throw new ArgumentException(CSharpResources.SubmissionCanOnlyInclude, $"{nameof(trees)}[{i}]");
}
externalSyntaxTrees.Add(tree);
reuseReferenceManager &= !tree.HasReferenceOrLoadDirectives;
i++;
}
externalSyntaxTrees.Free();
if (this.IsSubmission && i > 1)
{
throw new ArgumentException(CSharpResources.SubmissionCanHaveAtMostOne, nameof(trees));
}
syntaxAndDeclarations = syntaxAndDeclarations.AddSyntaxTrees(trees);
return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation without the specified syntax trees. Preserves metadata info for use with trees
/// added later.
/// </summary>
public new CSharpCompilation RemoveSyntaxTrees(params SyntaxTree[] trees)
{
return RemoveSyntaxTrees((IEnumerable<SyntaxTree>)trees);
}
/// <summary>
/// Creates a new compilation without the specified syntax trees. Preserves metadata info for use with trees
/// added later.
/// </summary>
public new CSharpCompilation RemoveSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
if (trees == null)
{
throw new ArgumentNullException(nameof(trees));
}
if (trees.IsEmpty())
{
return this;
}
var removeSet = PooledHashSet<SyntaxTree>.GetInstance();
// This HashSet gives fast membership checks against the compilation's external trees
// while validating the trees being removed.
var externalSyntaxTrees = PooledHashSet<SyntaxTree>.GetInstance();
var syntaxAndDeclarations = _syntaxAndDeclarations;
externalSyntaxTrees.AddAll(syntaxAndDeclarations.ExternalSyntaxTrees);
bool reuseReferenceManager = true;
int i = 0;
foreach (var tree in trees.Cast<CSharpSyntaxTree>())
{
if (!externalSyntaxTrees.Contains(tree))
{
// Check to make sure this is not a #load'ed tree.
var loadedSyntaxTreeMap = syntaxAndDeclarations.GetLazyState().LoadedSyntaxTreeMap;
if (SyntaxAndDeclarationManager.IsLoadedSyntaxTree(tree, loadedSyntaxTreeMap))
{
throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeFromLoadNoRemoveReplace, tree), $"{nameof(trees)}[{i}]");
}
throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, tree), $"{nameof(trees)}[{i}]");
}
removeSet.Add(tree);
reuseReferenceManager &= !tree.HasReferenceOrLoadDirectives;
i++;
}
externalSyntaxTrees.Free();
syntaxAndDeclarations = syntaxAndDeclarations.RemoveSyntaxTrees(removeSet);
removeSet.Free();
return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}
/// <summary>
/// Creates a new compilation without any syntax trees. Preserves metadata info
/// from this compilation for use with trees added later.
/// </summary>
public new CSharpCompilation RemoveAllSyntaxTrees()
{
var syntaxAndDeclarations = _syntaxAndDeclarations;
return Update(
_referenceManager,
reuseReferenceManager: !syntaxAndDeclarations.MayHaveReferenceDirectives(),
syntaxAndDeclarations: syntaxAndDeclarations.WithExternalSyntaxTrees(ImmutableArray<SyntaxTree>.Empty));
}
/// <summary>
/// Creates a new compilation without the old tree but with the new tree.
/// </summary>
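/// <example>
/// An illustrative sketch (<c>oldTree</c> is assumed to be one of this compilation's trees; <c>newSourceText</c> is an edited SourceText):
/// <code>
/// var newTree = oldTree.WithChangedText(newSourceText);
/// compilation = compilation.ReplaceSyntaxTree(oldTree, newTree);
/// </code>
/// </example>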
public new CSharpCompilation ReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree)
{
// this is just to force a cast exception
oldTree = (CSharpSyntaxTree)oldTree;
newTree = (CSharpSyntaxTree)newTree;
if (oldTree == null)
{
throw new ArgumentNullException(nameof(oldTree));
}
if (newTree == null)
{
return this.RemoveSyntaxTrees(oldTree);
}
else if (newTree == oldTree)
{
return this;
}
if (!newTree.HasCompilationUnitRoot)
{
throw new ArgumentException(CSharpResources.TreeMustHaveARootNodeWith, nameof(newTree));
}
var syntaxAndDeclarations = _syntaxAndDeclarations;
var externalSyntaxTrees = syntaxAndDeclarations.ExternalSyntaxTrees;
if (!externalSyntaxTrees.Contains(oldTree))
{
// Check to see if this is a #load'ed tree.
var loadedSyntaxTreeMap = syntaxAndDeclarations.GetLazyState().LoadedSyntaxTreeMap;
if (SyntaxAndDeclarationManager.IsLoadedSyntaxTree(oldTree, loadedSyntaxTreeMap))
{
throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeFromLoadNoRemoveReplace, oldTree), nameof(oldTree));
}
throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, oldTree), nameof(oldTree));
}
if (externalSyntaxTrees.Contains(newTree))
{
throw new ArgumentException(CSharpResources.SyntaxTreeAlreadyPresent, nameof(newTree));
}
// TODO(tomat): Consider comparing #r's of the old and the new tree. If they are exactly the same we could still reuse.
// This could be a perf win when editing a script file in the IDE. The services create a new compilation on every keystroke,
// replacing the tree with a new one.
var reuseReferenceManager = !oldTree.HasReferenceOrLoadDirectives() && !newTree.HasReferenceOrLoadDirectives();
syntaxAndDeclarations = syntaxAndDeclarations.ReplaceSyntaxTree(oldTree, newTree);
return Update(_referenceManager, reuseReferenceManager, syntaxAndDeclarations);
}
internal override int GetSyntaxTreeOrdinal(SyntaxTree tree)
{
Debug.Assert(this.ContainsSyntaxTree(tree));
return _syntaxAndDeclarations.GetLazyState().OrdinalMap[tree];
}
#endregion
#region References
internal override CommonReferenceManager CommonGetBoundReferenceManager()
{
return GetBoundReferenceManager();
}
internal new ReferenceManager GetBoundReferenceManager()
{
if ((object)_lazyAssemblySymbol == null)
{
_referenceManager.CreateSourceAssemblyForCompilation(this);
Debug.Assert((object)_lazyAssemblySymbol != null);
}
// referenceManager can only be accessed after we initialized the lazyAssemblySymbol.
// In fact, initialization of the assembly symbol might change the reference manager.
return _referenceManager;
}
// for testing only:
internal bool ReferenceManagerEquals(CSharpCompilation other)
{
return ReferenceEquals(_referenceManager, other._referenceManager);
}
public override ImmutableArray<MetadataReference> DirectiveReferences
{
get
{
return GetBoundReferenceManager().DirectiveReferences;
}
}
internal override IDictionary<string, MetadataReference> ReferenceDirectiveMap
{
get
{
return GetBoundReferenceManager().ReferenceDirectiveMap;
}
}
// for testing purposes
internal IEnumerable<string> ExternAliases
{
get
{
return GetBoundReferenceManager().ExternAliases;
}
}
/// <summary>
/// Gets the <see cref="AssemblySymbol"/> or <see cref="ModuleSymbol"/> for a metadata reference used to create this compilation.
/// </summary>
/// <returns><see cref="AssemblySymbol"/> or <see cref="ModuleSymbol"/> corresponding to the given reference or null if there is none.</returns>
/// <remarks>
/// Uses object identity when comparing two references.
/// </remarks>
internal new Symbol GetAssemblyOrModuleSymbol(MetadataReference reference)
{
if (reference == null)
{
throw new ArgumentNullException(nameof(reference));
}
if (reference.Properties.Kind == MetadataImageKind.Assembly)
{
return GetBoundReferenceManager().GetReferencedAssemblySymbol(reference);
}
else
{
Debug.Assert(reference.Properties.Kind == MetadataImageKind.Module);
int index = GetBoundReferenceManager().GetReferencedModuleIndex(reference);
return index < 0 ? null : this.Assembly.Modules[index];
}
}
public override IEnumerable<AssemblyIdentity> ReferencedAssemblyNames
{
get
{
return Assembly.Modules.SelectMany(module => module.GetReferencedAssemblies());
}
}
/// <summary>
/// All reference directives used in this compilation.
/// </summary>
internal override IEnumerable<ReferenceDirective> ReferenceDirectives
{
get { return this.Declarations.ReferenceDirectives; }
}
/// <summary>
/// Returns a metadata reference that a given #r resolves to.
/// </summary>
/// <param name="directive">#r directive.</param>
/// <returns>Metadata reference the specified directive resolves to.</returns>
public MetadataReference GetDirectiveReference(ReferenceDirectiveTriviaSyntax directive)
{
return ReferenceDirectiveMap[directive.File.ValueText];
}
/// <summary>
/// Creates a new compilation with additional metadata references.
/// </summary>
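/// <example>
/// An illustrative sketch (<c>referencePath</c> is an assumed path to a metadata assembly on disk):
/// <code>
/// compilation = compilation.AddReferences(MetadataReference.CreateFromFile(referencePath));
/// </code>
/// </example>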
public new CSharpCompilation AddReferences(params MetadataReference[] references)
{
return (CSharpCompilation)base.AddReferences(references);
}
/// <summary>
/// Creates a new compilation with additional metadata references.
/// </summary>
public new CSharpCompilation AddReferences(IEnumerable<MetadataReference> references)
{
return (CSharpCompilation)base.AddReferences(references);
}
/// <summary>
/// Creates a new compilation without the specified metadata references.
/// </summary>
public new CSharpCompilation RemoveReferences(params MetadataReference[] references)
{
return (CSharpCompilation)base.RemoveReferences(references);
}
/// <summary>
/// Creates a new compilation without the specified metadata references.
/// </summary>
public new CSharpCompilation RemoveReferences(IEnumerable<MetadataReference> references)
{
return (CSharpCompilation)base.RemoveReferences(references);
}
/// <summary>
/// Creates a new compilation without any metadata references.
/// </summary>
public new CSharpCompilation RemoveAllReferences()
{
return (CSharpCompilation)base.RemoveAllReferences();
}
/// <summary>
/// Creates a new compilation with an old metadata reference replaced with a new metadata reference.
/// </summary>
public new CSharpCompilation ReplaceReference(MetadataReference oldReference, MetadataReference newReference)
{
return (CSharpCompilation)base.ReplaceReference(oldReference, newReference);
}
public override CompilationReference ToMetadataReference(ImmutableArray<string> aliases = default(ImmutableArray<string>), bool embedInteropTypes = false)
{
return new CSharpCompilationReference(this, aliases, embedInteropTypes);
}
/// <summary>
/// Get all modules in this compilation, including the source module, added modules, and all
/// modules of referenced assemblies that do not come from an assembly with an extern alias.
/// Metadata imported from aliased assemblies is not visible at the source level except through
/// the use of an extern alias directive, so they are excluded from this list, which is used to construct
/// the global namespace.
/// </summary>
private void GetAllUnaliasedModules(ArrayBuilder<ModuleSymbol> modules)
{
// NOTE: This includes referenced modules - they count as modules of the compilation assembly.
modules.AddRange(Assembly.Modules);
var referenceManager = GetBoundReferenceManager();
for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
{
if (referenceManager.DeclarationsAccessibleWithoutAlias(i))
{
modules.AddRange(referenceManager.ReferencedAssemblies[i].Modules);
}
}
}
/// <summary>
/// Return a list of assembly symbols that can be accessed without using an alias.
/// For example:
/// 1) /r:A.dll /r:B.dll -> A, B
/// 2) /r:Foo=A.dll /r:B.dll -> B
/// 3) /r:Foo=A.dll /r:A.dll -> A
/// </summary>
internal void GetUnaliasedReferencedAssemblies(ArrayBuilder<AssemblySymbol> assemblies)
{
var referenceManager = GetBoundReferenceManager();
for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
{
if (referenceManager.DeclarationsAccessibleWithoutAlias(i))
{
assemblies.Add(referenceManager.ReferencedAssemblies[i]);
}
}
}
/// <summary>
/// Gets the <see cref="MetadataReference"/> that corresponds to the assembly symbol.
/// </summary>
public new MetadataReference GetMetadataReference(IAssemblySymbol assemblySymbol)
{
return base.GetMetadataReference(assemblySymbol);
}
#endregion
#region Symbols
/// <summary>
/// The AssemblySymbol that represents the assembly being created.
/// </summary>
internal SourceAssemblySymbol SourceAssembly
{
get
{
GetBoundReferenceManager();
return _lazyAssemblySymbol;
}
}
/// <summary>
/// The AssemblySymbol that represents the assembly being created.
/// </summary>
internal new AssemblySymbol Assembly
{
get
{
return SourceAssembly;
}
}
/// <summary>
/// Get a ModuleSymbol that refers to the module being created by compiling all of the code.
/// By getting the GlobalNamespace property of that module, all of the namespaces and types
/// defined in source code can be obtained.
/// </summary>
internal new ModuleSymbol SourceModule
{
get
{
return Assembly.Modules[0];
}
}
/// <summary>
/// Gets the root namespace that contains all namespaces and types defined in source code or in
/// referenced metadata, merged into a single namespace hierarchy.
/// </summary>
internal new NamespaceSymbol GlobalNamespace
{
get
{
if ((object)_lazyGlobalNamespace == null)
{
// Get the root namespace from each module, and merge them all together
// Get all modules in this compilation, ones referenced directly by the compilation
// as well as those referenced by all referenced assemblies.
var modules = ArrayBuilder<ModuleSymbol>.GetInstance();
GetAllUnaliasedModules(modules);
var result = MergedNamespaceSymbol.Create(
new NamespaceExtent(this),
null,
modules.SelectDistinct(m => m.GlobalNamespace));
modules.Free();
Interlocked.CompareExchange(ref _lazyGlobalNamespace, result, null);
}
return _lazyGlobalNamespace;
}
}
/// <summary>
/// Given a module or assembly namespace, gets the corresponding compilation
/// namespace (the merged namespace representation of all namespace declarations and references
/// that contribute to the given namespace symbol). Can return null if no corresponding
/// namespace with the same name can be bound in this compilation.
/// </summary>
internal new NamespaceSymbol GetCompilationNamespace(INamespaceSymbol namespaceSymbol)
{
if (namespaceSymbol is NamespaceSymbol &&
namespaceSymbol.NamespaceKind == NamespaceKind.Compilation &&
namespaceSymbol.ContainingCompilation == this)
{
return (NamespaceSymbol)namespaceSymbol;
}
var containingNamespace = namespaceSymbol.ContainingNamespace;
if (containingNamespace == null)
{
return this.GlobalNamespace;
}
var current = GetCompilationNamespace(containingNamespace);
if ((object)current != null)
{
return current.GetNestedNamespace(namespaceSymbol.Name);
}
return null;
}
private ConcurrentDictionary<string, NamespaceSymbol> _externAliasTargets;
internal bool GetExternAliasTarget(string aliasName, out NamespaceSymbol @namespace)
{
if (_externAliasTargets == null)
{
Interlocked.CompareExchange(ref _externAliasTargets, new ConcurrentDictionary<string, NamespaceSymbol>(), null);
}
else if (_externAliasTargets.TryGetValue(aliasName, out @namespace))
{
return !(@namespace is MissingNamespaceSymbol);
}
ArrayBuilder<NamespaceSymbol> builder = null;
var referenceManager = GetBoundReferenceManager();
for (int i = 0; i < referenceManager.ReferencedAssemblies.Length; i++)
{
if (referenceManager.AliasesOfReferencedAssemblies[i].Contains(aliasName))
{
builder = builder ?? ArrayBuilder<NamespaceSymbol>.GetInstance();
builder.Add(referenceManager.ReferencedAssemblies[i].GlobalNamespace);
}
}
bool foundNamespace = builder != null;
// We want to cache failures as well as successes so that subsequent incorrect extern aliases with the
// same alias will have the same target.
@namespace = foundNamespace
? MergedNamespaceSymbol.Create(new NamespaceExtent(this), namespacesToMerge: builder.ToImmutableAndFree(), containingNamespace: null, nameOpt: null)
: new MissingNamespaceSymbol(new MissingModuleSymbol(new MissingAssemblySymbol(new AssemblyIdentity(System.Guid.NewGuid().ToString())), ordinal: -1));
// Use GetOrAdd in case another thread beat us to the punch (i.e. should return the same object for the same alias, every time).
@namespace = _externAliasTargets.GetOrAdd(aliasName, @namespace);
Debug.Assert(foundNamespace == !(@namespace is MissingNamespaceSymbol));
return foundNamespace;
}
/// <summary>
/// A symbol representing the implicit Script class. This is null if the class is not
/// defined in the compilation.
/// </summary>
internal new NamedTypeSymbol ScriptClass
{
get { return _scriptClass.Value; }
}
/// <summary>
/// Resolves the symbol that represents the script container (Script class). Uses the
/// full name of the container class stored in <see cref="CompilationOptions.ScriptClassName"/> to find the symbol.
/// </summary>
/// <returns>The Script class symbol or null if it is not defined.</returns>
private ImplicitNamedTypeSymbol BindScriptClass()
{
if (_options.ScriptClassName == null || !_options.ScriptClassName.IsValidClrTypeName())
{
return null;
}
var namespaceOrType = this.Assembly.GlobalNamespace.GetNamespaceOrTypeByQualifiedName(_options.ScriptClassName.Split('.')).AsSingleton();
return namespaceOrType as ImplicitNamedTypeSymbol;
}
internal Imports GlobalImports
{
get { return _globalImports.Value; }
}
internal IEnumerable<NamespaceOrTypeSymbol> GlobalUsings
{
get
{
return GlobalImports.Usings.Select(u => u.NamespaceOrType);
}
}
internal AliasSymbol GlobalNamespaceAlias
{
get
{
return _globalNamespaceAlias.Value;
}
}
/// <summary>
/// Get the symbol for the predefined type from the COR Library referenced by this compilation.
/// </summary>
internal new NamedTypeSymbol GetSpecialType(SpecialType specialType)
{
if (specialType <= SpecialType.None || specialType > SpecialType.Count)
{
throw new ArgumentOutOfRangeException(nameof(specialType));
}
var result = Assembly.GetSpecialType(specialType);
Debug.Assert(result.SpecialType == specialType);
return result;
}
/// <summary>
/// Get the symbol for the predefined type member from the COR Library referenced by this compilation.
/// </summary>
internal Symbol GetSpecialTypeMember(SpecialMember specialMember)
{
return Assembly.GetSpecialTypeMember(specialMember);
}
internal TypeSymbol GetTypeByReflectionType(Type type, DiagnosticBag diagnostics)
{
var result = Assembly.GetTypeByReflectionType(type, includeReferences: true);
if ((object)result == null)
{
var errorType = new ExtendedErrorTypeSymbol(this, type.Name, 0, CreateReflectionTypeNotFoundError(type));
diagnostics.Add(errorType.ErrorInfo, NoLocation.Singleton);
result = errorType;
}
return result;
}
private static CSDiagnosticInfo CreateReflectionTypeNotFoundError(Type type)
{
// The type or namespace name '{0}' could not be found in the global namespace (are you missing an assembly reference?)
return new CSDiagnosticInfo(
ErrorCode.ERR_GlobalSingleTypeNameNotFound,
new object[] { type.AssemblyQualifiedName },
ImmutableArray<Symbol>.Empty,
ImmutableArray<Location>.Empty
);
}
// The type of host object model if available.
private TypeSymbol _lazyHostObjectTypeSymbol;
internal TypeSymbol GetHostObjectTypeSymbol()
{
if (HostObjectType != null && (object)_lazyHostObjectTypeSymbol == null)
{
TypeSymbol symbol = Assembly.GetTypeByReflectionType(HostObjectType, includeReferences: true);
if ((object)symbol == null)
{
MetadataTypeName mdName = MetadataTypeName.FromNamespaceAndTypeName(HostObjectType.Namespace ?? String.Empty,
HostObjectType.Name,
useCLSCompliantNameArityEncoding: true);
symbol = new MissingMetadataTypeSymbol.TopLevelWithCustomErrorInfo(
new MissingAssemblySymbol(AssemblyIdentity.FromAssemblyDefinition(HostObjectType.GetTypeInfo().Assembly)).Modules[0],
ref mdName,
CreateReflectionTypeNotFoundError(HostObjectType),
SpecialType.None);
}
Interlocked.CompareExchange(ref _lazyHostObjectTypeSymbol, symbol, null);
}
return _lazyHostObjectTypeSymbol;
}
internal SynthesizedInteractiveInitializerMethod GetSubmissionInitializer()
{
return (IsSubmission && (object)ScriptClass != null) ?
ScriptClass.GetScriptInitializer() :
null;
}
/// <summary>
/// Gets the type within the compilation's assembly and all referenced assemblies (other than
/// those that can only be referenced via an extern alias) using its canonical CLR metadata name.
/// </summary>
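/// <example>
/// An illustrative sketch:
/// <code>
/// var listOfT = compilation.GetTypeByMetadataName("System.Collections.Generic.List`1");
/// // listOfT is null when the type cannot be resolved
/// </code>
/// </example>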
internal new NamedTypeSymbol GetTypeByMetadataName(string fullyQualifiedMetadataName)
{
return this.Assembly.GetTypeByMetadataName(fullyQualifiedMetadataName, includeReferences: true, isWellKnownType: false);
}
/// <summary>
/// The TypeSymbol for the type 'dynamic' in this Compilation.
/// </summary>
internal new TypeSymbol DynamicType
{
get
{
return AssemblySymbol.DynamicType;
}
}
/// <summary>
/// The NamedTypeSymbol for the .NET System.Object type, which could have a TypeKind of
/// Error if there was no COR Library in this Compilation.
/// </summary>
internal new NamedTypeSymbol ObjectType
{
get
{
return this.Assembly.ObjectType;
}
}
internal bool DeclaresTheObjectClass
{
get
{
return SourceAssembly.DeclaresTheObjectClass;
}
}
internal new MethodSymbol GetEntryPoint(CancellationToken cancellationToken)
{
EntryPoint entryPoint = GetEntryPointAndDiagnostics(cancellationToken);
return entryPoint == null ? null : entryPoint.MethodSymbol;
}
internal EntryPoint GetEntryPointAndDiagnostics(CancellationToken cancellationToken)
{
if (!this.Options.OutputKind.IsApplication() && ((object)this.ScriptClass == null))
{
return null;
}
if (this.Options.MainTypeName != null && !this.Options.MainTypeName.IsValidClrTypeName())
{
Debug.Assert(!this.Options.Errors.IsDefaultOrEmpty);
return new EntryPoint(null, ImmutableArray<Diagnostic>.Empty);
}
if (_lazyEntryPoint == null)
{
ImmutableArray<Diagnostic> diagnostics;
var entryPoint = FindEntryPoint(cancellationToken, out diagnostics);
Interlocked.CompareExchange(ref _lazyEntryPoint, new EntryPoint(entryPoint, diagnostics), null);
}
return _lazyEntryPoint;
}
private MethodSymbol FindEntryPoint(CancellationToken cancellationToken, out ImmutableArray<Diagnostic> sealedDiagnostics)
{
var diagnostics = DiagnosticBag.GetInstance();
var entryPointCandidates = ArrayBuilder<MethodSymbol>.GetInstance();
try
{
NamedTypeSymbol mainType;
string mainTypeName = this.Options.MainTypeName;
NamespaceSymbol globalNamespace = this.SourceModule.GlobalNamespace;
if (mainTypeName != null)
{
// Global code is the entry point, ignore all other Mains.
var scriptClass = this.ScriptClass;
if (scriptClass != null)
{
// CONSIDER: we could use the symbol instead of just the name.
diagnostics.Add(ErrorCode.WRN_MainIgnored, NoLocation.Singleton, mainTypeName);
return scriptClass.GetScriptEntryPoint();
}
var mainTypeOrNamespace = globalNamespace.GetNamespaceOrTypeByQualifiedName(mainTypeName.Split('.')).OfMinimalArity();
if ((object)mainTypeOrNamespace == null)
{
diagnostics.Add(ErrorCode.ERR_MainClassNotFound, NoLocation.Singleton, mainTypeName);
return null;
}
mainType = mainTypeOrNamespace as NamedTypeSymbol;
if ((object)mainType == null || mainType.IsGenericType || (mainType.TypeKind != TypeKind.Class && mainType.TypeKind != TypeKind.Struct))
{
diagnostics.Add(ErrorCode.ERR_MainClassNotClass, mainTypeOrNamespace.Locations.First(), mainTypeOrNamespace);
return null;
}
EntryPointCandidateFinder.FindCandidatesInSingleType(mainType, entryPointCandidates, cancellationToken);
}
else
{
mainType = null;
EntryPointCandidateFinder.FindCandidatesInNamespace(globalNamespace, entryPointCandidates, cancellationToken);
// Global code is the entry point, ignore all other Mains.
var scriptClass = this.ScriptClass;
if (scriptClass != null)
{
foreach (var main in entryPointCandidates)
{
diagnostics.Add(ErrorCode.WRN_MainIgnored, main.Locations.First(), main);
}
return scriptClass.GetScriptEntryPoint();
}
}
DiagnosticBag warnings = DiagnosticBag.GetInstance();
var viableEntryPoints = ArrayBuilder<MethodSymbol>.GetInstance();
foreach (var candidate in entryPointCandidates)
{
if (!candidate.HasEntryPointSignature())
{
// a single error for partial methods:
warnings.Add(ErrorCode.WRN_InvalidMainSig, candidate.Locations.First(), candidate);
continue;
}
if (candidate.IsGenericMethod || candidate.ContainingType.IsGenericType)
{
// a single error for partial methods:
warnings.Add(ErrorCode.WRN_MainCantBeGeneric, candidate.Locations.First(), candidate);
continue;
}
if (candidate.IsAsync)
{
diagnostics.Add(ErrorCode.ERR_MainCantBeAsync, candidate.Locations.First(), candidate);
}
viableEntryPoints.Add(candidate);
}
if ((object)mainType == null || viableEntryPoints.Count == 0)
{
diagnostics.AddRange(warnings);
}
warnings.Free();
MethodSymbol entryPoint = null;
if (viableEntryPoints.Count == 0)
{
if ((object)mainType == null)
{
diagnostics.Add(ErrorCode.ERR_NoEntryPoint, NoLocation.Singleton);
}
else
{
diagnostics.Add(ErrorCode.ERR_NoMainInClass, mainType.Locations.First(), mainType);
}
}
else if (viableEntryPoints.Count > 1)
{
viableEntryPoints.Sort(LexicalOrderSymbolComparer.Instance);
var info = new CSDiagnosticInfo(
ErrorCode.ERR_MultipleEntryPoints,
args: SpecializedCollections.EmptyArray<object>(),
symbols: viableEntryPoints.OfType<Symbol>().AsImmutable(),
additionalLocations: viableEntryPoints.Select(m => m.Locations.First()).OfType<Location>().AsImmutable());
diagnostics.Add(new CSDiagnostic(info, viableEntryPoints.First().Locations.First()));
}
else
{
entryPoint = viableEntryPoints[0];
}
viableEntryPoints.Free();
return entryPoint;
}
finally
{
entryPointCandidates.Free();
sealedDiagnostics = diagnostics.ToReadOnlyAndFree();
}
}
internal class EntryPoint
{
public readonly MethodSymbol MethodSymbol;
public readonly ImmutableArray<Diagnostic> Diagnostics;
public EntryPoint(MethodSymbol methodSymbol, ImmutableArray<Diagnostic> diagnostics)
{
this.MethodSymbol = methodSymbol;
this.Diagnostics = diagnostics;
}
}
internal bool MightContainNoPiaLocalTypes()
{
return SourceAssembly.MightContainNoPiaLocalTypes();
}
// NOTE(cyrusn): There is a bit of a discoverability problem with this method and the same
// named method in SyntaxTreeSemanticModel. Technically, I believe these are the appropriate
// locations for these methods. This method has no dependencies on anything but the
// compilation, while the other method needs a bindings object to determine what bound node
// an expression syntax binds to. Perhaps when we document these methods we should explain
// where a user can find the other.
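/// <summary>
/// Classifies the conversion from <paramref name="source"/> to <paramref name="destination"/>.
/// </summary>
/// <example>
/// An illustrative sketch (<c>compilation</c> is an assumed compilation that references a core library):
/// <code>
/// var intType = compilation.GetSpecialType(SpecialType.System_Int32);
/// var longType = compilation.GetSpecialType(SpecialType.System_Int64);
/// var conversion = compilation.ClassifyConversion(intType, longType);
/// // conversion.IsImplicit is expected to be true for int -> long
/// </code>
/// </example>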
public Conversion ClassifyConversion(ITypeSymbol source, ITypeSymbol destination)
{
// Note that it is possible for there to be both an implicit user-defined conversion
// and an explicit built-in conversion from source to destination. In that scenario
// this method returns the implicit conversion.
if ((object)source == null)
{
throw new ArgumentNullException(nameof(source));
}
if ((object)destination == null)
{
throw new ArgumentNullException(nameof(destination));
}
var cssource = source.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("source");
var csdest = destination.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("destination");
HashSet<DiagnosticInfo> useSiteDiagnostics = null;
return Conversions.ClassifyConversion(cssource, csdest, ref useSiteDiagnostics);
}
/// <summary>
/// Returns a new ArrayTypeSymbol representing an array type tied to the base types of the
/// COR Library in this Compilation.
/// </summary>
internal ArrayTypeSymbol CreateArrayTypeSymbol(TypeSymbol elementType, int rank = 1)
{
if ((object)elementType == null)
{
throw new ArgumentNullException(nameof(elementType));
}
return ArrayTypeSymbol.CreateCSharpArray(this.Assembly, elementType, ImmutableArray<CustomModifier>.Empty, rank);
}
/// <summary>
/// Returns a new PointerTypeSymbol representing a pointer type tied to a type in this Compilation.
/// </summary>
internal PointerTypeSymbol CreatePointerTypeSymbol(TypeSymbol elementType)
{
if ((object)elementType == null)
{
throw new ArgumentNullException(nameof(elementType));
}
return new PointerTypeSymbol(elementType);
}
#endregion
#region Binding
/// <summary>
/// Gets a new SyntaxTreeSemanticModel for the specified syntax tree.
/// </summary>
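/// <example>
/// An illustrative sketch (<c>tree</c> is assumed to be one of this compilation's syntax trees):
/// <code>
/// var model = compilation.GetSemanticModel(tree, ignoreAccessibility: false);
/// // the model can then answer binding questions for nodes in the tree, e.g.:
/// // var typeInfo = model.GetTypeInfo(someExpressionNode);
/// </code>
/// </example>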
public new SemanticModel GetSemanticModel(SyntaxTree syntaxTree, bool ignoreAccessibility)
{
if (syntaxTree == null)
{
throw new ArgumentNullException(nameof(syntaxTree));
}
if (!_syntaxAndDeclarations.GetLazyState().RootNamespaces.ContainsKey(syntaxTree))
{
throw new ArgumentException(string.Format(CSharpResources.SyntaxTreeNotFoundTo, syntaxTree), nameof(syntaxTree));
}
return new SyntaxTreeSemanticModel(this, (SyntaxTree)syntaxTree, ignoreAccessibility);
}
// When building symbols from the declaration table (lazily), or inside a type, or when
// compiling a method body, we may not have a BinderContext in hand for the enclosing
// scopes. Therefore, we build them when needed (and cache them) using a ContextBuilder.
// Since a ContextBuilder is only a cache, and the identity of the ContextBuilders and
// BinderContexts have no semantic meaning, we can reuse them or rebuild them, whichever is
// most convenient. We store them using weak references so that GC pressure will cause them
// to be recycled.
private WeakReference<BinderFactory>[] _binderFactories;
internal BinderFactory GetBinderFactory(SyntaxTree syntaxTree)
{
var treeNum = GetSyntaxTreeOrdinal(syntaxTree);
var binderFactories = _binderFactories;
if (binderFactories == null)
{
binderFactories = new WeakReference<BinderFactory>[this.SyntaxTrees.Length];
binderFactories = Interlocked.CompareExchange(ref _binderFactories, binderFactories, null) ?? binderFactories;
}
BinderFactory previousFactory;
var previousWeakReference = binderFactories[treeNum];
if (previousWeakReference != null && previousWeakReference.TryGetTarget(out previousFactory))
{
return previousFactory;
}
return AddNewFactory(syntaxTree, ref binderFactories[treeNum]);
}
private BinderFactory AddNewFactory(SyntaxTree syntaxTree, ref WeakReference<BinderFactory> slot)
{
var newFactory = new BinderFactory(this, syntaxTree);
var newWeakReference = new WeakReference<BinderFactory>(newFactory);
while (true)
{
BinderFactory previousFactory;
WeakReference<BinderFactory> previousWeakReference = slot;
if (previousWeakReference != null && previousWeakReference.TryGetTarget(out previousFactory))
{
return previousFactory;
}
if (Interlocked.CompareExchange(ref slot, newWeakReference, previousWeakReference) == previousWeakReference)
{
return newFactory;
}
}
}
internal Binder GetBinder(SyntaxReference reference)
{
return GetBinderFactory(reference.SyntaxTree).GetBinder((CSharpSyntaxNode)reference.GetSyntax());
}
internal Binder GetBinder(CSharpSyntaxNode syntax)
{
return GetBinderFactory(syntax.SyntaxTree).GetBinder(syntax);
}
/// <summary>
/// Returns imported symbols for the given declaration.
/// </summary>
internal Imports GetImports(SingleNamespaceDeclaration declaration)
{
return GetBinderFactory(declaration.SyntaxReference.SyntaxTree).GetImportsBinder((CSharpSyntaxNode)declaration.SyntaxReference.GetSyntax()).GetImports();
}
internal Imports GetSubmissionImports()
{
return ((SourceNamespaceSymbol)SourceModule.GlobalNamespace).GetBoundImportsMerged().SingleOrDefault() ?? Imports.Empty;
}
internal InteractiveUsingsBinder GetInteractiveUsingsBinder()
{
Debug.Assert(IsSubmission);
// empty compilation:
if ((object)ScriptClass == null)
{
Debug.Assert(_syntaxAndDeclarations.ExternalSyntaxTrees.Length == 0);
return null;
}
return GetBinderFactory(_syntaxAndDeclarations.ExternalSyntaxTrees.Single()).GetInteractiveUsingsBinder();
}
private Imports BindGlobalUsings()
{
return Imports.FromGlobalUsings(this);
}
private AliasSymbol CreateGlobalNamespaceAlias()
{
return AliasSymbol.CreateGlobalNamespaceAlias(this.GlobalNamespace, new InContainerBinder(this.GlobalNamespace, new BuckStopsHereBinder(this)));
}
private void CompleteTree(SyntaxTree tree)
{
bool completedCompilationUnit = false;
bool completedCompilation = false;
if (_lazyCompilationUnitCompletedTrees == null) Interlocked.CompareExchange(ref _lazyCompilationUnitCompletedTrees, new HashSet<SyntaxTree>(), null);
lock (_lazyCompilationUnitCompletedTrees)
{
if (_lazyCompilationUnitCompletedTrees.Add(tree))
{
completedCompilationUnit = true;
if (_lazyCompilationUnitCompletedTrees.Count == this.SyntaxTrees.Length)
{
completedCompilation = true;
}
}
}
if (completedCompilationUnit)
{
EventQueue.Enqueue(new CompilationUnitCompletedEvent(this, tree));
}
if (completedCompilation)
{
EventQueue.Enqueue(new CompilationCompletedEvent(this));
EventQueue.Complete(); // signal the end of compilation events
}
}
internal void ReportUnusedImports(DiagnosticBag diagnostics, CancellationToken cancellationToken, SyntaxTree filterTree = null)
{
if (_lazyImportInfos != null)
{
foreach (ImportInfo info in _lazyImportInfos)
{
cancellationToken.ThrowIfCancellationRequested();
SyntaxTree infoTree = info.Tree;
if (filterTree == null || filterTree == infoTree)
{
TextSpan infoSpan = info.Span;
if (!this.IsImportDirectiveUsed(infoTree, infoSpan.Start))
{
ErrorCode code = info.Kind == SyntaxKind.ExternAliasDirective
? ErrorCode.HDN_UnusedExternAlias
: ErrorCode.HDN_UnusedUsingDirective;
diagnostics.Add(code, infoTree.GetLocation(infoSpan));
}
}
}
}
// By definition, a tree is complete when all of its compiler diagnostics have been reported.
// Since unused imports are the last thing we compute and report, a tree is complete when
// the unused imports have been reported.
if (EventQueue != null)
{
if (filterTree != null)
{
CompleteTree(filterTree);
}
else
{
foreach (var tree in this.SyntaxTrees)
{
CompleteTree(tree);
}
}
}
}
internal void RecordImport(UsingDirectiveSyntax syntax)
{
RecordImportInternal(syntax);
}
internal void RecordImport(ExternAliasDirectiveSyntax syntax)
{
RecordImportInternal(syntax);
}
private void RecordImportInternal(CSharpSyntaxNode syntax)
{
LazyInitializer.EnsureInitialized(ref _lazyImportInfos).
Add(new ImportInfo(syntax.SyntaxTree, syntax.Kind(), syntax.Span));
}
private struct ImportInfo : IEquatable<ImportInfo>
{
public readonly SyntaxTree Tree;
public readonly SyntaxKind Kind;
public readonly TextSpan Span;
public ImportInfo(SyntaxTree tree, SyntaxKind kind, TextSpan span)
{
this.Tree = tree;
this.Kind = kind;
this.Span = span;
}
public override bool Equals(object obj)
{
return (obj is ImportInfo) && Equals((ImportInfo)obj);
}
public bool Equals(ImportInfo other)
{
return
other.Kind == this.Kind &&
other.Tree == this.Tree &&
other.Span == this.Span;
}
public override int GetHashCode()
{
return Hash.Combine(Tree, Span.Start);
}
}
#endregion
#region Diagnostics
internal override CommonMessageProvider MessageProvider
{
get { return _syntaxAndDeclarations.MessageProvider; }
}
/// <summary>
/// The bag in which semantic analysis should deposit its diagnostics.
/// </summary>
internal DiagnosticBag DeclarationDiagnostics
{
get
{
// We should only be placing diagnostics in this bag until
// we are done gathering declaration diagnostics. Assert that is
// the case. But since we have bugs (see https://github.com/dotnet/roslyn/issues/846)
// we disable the assertion until they are fixed.
Debug.Assert(!_declarationDiagnosticsFrozen || true);
if (_lazyDeclarationDiagnostics == null)
{
var diagnostics = new DiagnosticBag();
Interlocked.CompareExchange(ref _lazyDeclarationDiagnostics, diagnostics, null);
}
return _lazyDeclarationDiagnostics;
}
}
private IEnumerable<Diagnostic> FreezeDeclarationDiagnostics()
{
_declarationDiagnosticsFrozen = true;
var result = _lazyDeclarationDiagnostics?.AsEnumerable() ?? Enumerable.Empty<Diagnostic>();
return result;
}
private DiagnosticBag _lazyDeclarationDiagnostics;
private bool _declarationDiagnosticsFrozen;
/// <summary>
/// A bag in which diagnostics that should be reported after code gen can be deposited.
/// </summary>
internal DiagnosticBag AdditionalCodegenWarnings
{
get
{
return _additionalCodegenWarnings;
}
}
private readonly DiagnosticBag _additionalCodegenWarnings = new DiagnosticBag();
internal DeclarationTable Declarations
{
get
{
return _syntaxAndDeclarations.GetLazyState().DeclarationTable;
}
}
/// <summary>
/// Gets the diagnostics produced during the parsing stage of a compilation. There are no diagnostics for
/// declarations, accessor bodies, or method bodies, for example.
/// </summary>
public override ImmutableArray<Diagnostic> GetParseDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
return GetDiagnostics(CompilationStage.Parse, false, cancellationToken);
}
/// <summary>
/// Gets the diagnostics produced while binding symbol declaration headers. There are no diagnostics for accessor or
/// method bodies, for example.
/// </summary>
public override ImmutableArray<Diagnostic> GetDeclarationDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
return GetDiagnostics(CompilationStage.Declare, false, cancellationToken);
}
/// <summary>
/// Gets the diagnostics produced during the analysis of method bodies and field initializers.
/// </summary>
public override ImmutableArray<Diagnostic> GetMethodBodyDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
return GetDiagnostics(CompilationStage.Compile, false, cancellationToken);
}
/// <summary>
/// Gets all the diagnostics for the compilation, including syntax, declaration, and binding diagnostics. Does not
/// include any diagnostics that might be produced during emit.
/// </summary>
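/// <example>
/// An illustrative sketch:
/// <code>
/// foreach (var diagnostic in compilation.GetDiagnostics())
/// {
///     Console.WriteLine(diagnostic);
/// }
/// </code>
/// </example>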
public override ImmutableArray<Diagnostic> GetDiagnostics(CancellationToken cancellationToken = default(CancellationToken))
{
return GetDiagnostics(DefaultDiagnosticsStage, true, cancellationToken);
}
internal ImmutableArray<Diagnostic> GetDiagnostics(CompilationStage stage, bool includeEarlierStages, CancellationToken cancellationToken)
{
var builder = DiagnosticBag.GetInstance();
if (stage == CompilationStage.Parse || (stage > CompilationStage.Parse && includeEarlierStages))
{
var syntaxTrees = this.SyntaxTrees;
if (this.Options.ConcurrentBuild)
{
var parallelOptions = cancellationToken.CanBeCanceled
? new ParallelOptions() { CancellationToken = cancellationToken }
: DefaultParallelOptions;
Parallel.For(0, syntaxTrees.Length, parallelOptions,
UICultureUtilities.WithCurrentUICulture<int>(i =>
{
var syntaxTree = syntaxTrees[i];
AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree);
builder.AddRange(syntaxTree.GetDiagnostics(cancellationToken));
}));
}
else
{
foreach (var syntaxTree in syntaxTrees)
{
cancellationToken.ThrowIfCancellationRequested();
AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree);
cancellationToken.ThrowIfCancellationRequested();
builder.AddRange(syntaxTree.GetDiagnostics(cancellationToken));
}
}
}
if (stage == CompilationStage.Declare || stage > CompilationStage.Declare && includeEarlierStages)
{
builder.AddRange(Options.Errors);
cancellationToken.ThrowIfCancellationRequested();
// the set of diagnostics related to establishing references.
builder.AddRange(GetBoundReferenceManager().Diagnostics);
cancellationToken.ThrowIfCancellationRequested();
builder.AddRange(GetSourceDeclarationDiagnostics(cancellationToken: cancellationToken));
}
cancellationToken.ThrowIfCancellationRequested();
if (stage == CompilationStage.Compile || stage > CompilationStage.Compile && includeEarlierStages)
{
var methodBodyDiagnostics = DiagnosticBag.GetInstance();
GetDiagnosticsForAllMethodBodies(methodBodyDiagnostics, cancellationToken);
builder.AddRangeAndFree(methodBodyDiagnostics);
}
// Before returning diagnostics, we filter warnings
// to honor the compiler options (e.g., /nowarn, /warnaserror and /warn) and the pragmas.
var result = DiagnosticBag.GetInstance();
FilterAndAppendAndFreeDiagnostics(result, ref builder);
return result.ToReadOnlyAndFree<Diagnostic>();
}
private static void AppendLoadDirectiveDiagnostics(DiagnosticBag builder, SyntaxAndDeclarationManager syntaxAndDeclarations, SyntaxTree syntaxTree, Func<IEnumerable<Diagnostic>, IEnumerable<Diagnostic>> locationFilterOpt = null)
{
ImmutableArray<LoadDirective> loadDirectives;
if (syntaxAndDeclarations.GetLazyState().LoadDirectiveMap.TryGetValue(syntaxTree, out loadDirectives))
{
Debug.Assert(!loadDirectives.IsEmpty);
foreach (var directive in loadDirectives)
{
IEnumerable<Diagnostic> diagnostics = directive.Diagnostics;
if (locationFilterOpt != null)
{
diagnostics = locationFilterOpt(diagnostics);
}
builder.AddRange(diagnostics);
}
}
}
// Do the steps in compilation to get the method body diagnostics, but don't actually generate
// IL or emit an assembly.
private void GetDiagnosticsForAllMethodBodies(DiagnosticBag diagnostics, CancellationToken cancellationToken)
{
MethodCompiler.CompileMethodBodies(
compilation: this,
moduleBeingBuiltOpt: null,
generateDebugInfo: false,
hasDeclarationErrors: false,
diagnostics: diagnostics,
filterOpt: null,
cancellationToken: cancellationToken);
DocumentationCommentCompiler.WriteDocumentationCommentXml(this, null, null, diagnostics, cancellationToken);
this.ReportUnusedImports(diagnostics, cancellationToken);
}
private static bool IsDefinedOrImplementedInSourceTree(Symbol symbol, SyntaxTree tree, TextSpan? span)
{
if (symbol.IsDefinedInSourceTree(tree, span))
{
return true;
}
if (symbol.IsPartialDefinition())
{
MethodSymbol implementationPart = ((MethodSymbol)symbol).PartialImplementationPart;
if ((object)implementationPart != null)
{
return implementationPart.IsDefinedInSourceTree(tree, span);
}
}
if (symbol.Kind == SymbolKind.Method && symbol.IsImplicitlyDeclared && ((MethodSymbol)symbol).MethodKind == MethodKind.Constructor)
{
// Include implicitly declared constructor if containing type is included
return IsDefinedOrImplementedInSourceTree(symbol.ContainingType, tree, span);
}
return false;
}
private ImmutableArray<Diagnostic> GetDiagnosticsForMethodBodiesInTree(SyntaxTree tree, TextSpan? span, CancellationToken cancellationToken)
{
DiagnosticBag diagnostics = DiagnosticBag.GetInstance();
MethodCompiler.CompileMethodBodies(
compilation: this,
moduleBeingBuiltOpt: null,
generateDebugInfo: false,
hasDeclarationErrors: false,
diagnostics: diagnostics,
filterOpt: s => IsDefinedOrImplementedInSourceTree(s, tree, span),
cancellationToken: cancellationToken);
DocumentationCommentCompiler.WriteDocumentationCommentXml(this, null, null, diagnostics, cancellationToken, tree, span);
// Report unused directives only if computing diagnostics for the entire tree.
// Otherwise we cannot determine if a particular directive is used outside of the given sub-span within the tree.
if (!span.HasValue || span.Value == tree.GetRoot(cancellationToken).FullSpan)
{
ReportUnusedImports(diagnostics, cancellationToken, tree);
}
return diagnostics.ToReadOnlyAndFree();
}
/// <summary>
/// Filter out warnings based on the compiler options (/nowarn, /warn and /warnaserror) and the pragma warning directives.
/// 'incoming' is freed.
/// </summary>
/// <returns>True when there is no error or warning treated as an error.</returns>
internal override bool FilterAndAppendAndFreeDiagnostics(DiagnosticBag accumulator, ref DiagnosticBag incoming)
{
bool result = FilterAndAppendDiagnostics(accumulator, incoming.AsEnumerableWithoutResolution());
incoming.Free();
incoming = null;
return result;
}
/// <summary>
/// Filter out warnings based on the compiler options (/nowarn, /warn and /warnaserror) and the pragma warning directives.
/// </summary>
/// <returns>True when there is no error.</returns>
private bool FilterAndAppendDiagnostics(DiagnosticBag accumulator, IEnumerable<Diagnostic> incoming)
{
bool hasError = false;
bool reportSuppressedDiagnostics = Options.ReportSuppressedDiagnostics;
foreach (Diagnostic d in incoming)
{
var filtered = _options.FilterDiagnostic(d);
if (filtered == null ||
(!reportSuppressedDiagnostics && filtered.IsSuppressed))
{
continue;
}
else if (filtered.Severity == DiagnosticSeverity.Error)
{
hasError = true;
}
accumulator.Add(filtered);
}
return !hasError;
}
private ImmutableArray<Diagnostic> GetSourceDeclarationDiagnostics(SyntaxTree syntaxTree = null, TextSpan? filterSpanWithinTree = null, Func<IEnumerable<Diagnostic>, SyntaxTree, TextSpan?, IEnumerable<Diagnostic>> locationFilterOpt = null, CancellationToken cancellationToken = default(CancellationToken))
{
// global imports diagnostics (specified via compilation options):
GlobalImports.Complete(cancellationToken);
SourceLocation location = null;
if (syntaxTree != null)
{
var root = syntaxTree.GetRoot(cancellationToken);
location = filterSpanWithinTree.HasValue ?
new SourceLocation(syntaxTree, filterSpanWithinTree.Value) :
new SourceLocation(root);
}
Assembly.ForceComplete(location, cancellationToken);
var result = this.FreezeDeclarationDiagnostics();
if (locationFilterOpt != null)
{
Debug.Assert(syntaxTree != null);
result = locationFilterOpt(result, syntaxTree, filterSpanWithinTree);
}
// NOTE: Concatenate the CLS diagnostics *after* filtering by tree/span, because they're already filtered.
ImmutableArray<Diagnostic> clsDiagnostics = GetClsComplianceDiagnostics(syntaxTree, filterSpanWithinTree, cancellationToken);
return result.AsImmutable().Concat(clsDiagnostics);
}
private ImmutableArray<Diagnostic> GetClsComplianceDiagnostics(SyntaxTree syntaxTree, TextSpan? filterSpanWithinTree, CancellationToken cancellationToken)
{
if (syntaxTree != null)
{
var builder = DiagnosticBag.GetInstance();
ClsComplianceChecker.CheckCompliance(this, builder, cancellationToken, syntaxTree, filterSpanWithinTree);
return builder.ToReadOnlyAndFree();
}
if (_lazyClsComplianceDiagnostics.IsDefault)
{
var builder = DiagnosticBag.GetInstance();
ClsComplianceChecker.CheckCompliance(this, builder, cancellationToken);
ImmutableInterlocked.InterlockedInitialize(ref _lazyClsComplianceDiagnostics, builder.ToReadOnlyAndFree());
}
Debug.Assert(!_lazyClsComplianceDiagnostics.IsDefault);
return _lazyClsComplianceDiagnostics;
}
private static IEnumerable<Diagnostic> FilterDiagnosticsByLocation(IEnumerable<Diagnostic> diagnostics, SyntaxTree tree, TextSpan? filterSpanWithinTree)
{
foreach (var diagnostic in diagnostics)
{
if (diagnostic.ContainsLocation(tree, filterSpanWithinTree))
{
yield return diagnostic;
}
}
}
internal ImmutableArray<Diagnostic> GetDiagnosticsForSyntaxTree(
CompilationStage stage,
SyntaxTree syntaxTree,
TextSpan? filterSpanWithinTree,
bool includeEarlierStages,
CancellationToken cancellationToken = default(CancellationToken))
{
cancellationToken.ThrowIfCancellationRequested();
var builder = DiagnosticBag.GetInstance();
if (stage == CompilationStage.Parse || (stage > CompilationStage.Parse && includeEarlierStages))
{
AppendLoadDirectiveDiagnostics(builder, _syntaxAndDeclarations, syntaxTree,
diagnostics => FilterDiagnosticsByLocation(diagnostics, syntaxTree, filterSpanWithinTree));
var syntaxDiagnostics = syntaxTree.GetDiagnostics();
syntaxDiagnostics = FilterDiagnosticsByLocation(syntaxDiagnostics, syntaxTree, filterSpanWithinTree);
builder.AddRange(syntaxDiagnostics);
}
cancellationToken.ThrowIfCancellationRequested();
if (stage == CompilationStage.Declare || (stage > CompilationStage.Declare && includeEarlierStages))
{
var declarationDiagnostics = GetSourceDeclarationDiagnostics(syntaxTree, filterSpanWithinTree, FilterDiagnosticsByLocation, cancellationToken);
Debug.Assert(declarationDiagnostics.All(d => d.ContainsLocation(syntaxTree, filterSpanWithinTree)));
builder.AddRange(declarationDiagnostics);
}
cancellationToken.ThrowIfCancellationRequested();
if (stage == CompilationStage.Compile || (stage > CompilationStage.Compile && includeEarlierStages))
{
//remove some errors that don't have locations in the tree, like "no suitable main method."
//Members in trees other than the one being examined are not compiled. This includes field
//initializers which can result in 'field is never initialized' warnings for fields in partial
//types when the field is in a different source file than the one for which we're getting diagnostics.
//For that reason the bag must be also filtered by tree.
IEnumerable<Diagnostic> methodBodyDiagnostics = GetDiagnosticsForMethodBodiesInTree(syntaxTree, filterSpanWithinTree, cancellationToken);
// TODO: Enable the below commented assert and remove the filtering code in the next line.
// GetDiagnosticsForMethodBodiesInTree seems to be returning diagnostics with locations that don't satisfy the filter tree/span, this must be fixed.
// Debug.Assert(methodBodyDiagnostics.All(d => DiagnosticContainsLocation(d, syntaxTree, filterSpanWithinTree)));
methodBodyDiagnostics = FilterDiagnosticsByLocation(methodBodyDiagnostics, syntaxTree, filterSpanWithinTree);
builder.AddRange(methodBodyDiagnostics);
}
// Before returning diagnostics, we filter warnings
// to honor the compiler options (/nowarn, /warnaserror and /warn) and the pragmas.
var result = DiagnosticBag.GetInstance();
FilterAndAppendAndFreeDiagnostics(result, ref builder);
return result.ToReadOnlyAndFree<Diagnostic>();
}
#endregion
#region Resources
protected override void AppendDefaultVersionResource(Stream resourceStream)
{
var sourceAssembly = SourceAssembly;
string fileVersion = sourceAssembly.FileVersion ?? sourceAssembly.Identity.Version.ToString();
Win32ResourceConversions.AppendVersionToResourceStream(resourceStream,
!this.Options.OutputKind.IsApplication(),
fileVersion: fileVersion,
originalFileName: this.SourceModule.Name,
internalName: this.SourceModule.Name,
productVersion: sourceAssembly.InformationalVersion ?? fileVersion,
fileDescription: sourceAssembly.Title ?? " ", //alink would give this a blank if nothing was supplied.
assemblyVersion: sourceAssembly.Identity.Version,
legalCopyright: sourceAssembly.Copyright ?? " ", //alink would give this a blank if nothing was supplied.
legalTrademarks: sourceAssembly.Trademark,
productName: sourceAssembly.Product,
comments: sourceAssembly.Description,
companyName: sourceAssembly.Company);
}
#endregion
#region Emit
internal override byte LinkerMajorVersion => 0x30;
internal override bool IsDelaySigned
{
get { return SourceAssembly.IsDelaySigned; }
}
internal override StrongNameKeys StrongNameKeys
{
get { return SourceAssembly.StrongNameKeys; }
}
internal override CommonPEModuleBuilder CreateModuleBuilder(
EmitOptions emitOptions,
IMethodSymbol debugEntryPoint,
IEnumerable<ResourceDescription> manifestResources,
CompilationTestData testData,
DiagnosticBag diagnostics,
CancellationToken cancellationToken)
{
Debug.Assert(!IsSubmission || HasCodeToEmit());
string runtimeMDVersion = GetRuntimeMetadataVersion(emitOptions, diagnostics);
if (runtimeMDVersion == null)
{
return null;
}
var moduleProps = ConstructModuleSerializationProperties(emitOptions, runtimeMDVersion);
if (manifestResources == null)
{
manifestResources = SpecializedCollections.EmptyEnumerable<ResourceDescription>();
}
PEModuleBuilder moduleBeingBuilt;
if (_options.OutputKind.IsNetModule())
{
moduleBeingBuilt = new PENetModuleBuilder(
(SourceModuleSymbol)SourceModule,
emitOptions,
moduleProps,
manifestResources);
}
else
{
var kind = _options.OutputKind.IsValid() ? _options.OutputKind : OutputKind.DynamicallyLinkedLibrary;
moduleBeingBuilt = new PEAssemblyBuilder(
SourceAssembly,
emitOptions,
kind,
moduleProps,
manifestResources);
}
if (debugEntryPoint != null)
{
moduleBeingBuilt.SetDebugEntryPoint((MethodSymbol)debugEntryPoint, diagnostics);
}
// testData is only passed when running tests.
if (testData != null)
{
moduleBeingBuilt.SetMethodTestData(testData.Methods);
testData.Module = moduleBeingBuilt;
}
return moduleBeingBuilt;
}
internal override bool CompileImpl(
CommonPEModuleBuilder moduleBuilder,
Stream win32Resources,
Stream xmlDocStream,
bool emittingPdb,
DiagnosticBag diagnostics,
Predicate<ISymbol> filterOpt,
CancellationToken cancellationToken)
{
// The diagnostics should include syntax and declaration errors. We insert these before calling Emitter.Emit, so that the emitter
// does not attempt to emit if there are declaration errors (but we do insert all errors from method body binding...)
bool hasDeclarationErrors = !FilterAndAppendDiagnostics(diagnostics, GetDiagnostics(CompilationStage.Declare, true, cancellationToken));
// TODO (tomat): NoPIA:
// EmbeddedSymbolManager.MarkAllDeferredSymbolsAsReferenced(this)
var moduleBeingBuilt = (PEModuleBuilder)moduleBuilder;
if (moduleBeingBuilt.EmitOptions.EmitMetadataOnly)
{
if (hasDeclarationErrors)
{
return false;
}
if (moduleBeingBuilt.SourceModule.HasBadAttributes)
{
// If there were errors but no declaration diagnostics, explicitly add a "Failed to emit module" error.
diagnostics.Add(ErrorCode.ERR_ModuleEmitFailure, NoLocation.Singleton, ((Cci.INamedEntity)moduleBeingBuilt).Name);
return false;
}
SynthesizedMetadataCompiler.ProcessSynthesizedMembers(this, moduleBeingBuilt, cancellationToken);
}
else
{
if (emittingPdb && !StartSourceChecksumCalculation(moduleBeingBuilt, diagnostics))
{
return false;
}
// Perform initial bind of method bodies in spite of earlier errors. This is the same
// behavior as when calling GetDiagnostics()
// Use a temporary bag so we don't have to refilter pre-existing diagnostics.
DiagnosticBag methodBodyDiagnosticBag = DiagnosticBag.GetInstance();
MethodCompiler.CompileMethodBodies(
this,
moduleBeingBuilt,
emittingPdb,
hasDeclarationErrors,
diagnostics: methodBodyDiagnosticBag,
filterOpt: filterOpt,
cancellationToken: cancellationToken);
SetupWin32Resources(moduleBeingBuilt, win32Resources, methodBodyDiagnosticBag);
ReportManifestResourceDuplicates(
moduleBeingBuilt.ManifestResources,
SourceAssembly.Modules.Skip(1).Select((m) => m.Name), //all modules except the first one
AddedModulesResourceNames(methodBodyDiagnosticBag),
methodBodyDiagnosticBag);
bool hasMethodBodyErrorOrWarningAsError = !FilterAndAppendAndFreeDiagnostics(diagnostics, ref methodBodyDiagnosticBag);
if (hasDeclarationErrors || hasMethodBodyErrorOrWarningAsError)
{
return false;
}
}
cancellationToken.ThrowIfCancellationRequested();
// Use a temporary bag so we don't have to refilter pre-existing diagnostics.
DiagnosticBag xmlDiagnostics = DiagnosticBag.GetInstance();
string assemblyName = FileNameUtilities.ChangeExtension(moduleBeingBuilt.EmitOptions.OutputNameOverride, extension: null);
DocumentationCommentCompiler.WriteDocumentationCommentXml(this, assemblyName, xmlDocStream, xmlDiagnostics, cancellationToken);
if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref xmlDiagnostics))
{
return false;
}
// Use a temporary bag so we don't have to refilter pre-existing diagnostics.
DiagnosticBag importDiagnostics = DiagnosticBag.GetInstance();
this.ReportUnusedImports(importDiagnostics, cancellationToken);
if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref importDiagnostics))
{
Debug.Assert(false, "Should never produce an error");
return false;
}
return true;
}
// TODO: consider unifying with VB
private bool StartSourceChecksumCalculation(PEModuleBuilder moduleBeingBuilt, DiagnosticBag diagnostics)
{
var syntaxTrees = this.SyntaxTrees;
// Check that all syntax trees are debuggable:
bool allTreesDebuggable = true;
foreach (var tree in syntaxTrees)
{
if (!string.IsNullOrEmpty(tree.FilePath) && tree.GetText().Encoding == null)
{
diagnostics.Add(ErrorCode.ERR_EncodinglessSyntaxTree, tree.GetRoot().GetLocation());
allTreesDebuggable = false;
}
}
if (!allTreesDebuggable)
{
return false;
}
// Add debug documents for all trees with distinct paths.
foreach (var tree in syntaxTrees)
{
if (!string.IsNullOrEmpty(tree.FilePath))
{
// compilation does not guarantee that all trees will have distinct paths.
// Do not attempt adding a document for a particular path if we already added one.
string normalizedPath = moduleBeingBuilt.NormalizeDebugDocumentPath(tree.FilePath, basePath: null);
var existingDoc = moduleBeingBuilt.TryGetDebugDocumentForNormalizedPath(normalizedPath);
if (existingDoc == null)
{
moduleBeingBuilt.AddDebugDocument(MakeDebugSourceDocumentForTree(normalizedPath, tree));
}
}
}
// Add debug documents for all pragmas.
// If there are clashes with already processed directives, report warnings.
// If there are clashes with debug documents that came from actual trees, ignore the pragma.
foreach (var tree in syntaxTrees)
{
AddDebugSourceDocumentsForChecksumDirectives(moduleBeingBuilt, tree, diagnostics);
}
return true;
}
private IEnumerable<string> AddedModulesResourceNames(DiagnosticBag diagnostics)
{
ImmutableArray<ModuleSymbol> modules = SourceAssembly.Modules;
for (int i = 1; i < modules.Length; i++)
{
var m = (Symbols.Metadata.PE.PEModuleSymbol)modules[i];
ImmutableArray<EmbeddedResource> resources;
try
{
resources = m.Module.GetEmbeddedResourcesOrThrow();
}
catch (BadImageFormatException)
{
diagnostics.Add(new CSDiagnosticInfo(ErrorCode.ERR_BindToBogus, m), NoLocation.Singleton);
continue;
}
foreach (var resource in resources)
{
yield return resource.Name;
}
}
}
internal override EmitDifferenceResult EmitDifference(
EmitBaseline baseline,
IEnumerable<SemanticEdit> edits,
Func<ISymbol, bool> isAddedSymbol,
Stream metadataStream,
Stream ilStream,
Stream pdbStream,
ICollection<MethodDefinitionHandle> updatedMethods,
CompilationTestData testData,
CancellationToken cancellationToken)
{
return EmitHelpers.EmitDifference(
this,
baseline,
edits,
isAddedSymbol,
metadataStream,
ilStream,
pdbStream,
updatedMethods,
testData,
cancellationToken);
}
internal string GetRuntimeMetadataVersion(EmitOptions emitOptions, DiagnosticBag diagnostics)
{
string runtimeMDVersion = GetRuntimeMetadataVersion(emitOptions);
if (runtimeMDVersion != null)
{
return runtimeMDVersion;
}
DiagnosticBag runtimeMDVersionDiagnostics = DiagnosticBag.GetInstance();
runtimeMDVersionDiagnostics.Add(ErrorCode.WRN_NoRuntimeMetadataVersion, NoLocation.Singleton);
if (!FilterAndAppendAndFreeDiagnostics(diagnostics, ref runtimeMDVersionDiagnostics))
{
return null;
}
return string.Empty; //prevent emitter from crashing.
}
private string GetRuntimeMetadataVersion(EmitOptions emitOptions)
{
var corAssembly = Assembly.CorLibrary as Symbols.Metadata.PE.PEAssemblySymbol;
if ((object)corAssembly != null)
{
return corAssembly.Assembly.ManifestModule.MetadataVersion;
}
return emitOptions.RuntimeMetadataVersion;
}
private static void AddDebugSourceDocumentsForChecksumDirectives(
PEModuleBuilder moduleBeingBuilt,
SyntaxTree tree,
DiagnosticBag diagnostics)
{
var checksumDirectives = tree.GetRoot().GetDirectives(d => d.Kind() == SyntaxKind.PragmaChecksumDirectiveTrivia &&
!d.ContainsDiagnostics);
foreach (var directive in checksumDirectives)
{
var checksumDirective = (PragmaChecksumDirectiveTriviaSyntax)directive;
var path = checksumDirective.File.ValueText;
var checksumText = checksumDirective.Bytes.ValueText;
var normalizedPath = moduleBeingBuilt.NormalizeDebugDocumentPath(path, basePath: tree.FilePath);
var existingDoc = moduleBeingBuilt.TryGetDebugDocumentForNormalizedPath(normalizedPath);
// duplicate checksum pragmas are valid as long as values match
// if we have seen this document already, check for matching values.
if (existingDoc != null)
{
// pragma matches a file path on an actual tree.
// Dev12 compiler just ignores the pragma in this case which means that
// checksum of the actual tree always wins and no warning is given.
// We will continue doing the same.
if (existingDoc.IsComputedChecksum)
{
continue;
}
var checksumAndAlgorithm = existingDoc.ChecksumAndAlgorithm;
if (ChecksumMatches(checksumText, checksumAndAlgorithm.Item1))
{
var guid = Guid.Parse(checksumDirective.Guid.ValueText);
if (guid == checksumAndAlgorithm.Item2)
{
// all parts match, nothing to do
continue;
}
}
// did not match to an existing document
// produce a warning and ignore the pragma
diagnostics.Add(ErrorCode.WRN_ConflictingChecksum, new SourceLocation(checksumDirective), path);
}
else
{
var newDocument = new Cci.DebugSourceDocument(
normalizedPath,
Cci.DebugSourceDocument.CorSymLanguageTypeCSharp,
MakeChecksumBytes(checksumDirective.Bytes.ValueText),
Guid.Parse(checksumDirective.Guid.ValueText));
moduleBeingBuilt.AddDebugDocument(newDocument);
}
}
}
private static bool ChecksumMatches(string bytesText, ImmutableArray<byte> bytes)
{
if (bytesText.Length != bytes.Length * 2)
{
return false;
}
for (int i = 0, len = bytesText.Length / 2; i < len; i++)
{
// 1A in text becomes 0x1A
var b = SyntaxFacts.HexValue(bytesText[i * 2]) * 16 +
SyntaxFacts.HexValue(bytesText[i * 2 + 1]);
if (b != bytes[i])
{
return false;
}
}
return true;
}
private static ImmutableArray<byte> MakeChecksumBytes(string bytesText)
{
int length = bytesText.Length / 2;
var builder = ArrayBuilder<byte>.GetInstance(length);
for (int i = 0; i < length; i++)
{
// 1A in text becomes 0x1A
var b = SyntaxFacts.HexValue(bytesText[i * 2]) * 16 +
SyntaxFacts.HexValue(bytesText[i * 2 + 1]);
builder.Add((byte)b);
}
return builder.ToImmutableAndFree();
}
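        // Worked example (illustrative): the pragma text "1A2B" becomes the bytes { 0x1A, 0x2B };
        // each pair of hex characters folds into one byte, mirroring ChecksumMatches above.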
private static Cci.DebugSourceDocument MakeDebugSourceDocumentForTree(string normalizedPath, SyntaxTree tree)
{
return new Cci.DebugSourceDocument(normalizedPath, Cci.DebugSourceDocument.CorSymLanguageTypeCSharp, () => tree.GetChecksumAndAlgorithm());
}
private void SetupWin32Resources(PEModuleBuilder moduleBeingBuilt, Stream win32Resources, DiagnosticBag diagnostics)
{
if (win32Resources == null)
return;
switch (DetectWin32ResourceForm(win32Resources))
{
case Win32ResourceForm.COFF:
moduleBeingBuilt.Win32ResourceSection = MakeWin32ResourcesFromCOFF(win32Resources, diagnostics);
break;
case Win32ResourceForm.RES:
moduleBeingBuilt.Win32Resources = MakeWin32ResourceList(win32Resources, diagnostics);
break;
default:
diagnostics.Add(ErrorCode.ERR_BadWin32Res, NoLocation.Singleton, "Unrecognized file format.");
break;
}
}
internal override bool HasCodeToEmit()
{
foreach (var syntaxTree in this.SyntaxTrees)
{
var unit = syntaxTree.GetCompilationUnitRoot();
if (unit.Members.Count > 0)
{
return true;
}
}
return false;
}
#endregion
#region Common Members
protected override Compilation CommonWithReferences(IEnumerable<MetadataReference> newReferences)
{
return WithReferences(newReferences);
}
protected override Compilation CommonWithAssemblyName(string assemblyName)
{
return WithAssemblyName(assemblyName);
}
protected override ITypeSymbol CommonGetSubmissionResultType(out bool hasValue)
{
return GetSubmissionResultType(out hasValue);
}
protected override IAssemblySymbol CommonAssembly
{
get { return this.Assembly; }
}
protected override INamespaceSymbol CommonGlobalNamespace
{
get { return this.GlobalNamespace; }
}
protected override CompilationOptions CommonOptions
{
get { return _options; }
}
protected override Compilation CommonPreviousSubmission
{
get { return _previousSubmission; }
}
protected override SemanticModel CommonGetSemanticModel(SyntaxTree syntaxTree, bool ignoreAccessibility)
{
return this.GetSemanticModel((SyntaxTree)syntaxTree, ignoreAccessibility);
}
protected override IEnumerable<SyntaxTree> CommonSyntaxTrees
{
get
{
return this.SyntaxTrees;
}
}
protected override Compilation CommonAddSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
return this.AddSyntaxTrees(trees);
}
protected override Compilation CommonRemoveSyntaxTrees(IEnumerable<SyntaxTree> trees)
{
return this.RemoveSyntaxTrees(trees);
}
protected override Compilation CommonRemoveAllSyntaxTrees()
{
return this.RemoveAllSyntaxTrees();
}
protected override Compilation CommonReplaceSyntaxTree(SyntaxTree oldTree, SyntaxTree newTree)
{
return this.ReplaceSyntaxTree((SyntaxTree)oldTree, (SyntaxTree)newTree);
}
protected override Compilation CommonWithOptions(CompilationOptions options)
{
return this.WithOptions((CSharpCompilationOptions)options);
}
protected override Compilation CommonWithPreviousSubmission(Compilation newPreviousSubmission)
{
return this.WithPreviousSubmission((CSharpCompilation)newPreviousSubmission);
}
protected override bool CommonContainsSyntaxTree(SyntaxTree syntaxTree)
{
return this.ContainsSyntaxTree((SyntaxTree)syntaxTree);
}
protected override ISymbol CommonGetAssemblyOrModuleSymbol(MetadataReference reference)
{
return this.GetAssemblyOrModuleSymbol(reference);
}
protected override Compilation CommonClone()
{
return this.Clone();
}
protected override IModuleSymbol CommonSourceModule
{
get { return this.SourceModule; }
}
protected override INamedTypeSymbol CommonGetSpecialType(SpecialType specialType)
{
return this.GetSpecialType(specialType);
}
protected override INamespaceSymbol CommonGetCompilationNamespace(INamespaceSymbol namespaceSymbol)
{
return this.GetCompilationNamespace(namespaceSymbol);
}
protected override INamedTypeSymbol CommonGetTypeByMetadataName(string metadataName)
{
return this.GetTypeByMetadataName(metadataName);
}
protected override INamedTypeSymbol CommonScriptClass
{
get { return this.ScriptClass; }
}
protected override IArrayTypeSymbol CommonCreateArrayTypeSymbol(ITypeSymbol elementType, int rank)
{
return CreateArrayTypeSymbol(elementType.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("elementType"), rank);
}
protected override IPointerTypeSymbol CommonCreatePointerTypeSymbol(ITypeSymbol elementType)
{
return CreatePointerTypeSymbol(elementType.EnsureCSharpSymbolOrNull<ITypeSymbol, TypeSymbol>("elementType"));
}
protected override ITypeSymbol CommonDynamicType
{
get { return DynamicType; }
}
protected override INamedTypeSymbol CommonObjectType
{
get { return this.ObjectType; }
}
protected override IMethodSymbol CommonGetEntryPoint(CancellationToken cancellationToken)
{
return this.GetEntryPoint(cancellationToken);
}
internal override int CompareSourceLocations(Location loc1, Location loc2)
{
Debug.Assert(loc1.IsInSource);
Debug.Assert(loc2.IsInSource);
var comparison = CompareSyntaxTreeOrdering(loc1.SourceTree, loc2.SourceTree);
if (comparison != 0)
{
return comparison;
}
return loc1.SourceSpan.Start - loc2.SourceSpan.Start;
}
/// <summary>
/// Return true if there is a source declaration symbol name that meets given predicate.
/// </summary>
public override bool ContainsSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter = SymbolFilter.TypeAndMember, CancellationToken cancellationToken = default(CancellationToken))
{
if (predicate == null)
{
throw new ArgumentNullException(nameof(predicate));
}
if (filter == SymbolFilter.None)
{
throw new ArgumentException(CSharpResources.NoNoneSearchCriteria, nameof(filter));
}
return this.Declarations.ContainsName(predicate, filter, cancellationToken);
}
/// <summary>
/// Return source declaration symbols whose name meets given predicate.
/// </summary>
public override IEnumerable<ISymbol> GetSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter = SymbolFilter.TypeAndMember, CancellationToken cancellationToken = default(CancellationToken))
{
if (predicate == null)
{
throw new ArgumentNullException(nameof(predicate));
}
if (filter == SymbolFilter.None)
{
throw new ArgumentException(CSharpResources.NoNoneSearchCriteria, nameof(filter));
}
return new SymbolSearcher(this).GetSymbolsWithName(predicate, filter, cancellationToken);
}
#endregion
/// <summary>
        /// Returns true if the compilation has all of the members necessary to emit metadata about
        /// dynamic types.
        /// </summary>
        /// <returns>True if the compilation can emit metadata for dynamic types.</returns>
internal bool HasDynamicEmitAttributes()
{
return
(object)GetWellKnownTypeMember(WellKnownMember.System_Runtime_CompilerServices_DynamicAttribute__ctor) != null &&
(object)GetWellKnownTypeMember(WellKnownMember.System_Runtime_CompilerServices_DynamicAttribute__ctorTransformFlags) != null;
}
internal override AnalyzerDriver AnalyzerForLanguage(ImmutableArray<DiagnosticAnalyzer> analyzers, AnalyzerManager analyzerManager)
{
return new AnalyzerDriver<SyntaxKind>(analyzers, n => n.Kind(), analyzerManager);
}
internal void SymbolDeclaredEvent(Symbol symbol)
{
if (EventQueue != null) EventQueue.Enqueue(new SymbolDeclaredCompilationEvent(this, symbol));
}
/// <summary>
/// Determine if enum arrays can be initialized using block initialization.
/// </summary>
/// <returns>True if it's safe to use block initialization for enum arrays.</returns>
/// <remarks>
/// In NetFx 4.0, block array initializers do not work on all combinations of {32/64 X Debug/Retail} when array elements are enums.
        /// This is fixed in 4.5, thus enabling block array initialization for a very common case.
        /// We look for the presence of <see cref="System.Runtime.GCLatencyMode.SustainedLowLatency"/>, which was introduced in .NET 4.5.
/// </remarks>
internal bool EnableEnumArrayBlockInitialization
{
get
{
var sustainedLowLatency = GetWellKnownTypeMember(WellKnownMember.System_Runtime_GCLatencyMode__SustainedLowLatency);
return sustainedLowLatency != null && sustainedLowLatency.ContainingAssembly == Assembly.CorLibrary;
}
}
private class SymbolSearcher
{
private readonly Dictionary<Declaration, NamespaceOrTypeSymbol> _cache;
private readonly CSharpCompilation _compilation;
public SymbolSearcher(CSharpCompilation compilation)
{
_cache = new Dictionary<Declaration, NamespaceOrTypeSymbol>();
_compilation = compilation;
}
public IEnumerable<ISymbol> GetSymbolsWithName(Func<string, bool> predicate, SymbolFilter filter, CancellationToken cancellationToken)
{
var result = new HashSet<ISymbol>();
var spine = new List<MergedNamespaceOrTypeDeclaration>();
AppendSymbolsWithName(spine, _compilation.Declarations.MergedRoot, predicate, filter, result, cancellationToken);
return result;
}
private void AppendSymbolsWithName(
List<MergedNamespaceOrTypeDeclaration> spine, MergedNamespaceOrTypeDeclaration current,
Func<string, bool> predicate, SymbolFilter filter, HashSet<ISymbol> set, CancellationToken cancellationToken)
{
var includeNamespace = (filter & SymbolFilter.Namespace) == SymbolFilter.Namespace;
var includeType = (filter & SymbolFilter.Type) == SymbolFilter.Type;
var includeMember = (filter & SymbolFilter.Member) == SymbolFilter.Member;
if (current.Kind == DeclarationKind.Namespace)
{
if (includeNamespace && predicate(current.Name))
{
var container = GetSpineSymbol(spine);
set.Add(GetSymbol(container, current));
}
}
else
{
if (includeType && predicate(current.Name))
{
var container = GetSpineSymbol(spine);
set.Add(GetSymbol(container, current));
}
if (includeMember)
{
AppendMemberSymbolsWithName(spine, current, predicate, set, cancellationToken);
}
}
spine.Add(current);
foreach (var child in current.Children.OfType<MergedNamespaceOrTypeDeclaration>())
{
if (includeMember || includeType)
{
AppendSymbolsWithName(spine, child, predicate, filter, set, cancellationToken);
continue;
}
if (child.Kind == DeclarationKind.Namespace)
{
AppendSymbolsWithName(spine, child, predicate, filter, set, cancellationToken);
}
}
// pop last one
spine.RemoveAt(spine.Count - 1);
}
private void AppendMemberSymbolsWithName(
List<MergedNamespaceOrTypeDeclaration> spine, MergedNamespaceOrTypeDeclaration current,
Func<string, bool> predicate, HashSet<ISymbol> set, CancellationToken cancellationToken)
{
spine.Add(current);
var container = GetSpineSymbol(spine);
foreach (var member in container.GetMembers())
{
if (!member.IsTypeOrTypeAlias() &&
(member.CanBeReferencedByName || member.IsExplicitInterfaceImplementation() || member.IsIndexer()) &&
predicate(member.Name))
{
set.Add(member);
}
}
spine.RemoveAt(spine.Count - 1);
}
private NamespaceOrTypeSymbol GetSpineSymbol(List<MergedNamespaceOrTypeDeclaration> spine)
{
if (spine.Count == 0)
{
return null;
}
var symbol = GetCachedSymbol(spine[spine.Count - 1]);
if (symbol != null)
{
return symbol;
}
var current = _compilation.GlobalNamespace as NamespaceOrTypeSymbol;
for (var i = 1; i < spine.Count; i++)
{
current = GetSymbol(current, spine[i]);
}
return current;
}
private NamespaceOrTypeSymbol GetCachedSymbol(MergedNamespaceOrTypeDeclaration declaration)
{
NamespaceOrTypeSymbol symbol;
if (_cache.TryGetValue(declaration, out symbol))
{
return symbol;
}
return null;
}
private NamespaceOrTypeSymbol GetSymbol(NamespaceOrTypeSymbol container, MergedNamespaceOrTypeDeclaration declaration)
{
if (container == null)
{
return _compilation.GlobalNamespace;
}
if (declaration.Kind == DeclarationKind.Namespace)
{
AddCache(container.GetMembers(declaration.Name).OfType<NamespaceOrTypeSymbol>());
}
else
{
AddCache(container.GetTypeMembers(declaration.Name));
}
return GetCachedSymbol(declaration);
}
private void AddCache(IEnumerable<NamespaceOrTypeSymbol> symbols)
{
foreach (var symbol in symbols)
{
var mergedNamespace = symbol as MergedNamespaceSymbol;
if (mergedNamespace != null)
{
_cache[mergedNamespace.ConstituentNamespaces.OfType<SourceNamespaceSymbol>().First().MergedDeclaration] = symbol;
continue;
}
var sourceNamespace = symbol as SourceNamespaceSymbol;
if (sourceNamespace != null)
{
_cache[sourceNamespace.MergedDeclaration] = sourceNamespace;
continue;
}
var sourceType = symbol as SourceMemberContainerTypeSymbol;
if (sourceType != null)
{
_cache[sourceType.MergedDeclaration] = sourceType;
}
}
}
}
}
}
| Java |
/*******************************************************************************
* Copyright 2015 Software Evolution and Architecture Lab, University of Zurich
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package CloudWave;
public enum CloudWaveJNI {
instance;
public static final String CLOUDWAVE_LIB = "cloudwavejni";
CloudWaveJNI() {System.loadLibrary(CLOUDWAVE_LIB);}
public static CloudWaveJNI getInstance(){return instance;}
public void init() throws CloudWaveException{
int r = initJNI();
if (r<0) {
System.err.println("initJNI returned " + r);
throw new CloudWaveException();
}
}
public void free(){
freeJNI();
}
protected IEventHandler eventHandler;
public IEventHandler getEventHandler() {
return eventHandler;
}
public void setEventHandler(IEventHandler eh) {
synchronized(this){ eventHandler = eh;}
}
public void doEvent(String event){
synchronized(this) {
if (eventHandler!=null)
eventHandler.doEvent(event);
}
}
protected synchronized static void callback(String event){
instance.doEvent(event);
}
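    // Usage sketch (illustrative, not part of the original file): native code reports events through
    // callback(event), which forwards to whatever handler was registered, e.g.
    //   CloudWaveJNI.getInstance().init();
    //   CloudWaveJNI.getInstance().setEventHandler(event -> System.out.println(event));
    // The lambda assumes IEventHandler declares only doEvent(String).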
//#: Init/Free
public native int initJNI();
protected native int freeJNI();
//:#
//#: Log
protected native int initLog();
protected native int freeLog();
protected native int setLogId(String id);
protected native String getLogId();
protected native int recordLog(int level, String message);
protected native int recordLogL(int level, String message, long id);
//:#
//#: Metric
protected native int initMetric();
protected native int freeMetric();
protected native int recordMetricL(int source, String name, String mdata, String munit, int type, long value);
protected native int recordMetricD(int source, String name, String mdata, String munit, int type, double value);
protected native int recordMetricS(int source, String name, String mdata, String munit, int type, String value);
protected native int recordEventL(int source, String name, String mdata, String munit, int type, long value);
protected native int recordEventD(int source, String name, String mdata, String munit, int type, double value);
protected native int recordEventS(int source, String name, String mdata, String munit, int type, String value);
//:#
//#: Events
protected native int initEvent();
protected native int freeEvent();
protected native int postEvent(String event_json);
protected native long subscribe(String event_id);
protected native int unsubscribe(long id);
//:#
}
| Java |
const NamingMixin = {
_name: null,
getName() {
return this._name;
},
_shortName: null,
getShortName() {
return this._shortName || this.getName();
},
_abbreviation: null,
getAbbreviation() {
return this._abbreviation || this.getShortName();
},
};
export default NamingMixin;
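// Illustrative fallback chain (not part of the original module): with _name = "Strength" and
// _shortName = "Str" but no _abbreviation, getAbbreviation() returns "Str"; if only _name were
// set, getName(), getShortName() and getAbbreviation() would all return "Strength".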
| Java |
from mainapp import create_app
app = create_app()
if __name__ == '__main__':
app.run(host='0.0.0.0')
| Java |
/*
* Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.identity.entitlement.filter.callback;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.identity.entitlement.filter.exception.EntitlementFilterException;
import javax.servlet.http.HttpServletRequest;
public class BasicAuthCallBackHandler extends EntitlementFilterCallBackHandler {
private static final Log log = LogFactory.getLog(BasicAuthCallBackHandler.class);
public BasicAuthCallBackHandler(HttpServletRequest request) throws EntitlementFilterException {
String authHeaderEn = null;
if (!(request.getHeader("Authorization") == null || request.getHeader("Authorization").equals("null"))) {
authHeaderEn = request.getHeader("Authorization");
String tempArr[] = authHeaderEn.split(" ");
if (tempArr.length == 2) {
String authHeaderDc = new String(Base64.decodeBase64(tempArr[1].getBytes()));
tempArr = authHeaderDc.split(":");
                if (tempArr.length == 2) {
                    setUserName(tempArr[0]);
                    // Username extracted successfully; do not fall through to the failure path.
                    return;
                }
            }
            throw new EntitlementFilterException("Unable to retrieve username from Authorization header");
}
}
}
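// Illustrative decoding (not part of the original source): a header of
// "Authorization: Basic YWxpY2U6c2VjcmV0" Base64-decodes to "alice:secret",
// so the constructor above passes "alice" to setUserName.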
| Java |
package main
import (
"github.com/ActiveState/tail"
"github.com/ugorji/go/codec"
"io/ioutil"
"log"
"os"
"reflect"
"regexp"
"strconv"
"strings"
"time"
)
type inputTail struct {
path string
format string
tag string
pos_file string
offset int64
sync_interval int
codec *codec.JsonHandle
time_key string
}
func (self *inputTail) Init(f map[string]string) error {
self.sync_interval = 2
value := f["path"]
if len(value) > 0 {
self.path = value
}
value = f["format"]
if len(value) > 0 {
self.format = value
if value == "json" {
_codec := codec.JsonHandle{}
_codec.MapType = reflect.TypeOf(map[string]interface{}(nil))
self.codec = &_codec
value = f["time_key"]
if len(value) > 0 {
self.time_key = value
} else {
self.time_key = "time"
}
}
}
value = f["tag"]
if len(value) > 0 {
self.tag = value
}
value = f["pos_file"]
if len(value) > 0 {
self.pos_file = value
str, err := ioutil.ReadFile(self.pos_file)
if err != nil {
log.Println("ioutil.ReadFile:", err)
}
f, err := os.Open(self.path)
if err != nil {
log.Println("os.Open:", err)
}
info, err := f.Stat()
if err != nil {
log.Println("f.Stat:", err)
self.offset = 0
} else {
offset, _ := strconv.Atoi(string(str))
if int64(offset) > info.Size() {
self.offset = info.Size()
} else {
self.offset = int64(offset)
}
}
}
value = f["sync_interval"]
if len(value) > 0 {
sync_interval, err := strconv.Atoi(value)
if err != nil {
return err
}
self.sync_interval = sync_interval
}
return nil
}
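// Illustrative configuration (not part of the original source): Init expects entries such as
//   map[string]string{"path": "/var/log/app.log", "format": "json", "tag": "app",
//     "pos_file": "/var/lib/agent/app.pos", "sync_interval": "5"}
// where "format" may instead be a /.../-delimited regexp whose (?<name>...) groups are rewritten
// above into Go's (?P<name>...) syntax.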
func (self *inputTail) Run(runner InputRunner) error {
defer func() {
if err := recover(); err != nil {
			log.Fatalln("recover panic at err:", err)
}
}()
var seek int
if self.offset > 0 {
seek = os.SEEK_SET
} else {
seek = os.SEEK_END
}
t, err := tail.TailFile(self.path, tail.Config{
Poll: true,
ReOpen: true,
Follow: true,
MustExist: false,
Location: &tail.SeekInfo{int64(self.offset), seek}})
if err != nil {
return err
}
f, err := os.OpenFile(self.pos_file, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0600)
if err != nil {
log.Fatalln("os.OpenFile", err)
}
defer f.Close()
var re regexp.Regexp
if string(self.format[0]) == string("/") || string(self.format[len(self.format)-1]) == string("/") {
format := strings.Trim(self.format, "/")
trueformat := regexp.MustCompile("\\(\\?<").ReplaceAllString(format, "(?P<")
if trueformat != format {
log.Printf("pos_file:%s, format:%s", self.path, trueformat)
}
re = *regexp.MustCompile(trueformat)
self.format = "regexp"
} else if self.format == "json" {
}
tick := time.NewTicker(time.Second * time.Duration(self.sync_interval))
count := 0
for {
select {
case <-tick.C:
{
if count > 0 {
offset, err := t.Tell()
if err != nil {
log.Println("Tell return error: ", err)
continue
}
str := strconv.Itoa(int(offset))
_, err = f.WriteAt([]byte(str), 0)
if err != nil {
log.Println("f.WriteAt", err)
return err
}
count = 0
}
}
case line := <-t.Lines:
{
pack := <-runner.InChan()
pack.MsgBytes = []byte(line.Text)
pack.Msg.Tag = self.tag
pack.Msg.Timestamp = line.Time.Unix()
if self.format == "regexp" {
text := re.FindSubmatch([]byte(line.Text))
if text == nil {
pack.Recycle()
continue
}
for i, name := range re.SubexpNames() {
if len(name) > 0 {
pack.Msg.Data[name] = string(text[i])
}
}
} else if self.format == "json" {
dec := codec.NewDecoderBytes([]byte(line.Text), self.codec)
err := dec.Decode(&pack.Msg.Data)
if err != nil {
log.Println("json.Unmarshal", err)
pack.Recycle()
continue
} else {
t, ok := pack.Msg.Data[self.time_key]
if ok {
if time, xx := t.(uint64); xx {
pack.Msg.Timestamp = int64(time)
delete(pack.Msg.Data, self.time_key)
} else if time64, oo := t.(int64); oo {
pack.Msg.Timestamp = time64
delete(pack.Msg.Data, self.time_key)
} else {
log.Println("time is not int64, ", t, " typeof:", reflect.TypeOf(t))
pack.Recycle()
continue
}
}
}
}
count++
runner.RouterChan() <- pack
}
}
}
err = t.Wait()
if err != nil {
return err
}
return err
}
func init() {
RegisterInput("tail", func() interface{} {
return new(inputTail)
})
}
| Java |
/*
* Copyright 2012-2017 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.boot.logging;
import org.apache.commons.logging.Log;
import org.junit.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.verifyZeroInteractions;
/**
* Tests for {@link DeferredLog}.
*
* @author Phillip Webb
*/
public class DeferredLogTests {
private DeferredLog deferredLog = new DeferredLog();
private Object message = "Message";
private Throwable throwable = new IllegalStateException();
private Log log = mock(Log.class);
@Test
public void isTraceEnabled() throws Exception {
assertThat(this.deferredLog.isTraceEnabled()).isTrue();
}
@Test
public void isDebugEnabled() throws Exception {
assertThat(this.deferredLog.isDebugEnabled()).isTrue();
}
@Test
public void isInfoEnabled() throws Exception {
assertThat(this.deferredLog.isInfoEnabled()).isTrue();
}
@Test
public void isWarnEnabled() throws Exception {
assertThat(this.deferredLog.isWarnEnabled()).isTrue();
}
@Test
public void isErrorEnabled() throws Exception {
assertThat(this.deferredLog.isErrorEnabled()).isTrue();
}
@Test
public void isFatalEnabled() throws Exception {
assertThat(this.deferredLog.isFatalEnabled()).isTrue();
}
@Test
public void trace() throws Exception {
this.deferredLog.trace(this.message);
this.deferredLog.replayTo(this.log);
verify(this.log).trace(this.message, null);
}
@Test
public void traceWithThrowable() throws Exception {
this.deferredLog.trace(this.message, this.throwable);
this.deferredLog.replayTo(this.log);
verify(this.log).trace(this.message, this.throwable);
}
@Test
public void debug() throws Exception {
this.deferredLog.debug(this.message);
this.deferredLog.replayTo(this.log);
verify(this.log).debug(this.message, null);
}
@Test
public void debugWithThrowable() throws Exception {
this.deferredLog.debug(this.message, this.throwable);
this.deferredLog.replayTo(this.log);
verify(this.log).debug(this.message, this.throwable);
}
@Test
public void info() throws Exception {
this.deferredLog.info(this.message);
this.deferredLog.replayTo(this.log);
verify(this.log).info(this.message, null);
}
@Test
public void infoWithThrowable() throws Exception {
this.deferredLog.info(this.message, this.throwable);
this.deferredLog.replayTo(this.log);
verify(this.log).info(this.message, this.throwable);
}
@Test
public void warn() throws Exception {
this.deferredLog.warn(this.message);
this.deferredLog.replayTo(this.log);
verify(this.log).warn(this.message, null);
}
@Test
public void warnWithThrowable() throws Exception {
this.deferredLog.warn(this.message, this.throwable);
this.deferredLog.replayTo(this.log);
verify(this.log).warn(this.message, this.throwable);
}
@Test
public void error() throws Exception {
this.deferredLog.error(this.message);
this.deferredLog.replayTo(this.log);
verify(this.log).error(this.message, null);
}
@Test
public void errorWithThrowable() throws Exception {
this.deferredLog.error(this.message, this.throwable);
this.deferredLog.replayTo(this.log);
verify(this.log).error(this.message, this.throwable);
}
@Test
public void fatal() throws Exception {
this.deferredLog.fatal(this.message);
this.deferredLog.replayTo(this.log);
verify(this.log).fatal(this.message, null);
}
@Test
public void fatalWithThrowable() throws Exception {
this.deferredLog.fatal(this.message, this.throwable);
this.deferredLog.replayTo(this.log);
verify(this.log).fatal(this.message, this.throwable);
}
@Test
public void clearsOnReplayTo() throws Exception {
this.deferredLog.info("1");
this.deferredLog.fatal("2");
Log log2 = mock(Log.class);
this.deferredLog.replayTo(this.log);
this.deferredLog.replayTo(log2);
verify(this.log).info("1", null);
verify(this.log).fatal("2", null);
verifyNoMoreInteractions(this.log);
verifyZeroInteractions(log2);
}
}
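// Usage sketch (illustrative): code that runs before logging is configured can write to a
// DeferredLog and replay once a real logger exists, e.g.
//   DeferredLog startupLog = new DeferredLog();
//   startupLog.info("early message");
//   startupLog.replayTo(LogFactory.getLog(SomeConfiguredClass.class)); // re-emits, then clears
// where SomeConfiguredClass is a hypothetical caller; clearsOnReplayTo above shows the clearing behaviour.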
| Java |
# Copyright 2012 OpenStack Foundation
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Identity v2 EC2 Credentials action implementations"""
import logging
import six
from cliff import command
from cliff import lister
from cliff import show
from openstackclient.common import utils
from openstackclient.i18n import _ # noqa
class CreateEC2Creds(show.ShowOne):
"""Create EC2 credentials"""
log = logging.getLogger(__name__ + ".CreateEC2Creds")
def get_parser(self, prog_name):
parser = super(CreateEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'--project',
metavar='<project>',
help=_('Specify a project [admin only]'),
)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.project:
project = utils.find_resource(
identity_client.tenants,
parsed_args.project,
).id
else:
# Get the project from the current auth
project = identity_client.auth_tenant_id
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
creds = identity_client.ec2.create(user, project)
info = {}
info.update(creds._info)
return zip(*sorted(six.iteritems(info)))
class DeleteEC2Creds(command.Command):
"""Delete EC2 credentials"""
log = logging.getLogger(__name__ + '.DeleteEC2Creds')
def get_parser(self, prog_name):
parser = super(DeleteEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'access_key',
metavar='<access-key>',
help=_('Credentials access key'),
)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
identity_client.ec2.delete(user, parsed_args.access_key)
class ListEC2Creds(lister.Lister):
"""List EC2 credentials"""
log = logging.getLogger(__name__ + '.ListEC2Creds')
def get_parser(self, prog_name):
parser = super(ListEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
columns = ('access', 'secret', 'tenant_id', 'user_id')
column_headers = ('Access', 'Secret', 'Project ID', 'User ID')
data = identity_client.ec2.list(user)
return (column_headers,
(utils.get_item_properties(
s, columns,
formatters={},
) for s in data))
class ShowEC2Creds(show.ShowOne):
"""Show EC2 credentials"""
log = logging.getLogger(__name__ + '.ShowEC2Creds')
def get_parser(self, prog_name):
parser = super(ShowEC2Creds, self).get_parser(prog_name)
parser.add_argument(
'access_key',
metavar='<access-key>',
help=_('Credentials access key'),
)
parser.add_argument(
'--user',
metavar='<user>',
help=_('Specify a user [admin only]'),
)
return parser
def take_action(self, parsed_args):
self.log.debug('take_action(%s)', parsed_args)
identity_client = self.app.client_manager.identity
if parsed_args.user:
user = utils.find_resource(
identity_client.users,
parsed_args.user,
).id
else:
# Get the user from the current auth
user = identity_client.auth_user_id
creds = identity_client.ec2.get(user, parsed_args.access_key)
info = {}
info.update(creds._info)
return zip(*sorted(six.iteritems(info)))
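# Illustrative CLI invocations these classes are assumed to back (command names follow the usual
# openstackclient mapping and are not defined in this module):
#   openstack ec2 credentials create --project demo --user alice
#   openstack ec2 credentials list --user alice
#   openstack ec2 credentials show <access-key>
#   openstack ec2 credentials delete <access-key>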
| Java |
/*
* Copyright 2010 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.runtime.pipeline;
/**
*
 * <p>This API is experimental and thus the classes and interfaces returned are subject to change.</p>
*/
public interface Transformer
extends
Emitter,
Receiver,
Stage {
}
| Java |
<div class="container userContainer">
<div class="row">
<input type="text" ng-model="model.data.search" placeholder="Search user..." ng-change="model.search()" autofocus>
</div>
<div class="row userContainer-row" ng-click="model.userSelect(user)" ng-show="model.users.length" ng-repeat="user in model.users">
{{user.firstName}} {{user.lastName}}
</div>
<div class="row userContainer-row" ng-show="!model.users.length">
No user found ...
</div>
</div>
| Java |
/*
* sbt
* Copyright 2011 - 2018, Lightbend, Inc.
* Copyright 2008 - 2010, Mark Harrah
* Licensed under Apache License 2.0 (see LICENSE)
*/
package testpkg
import scala.concurrent.duration._
// starts svr using server-test/completions and perform sbt/completion tests
object ServerCompletionsTest extends AbstractServerTest {
override val testDirectory: String = "completions"
test("return basic completions on request") { _ =>
val completionStr = """{ "query": "" }"""
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": 15, "method": "sbt/completion", "params": $completionStr }"""
)
assert(svr.waitForString(10.seconds) { s =>
println(s)
s contains """"result":{"items":["""
})
}
test("return completion for custom tasks") { _ =>
val completionStr = """{ "query": "hell" }"""
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": 16, "method": "sbt/completion", "params": $completionStr }"""
)
assert(svr.waitForString(10.seconds) { s =>
s contains """"result":{"items":["hello"]"""
})
}
test("return completions for user classes") { _ =>
val completionStr = """{ "query": "testOnly org." }"""
svr.sendJsonRpc(
s"""{ "jsonrpc": "2.0", "id": 17, "method": "sbt/completion", "params": $completionStr }"""
)
assert(svr.waitForString(10.seconds) { s =>
s contains """"result":{"items":["testOnly org.sbt.ExampleSpec"]"""
})
}
}
| Java |
/*******************************************************************************
* Copyright (c) 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.ibm.ws.lars.rest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;
/**
*
*/
public class TestUtils {
/**
* Reads the specified InputStream and returns a byte array containing all the bytes read.
*/
public static byte[] slurp(InputStream is) throws IOException {
byte[] buffer = new byte[1024];
int length;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
while ((length = is.read(buffer)) != -1) {
baos.write(buffer, 0, length);
}
return baos.toByteArray();
}
/**
* Assert that an AssetList contains exactly the given list of assets
* <p>
* This method assumes that all assets have an ID and there are no duplicates in the asset list.
*/
public static void assertAssetList(AssetList list, Asset... assets) {
Map<String, Asset> assetIdMap = new HashMap<>();
for (Asset asset : assets) {
if (assetIdMap.put(asset.get_id(), asset) != null) {
throw new AssertionError("Duplicate found in list of expected assets:\n" + asset.toJson());
}
}
for (Asset asset : list) {
if (assetIdMap.remove(asset.get_id()) == null) {
throw new AssertionError("Unexpected asset found in the asset list:\n" + asset.toJson());
}
}
if (!assetIdMap.isEmpty()) {
StringBuilder message = new StringBuilder("Assets missing from asset list:\n");
for (Asset asset : assetIdMap.values()) {
message.append(asset.toJson());
message.append("\n");
}
throw new AssertionError(message.toString());
}
}
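    // Usage sketch (illustrative): a test might assert the repository contents with
    //   AssetList list = repository.getAllAssets();
    //   TestUtils.assertAssetList(list, assetA, assetB); // fails on extras, omissions or duplicates
    // where repository, assetA and assetB are hypothetical fixtures.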
}
| Java |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.topic.impl.reliable;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.partition.MigrationState;
import com.hazelcast.partition.MigrationListener;
import com.hazelcast.partition.ReplicaMigrationEvent;
import com.hazelcast.ringbuffer.impl.RingbufferService;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.OverridePropertyRule;
import com.hazelcast.test.TestHazelcastInstanceFactory;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import com.hazelcast.topic.ITopic;
import com.hazelcast.topic.Message;
import com.hazelcast.topic.MessageListener;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.util.Collection;
import java.util.HashSet;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertTrue;
@Category({QuickTest.class, ParallelJVMTest.class})
@RunWith(HazelcastParallelClassRunner.class)
public class SubscriptionMigrationTest extends HazelcastTestSupport {
@Rule
public OverridePropertyRule overridePropertyRule = OverridePropertyRule.set("hazelcast.partition.count", "2");
// gh issue: https://github.com/hazelcast/hazelcast/issues/13602
@Test
public void testListenerReceivesMessagesAfterPartitionIsMigratedBack() {
TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory();
HazelcastInstance instance1 = factory.newHazelcastInstance();
final String rtNameOnPartition0 = generateReliableTopicNameForPartition(instance1, 0);
final String rtNameOnPartition1 = generateReliableTopicNameForPartition(instance1, 1);
ITopic<String> topic0 = instance1.getReliableTopic(rtNameOnPartition0);
ITopic<String> topic1 = instance1.getReliableTopic(rtNameOnPartition1);
final CountingMigrationListener migrationListener = new CountingMigrationListener();
instance1.getPartitionService().addMigrationListener(migrationListener);
final PayloadMessageListener<String> listener0 = new PayloadMessageListener<String>();
final PayloadMessageListener<String> listener1 = new PayloadMessageListener<String>();
topic0.addMessageListener(listener0);
topic1.addMessageListener(listener1);
topic0.publish("itemA");
topic1.publish("item1");
HazelcastInstance instance2 = factory.newHazelcastInstance();
// 1 primary, 1 backup migration
assertEqualsEventually(2, migrationListener.partitionMigrationCount);
instance2.shutdown();
assertEqualsEventually(3, migrationListener.partitionMigrationCount);
topic0.publish("itemB");
topic1.publish("item2");
assertTrueEventually(new AssertTask() {
@Override
public void run() {
assertTrue(listener0.isReceived("itemA"));
assertTrue(listener0.isReceived("itemB"));
assertTrue(listener1.isReceived("item1"));
assertTrue(listener1.isReceived("item2"));
}
});
}
public class PayloadMessageListener<V> implements MessageListener<V> {
private Collection<V> receivedMessages = new HashSet<V>();
@Override
public void onMessage(Message<V> message) {
receivedMessages.add(message.getMessageObject());
}
boolean isReceived(V message) {
return receivedMessages.contains(message);
}
}
public class CountingMigrationListener implements MigrationListener {
AtomicInteger partitionMigrationCount = new AtomicInteger();
@Override
public void migrationStarted(MigrationState state) {
}
@Override
public void migrationFinished(MigrationState state) {
}
@Override
public void replicaMigrationCompleted(ReplicaMigrationEvent event) {
partitionMigrationCount.incrementAndGet();
}
@Override
public void replicaMigrationFailed(ReplicaMigrationEvent event) {
}
}
private String generateReliableTopicNameForPartition(HazelcastInstance instance, int partitionId) {
return generateKeyForPartition(instance, RingbufferService.TOPIC_RB_PREFIX, partitionId);
}
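    // Note (illustrative): with hazelcast.partition.count pinned to 2 by the rule above,
    // generateKeyForPartition yields a ringbuffer name that hashes onto the requested partition,
    // so each topic's data lands on a known partition and the migrations triggered by starting
    // and stopping instance2 are deterministic.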
}
| Java |
# Copyright 2012 SINA Corporation
# Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Extracts OpenStack config option info from module(s)."""
from __future__ import print_function
import argparse
import imp
import os
import re
import socket
import sys
import textwrap
from oslo.config import cfg
import six
import stevedore.named
from climate.openstack.common import gettextutils
from climate.openstack.common import importutils
gettextutils.install('climate')
STROPT = "StrOpt"
BOOLOPT = "BoolOpt"
INTOPT = "IntOpt"
FLOATOPT = "FloatOpt"
LISTOPT = "ListOpt"
DICTOPT = "DictOpt"
MULTISTROPT = "MultiStrOpt"
OPT_TYPES = {
STROPT: 'string value',
BOOLOPT: 'boolean value',
INTOPT: 'integer value',
FLOATOPT: 'floating point value',
LISTOPT: 'list value',
DICTOPT: 'dict value',
MULTISTROPT: 'multi valued',
}
OPTION_REGEX = re.compile(r"(%s)" % "|".join([STROPT, BOOLOPT, INTOPT,
FLOATOPT, LISTOPT, DICTOPT,
MULTISTROPT]))
PY_EXT = ".py"
BASEDIR = os.path.abspath(os.path.join(os.path.dirname(__file__),
"../../../../"))
WORDWRAP_WIDTH = 60
def generate(argv):
parser = argparse.ArgumentParser(
description='generate sample configuration file',
)
parser.add_argument('-m', dest='modules', action='append')
parser.add_argument('-l', dest='libraries', action='append')
parser.add_argument('srcfiles', nargs='*')
parsed_args = parser.parse_args(argv)
mods_by_pkg = dict()
for filepath in parsed_args.srcfiles:
pkg_name = filepath.split(os.sep)[1]
mod_str = '.'.join(['.'.join(filepath.split(os.sep)[:-1]),
os.path.basename(filepath).split('.')[0]])
mods_by_pkg.setdefault(pkg_name, list()).append(mod_str)
# NOTE(lzyeval): place top level modules before packages
pkg_names = sorted(pkg for pkg in mods_by_pkg if pkg.endswith(PY_EXT))
ext_names = sorted(pkg for pkg in mods_by_pkg if pkg not in pkg_names)
pkg_names.extend(ext_names)
# opts_by_group is a mapping of group name to an options list
# The options list is a list of (module, options) tuples
opts_by_group = {'DEFAULT': []}
if parsed_args.modules:
for module_name in parsed_args.modules:
module = _import_module(module_name)
if module:
for group, opts in _list_opts(module):
opts_by_group.setdefault(group, []).append((module_name,
opts))
# Look for entry points defined in libraries (or applications) for
# option discovery, and include their return values in the output.
#
# Each entry point should be a function returning an iterable
# of pairs with the group name (or None for the default group)
# and the list of Opt instances for that group.
if parsed_args.libraries:
loader = stevedore.named.NamedExtensionManager(
'oslo.config.opts',
names=list(set(parsed_args.libraries)),
invoke_on_load=False,
)
for ext in loader:
for group, opts in ext.plugin():
opt_list = opts_by_group.setdefault(group or 'DEFAULT', [])
opt_list.append((ext.name, opts))
for pkg_name in pkg_names:
mods = mods_by_pkg.get(pkg_name)
mods.sort()
for mod_str in mods:
if mod_str.endswith('.__init__'):
mod_str = mod_str[:mod_str.rfind(".")]
mod_obj = _import_module(mod_str)
if not mod_obj:
raise RuntimeError("Unable to import module %s" % mod_str)
for group, opts in _list_opts(mod_obj):
opts_by_group.setdefault(group, []).append((mod_str, opts))
print_group_opts('DEFAULT', opts_by_group.pop('DEFAULT', []))
for group in sorted(opts_by_group.keys()):
print_group_opts(group, opts_by_group[group])
def _import_module(mod_str):
try:
if mod_str.startswith('bin.'):
imp.load_source(mod_str[4:], os.path.join('bin', mod_str[4:]))
return sys.modules[mod_str[4:]]
else:
return importutils.import_module(mod_str)
except Exception as e:
sys.stderr.write("Error importing module %s: %s\n" % (mod_str, str(e)))
return None
def _is_in_group(opt, group):
"Check if opt is in group."
for value in group._opts.values():
# NOTE(llu): Temporary workaround for bug #1262148, wait until
# newly released oslo.config support '==' operator.
if not(value['opt'] != opt):
return True
return False
def _guess_groups(opt, mod_obj):
# is it in the DEFAULT group?
if _is_in_group(opt, cfg.CONF):
return 'DEFAULT'
# what other groups is it in?
for value in cfg.CONF.values():
if isinstance(value, cfg.CONF.GroupAttr):
if _is_in_group(opt, value._group):
return value._group.name
raise RuntimeError(
"Unable to find group for option %s, "
"maybe it's defined twice in the same group?"
% opt.name
)
def _list_opts(obj):
def is_opt(o):
return (isinstance(o, cfg.Opt) and
not isinstance(o, cfg.SubCommandOpt))
opts = list()
for attr_str in dir(obj):
attr_obj = getattr(obj, attr_str)
if is_opt(attr_obj):
opts.append(attr_obj)
elif (isinstance(attr_obj, list) and
all(map(lambda x: is_opt(x), attr_obj))):
opts.extend(attr_obj)
ret = {}
for opt in opts:
ret.setdefault(_guess_groups(opt, obj), []).append(opt)
return ret.items()
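# Prints a single "[group]" section of the sample configuration: for each
# (module, options) pair a "# Options defined in <module>" banner is emitted,
# followed by every option rendered as commented-out "#name=default" lines
# via _print_opt().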
def print_group_opts(group, opts_by_module):
print("[%s]" % group)
print('')
for mod, opts in opts_by_module:
print('#')
print('# Options defined in %s' % mod)
print('#')
print('')
for opt in opts:
_print_opt(opt)
print('')
def _get_my_ip():
try:
csock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
csock.connect(('8.8.8.8', 80))
(addr, port) = csock.getsockname()
csock.close()
return addr
except socket.error:
return None
def _sanitize_default(name, value):
"""Set up a reasonably sensible default for pybasedir, my_ip and host."""
if value.startswith(sys.prefix):
# NOTE(jd) Don't use os.path.join, because it is likely to think the
# second part is an absolute pathname and therefore drop the first
# part.
value = os.path.normpath("/usr/" + value[len(sys.prefix):])
elif value.startswith(BASEDIR):
return value.replace(BASEDIR, '/usr/lib/python/site-packages')
elif BASEDIR in value:
return value.replace(BASEDIR, '')
elif value == _get_my_ip():
return '10.0.0.1'
elif value in (socket.gethostname(), socket.getfqdn()) and 'host' in name:
return 'climate'
elif value.strip() != value:
return '"%s"' % value
return value
def _print_opt(opt):
opt_name, opt_default, opt_help = opt.dest, opt.default, opt.help
if not opt_help:
sys.stderr.write('WARNING: "%s" is missing help string.\n' % opt_name)
opt_help = ""
opt_type = None
try:
opt_type = OPTION_REGEX.search(str(type(opt))).group(0)
except (ValueError, AttributeError) as err:
sys.stderr.write("%s\n" % str(err))
sys.exit(1)
opt_help = u'%s (%s)' % (opt_help,
OPT_TYPES[opt_type])
print('#', "\n# ".join(textwrap.wrap(opt_help, WORDWRAP_WIDTH)))
if opt.deprecated_opts:
for deprecated_opt in opt.deprecated_opts:
if deprecated_opt.name:
deprecated_group = (deprecated_opt.group if
deprecated_opt.group else "DEFAULT")
print('# Deprecated group/name - [%s]/%s' %
(deprecated_group,
deprecated_opt.name))
try:
if opt_default is None:
print('#%s=<None>' % opt_name)
elif opt_type == STROPT:
assert(isinstance(opt_default, six.string_types))
print('#%s=%s' % (opt_name, _sanitize_default(opt_name,
opt_default)))
elif opt_type == BOOLOPT:
assert(isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, str(opt_default).lower()))
elif opt_type == INTOPT:
assert(isinstance(opt_default, int) and
not isinstance(opt_default, bool))
print('#%s=%s' % (opt_name, opt_default))
elif opt_type == FLOATOPT:
assert(isinstance(opt_default, float))
print('#%s=%s' % (opt_name, opt_default))
elif opt_type == LISTOPT:
assert(isinstance(opt_default, list))
print('#%s=%s' % (opt_name, ','.join(opt_default)))
elif opt_type == DICTOPT:
assert(isinstance(opt_default, dict))
opt_default_strlist = [str(key) + ':' + str(value)
for (key, value) in opt_default.items()]
print('#%s=%s' % (opt_name, ','.join(opt_default_strlist)))
elif opt_type == MULTISTROPT:
assert(isinstance(opt_default, list))
if not opt_default:
opt_default = ['']
for default in opt_default:
print('#%s=%s' % (opt_name, default))
print('')
except Exception:
sys.stderr.write('Error in option "%s"\n' % opt_name)
sys.exit(1)
def main():
generate(sys.argv[1:])
if __name__ == '__main__':
main()
| Java |
registerNpc(1007, {
walk_speed = 0,
run_speed = 0,
scale = 130,
r_weapon = 0,
l_weapon = 0,
level = 10,
hp = 100,
attack = 100,
hit = 100,
def = 100,
res = 100,
avoid = 100,
attack_spd = 100,
is_magic_damage = 0,
ai_type = 0,
give_exp = 39,
drop_type = 58,
drop_money = 0,
drop_item = 0,
union_number = 0,
need_summon_count = 225,
sell_tab0 = 225,
sell_tab1 = 0,
sell_tab2 = 0,
sell_tab3 = 0,
can_target = 0,
attack_range = 200,
npc_type = 999,
hit_material_type = 0,
face_icon = 17,
summon_mob_type = 17,
quest_type = 0,
height = 0
});
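-- The On* functions below are the entity lifecycle callbacks the server is
-- expected to invoke for this NPC (an assumption based on their names; the
-- actual call sites live in the engine, not in this file).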
function OnInit(entity)
return true
end
function OnCreate(entity)
return true
end
function OnDelete(entity)
return true
end
function OnDead(entity)
end
function OnDamaged(entity)
end | Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.management.internal.cli.commands;
import java.util.List;
import java.util.Set;
import org.springframework.shell.core.annotation.CliCommand;
import org.springframework.shell.core.annotation.CliOption;
import org.apache.geode.cache.execute.ResultCollector;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.management.cli.CliMetaData;
import org.apache.geode.management.cli.ConverterHint;
import org.apache.geode.management.cli.Result;
import org.apache.geode.management.internal.cli.CliUtil;
import org.apache.geode.management.internal.cli.domain.SubscriptionQueueSizeResult;
import org.apache.geode.management.internal.cli.functions.GetSubscriptionQueueSizeFunction;
import org.apache.geode.management.internal.cli.i18n.CliStrings;
import org.apache.geode.management.internal.cli.result.ResultBuilder;
import org.apache.geode.management.internal.security.ResourceOperation;
import org.apache.geode.security.ResourcePermission;
public class CountDurableCQEventsCommand extends InternalGfshCommand {
DurableClientCommandsResultBuilder builder = new DurableClientCommandsResultBuilder();
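  /**
   * Resolves the target members from the --member/--group options, executes
   * GetSubscriptionQueueSizeFunction on them with the durable client id (and
   * optional CQ name), and renders the collected queue sizes as a table.
   */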
@CliCommand(value = CliStrings.COUNT_DURABLE_CQ_EVENTS,
help = CliStrings.COUNT_DURABLE_CQ_EVENTS__HELP)
@CliMetaData()
@ResourceOperation(resource = ResourcePermission.Resource.CLUSTER,
operation = ResourcePermission.Operation.READ)
public Result countDurableCqEvents(
@CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID, mandatory = true,
help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CLIENT__ID__HELP) final String durableClientId,
@CliOption(key = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME,
help = CliStrings.COUNT_DURABLE_CQ_EVENTS__DURABLE__CQ__NAME__HELP) final String cqName,
@CliOption(key = {CliStrings.MEMBER, CliStrings.MEMBERS},
help = CliStrings.COUNT_DURABLE_CQ_EVENTS__MEMBER__HELP,
optionContext = ConverterHint.MEMBERIDNAME) final String[] memberNameOrId,
@CliOption(key = {CliStrings.GROUP, CliStrings.GROUPS},
help = CliStrings.COUNT_DURABLE_CQ_EVENTS__GROUP__HELP,
optionContext = ConverterHint.MEMBERGROUP) final String[] group) {
Result result;
try {
Set<DistributedMember> targetMembers = findMembers(group, memberNameOrId);
if (targetMembers.isEmpty()) {
return ResultBuilder.createUserErrorResult(CliStrings.NO_MEMBERS_FOUND_MESSAGE);
}
String[] params = new String[2];
params[0] = durableClientId;
params[1] = cqName;
final ResultCollector<?, ?> rc =
CliUtil.executeFunction(new GetSubscriptionQueueSizeFunction(), params, targetMembers);
final List<SubscriptionQueueSizeResult> funcResults =
(List<SubscriptionQueueSizeResult>) rc.getResult();
String queueSizeColumnName;
if (cqName != null && !cqName.isEmpty()) {
queueSizeColumnName = CliStrings
.format(CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, cqName);
} else {
queueSizeColumnName = CliStrings.format(
CliStrings.COUNT_DURABLE_CQ_EVENTS__SUBSCRIPTION__QUEUE__SIZE__CLIENT, durableClientId);
}
result = builder.buildTableResultForQueueSize(funcResults, queueSizeColumnName);
} catch (Exception e) {
result = ResultBuilder.createGemFireErrorResult(e.getMessage());
}
return result;
}
}
| Java |
/**
* @author Oleksandr Prunyak ([email protected])
* @version $Id$
* @since 0.1
*/
package ru.job4j.loop; | Java |
/**
* @license
* Copyright 2018 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as Lint from "../../index";
// tslint:disable: object-literal-sort-keys
export const codeExamples = [
{
description: "Prefer `while` loops instead of `for` loops without an initializer and incrementor.",
config: Lint.Utils.dedent`
"rules": { "prefer-while": true }
`,
pass: Lint.Utils.dedent`
for(let i = 1; i < 10; i++) {
console.log(i);
}
for (let i = 0; i < 10; i+=1) {
console.log(i);
}
for (let i = 0; i < 10;) {
i += 1;
}
`,
fail: Lint.Utils.dedent`
for(;;) {
console.log('Hello World');
}
for(;true===true;) {
console.log('Hello World');
}
`,
},
];
| Java |
/*
* Copyright 2019 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.properties;
import org.junit.Test;
import org.kie.workbench.common.stunner.bpmn.client.marshall.converters.customproperties.CustomElement;
import org.kie.workbench.common.stunner.bpmn.definition.property.general.SLADueDate;
import static junit.framework.TestCase.assertTrue;
import static org.kie.workbench.common.stunner.bpmn.client.marshall.converters.fromstunner.Factories.bpmn2;
public class EmbeddedSubprocessPropertyWriterTest {
private SubProcessPropertyWriter tested = new SubProcessPropertyWriter(bpmn2.createSubProcess(),
new FlatVariableScope());
@Test
public void testSetIsAsync() {
tested.setAsync(Boolean.TRUE);
assertTrue(CustomElement.async.of(tested.getFlowElement()).get());
}
@Test
public void testSetSlaDueDate() {
String slaDueDate = "12/25/1983";
tested.setSlaDueDate(new SLADueDate(slaDueDate));
assertTrue(CustomElement.slaDueDate.of(tested.getFlowElement()).get().contains(slaDueDate));
}
}
| Java |
{% macro warnings_and_loader() -%}
<div ng-cloak>
<div class="oppia-toast-container toast-top-center">
<div ng-repeat="warning in (alertsService.warnings | limitTo:5) track by $index" class="toast toast-warning oppia-toast">
<button type="button" class="toast-close-button" ng-click="alertsService.deleteWarning(warning)" role="button">×</button>
<div class="toast-message">
<[warning.content]>
</div>
</div>
</div>
<div>
<div ng-repeat="message in alertsService.messages track by $index">
<alert-message message-object="message" message-index="$index"></alert-message>
</div>
</div>
<div ng-show="loadingMessage" class="oppia-loading-fullpage">
<div class="oppia-align-center">
<span translate="<[loadingMessage]>"></span>
<span class="oppia-loading-dot-one">.</span>
<span class="oppia-loading-dot-two">.</span>
<span class="oppia-loading-dot-three">.</span>
</div>
</div>
<div ng-show="!loadingMessage">
{% block content %}{% endblock %}
{% block footer %}{% endblock %}
</div>
</div>
{%- endmacro %}
<!DOCTYPE html>
<html ng-app="oppia" ng-controller="Base" itemscope itemtype="http://schema.org/Organization">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">
<!-- Tiles for Internet Explorer. -->
<meta name="application-name" content="{{SITE_NAME}}">
<meta name="msapplication-TileColor" content="#ffffff">
<meta name="msapplication-square70x70logo" content="{{DOMAIN_URL}}/images/logo/msapplication-tiny.png">
<meta name="msapplication-square150x150logo" content="{{DOMAIN_URL}}/images/logo/msapplication-square.png">
<meta name="msapplication-wide310x150logo" content="{{DOMAIN_URL}}/images/logo/msapplication-wide.png">
<meta name="msapplication-square310x310logo" content="{{DOMAIN_URL}}/images/logo/msapplication-large.png">
<!-- The itemprops are for G+ sharing. -->
<meta itemprop="name" content="{{meta_name}}">
<meta itemprop="description" content="{{meta_description}}">
<!-- The og tags are for Facebook sharing. -->
<meta property="og:title" content="{{meta_name}}">
<meta property="og:site_name" content="Oppia">
<meta property="og:url" content="{{FULL_URL}}">
<meta property="og:description" content="{{meta_description}}">
<meta property="og:type" content="article">
<meta property="og:image" content="{{DOMAIN_URL}}/images/logo/288x288_logo_mint.png">
<link rel="apple-touch-icon" href="/images/logo/favicon.png">
<!-- The title is bound to the rootScope. The content of the block
maintitle can be a string or a translation id. If it is a translation it
will be replaced by its translation when the page is loading. If it is a
string it would be displayed as is. This is the only way to translate
the page title because the head of the file is outside the scope of
any other controller. -->
<title itemprop="name" translate="{% block maintitle %}Oppia{% endblock maintitle %}"></title>
{% block base_url %}{% endblock base_url %}
{% block header_css %}
{% include 'header_css_libs.html' %}
{% endblock header_css %}
<script>
var GLOBALS = {
ADDITIONAL_ANGULAR_MODULES: [],
csrf_token: JSON.parse('{{csrf_token|js_string}}'),
csrf_token_i18n: JSON.parse('{{csrf_token_i18n|js_string}}'),
csrf_token_create_exploration: JSON.parse('{{csrf_token_create_exploration|js_string}}'),
DEV_MODE: JSON.parse('{{DEV_MODE|js_string}}'),
INVALID_NAME_CHARS: JSON.parse('{{INVALID_NAME_CHARS|js_string}}'),
ACTIVITY_STATUS_PRIVATE: JSON.parse(
'{{ACTIVITY_STATUS_PRIVATE|js_string}}'),
ACTIVITY_STATUS_PUBLIC: JSON.parse(
'{{ACTIVITY_STATUS_PUBLIC|js_string}}'),
ACTIVITY_STATUS_PUBLICIZED: JSON.parse(
'{{ACTIVITY_STATUS_PUBLICIZED|js_string}}'),
ALL_CATEGORIES: JSON.parse('{{ALL_CATEGORIES|js_string}}'),
ALL_LANGUAGE_CODES: JSON.parse('{{ALL_LANGUAGE_CODES|js_string}}'),
DEFAULT_LANGUAGE_CODE: JSON.parse(
'{{DEFAULT_LANGUAGE_CODE|js_string}}'),
RTE_COMPONENT_SPECS: JSON.parse('{{RTE_COMPONENT_SPECS|js_string}}'),
CAN_SEND_ANALYTICS_EVENTS: JSON.parse(
'{{CAN_SEND_ANALYTICS_EVENTS|js_string}}'),
/* A list of functions to be called when an exploration is completed. */
POST_COMPLETION_HOOKS: [],
SYSTEM_USERNAMES: JSON.parse('{{SYSTEM_USERNAMES|js_string}}'),
userIsLoggedIn: JSON.parse('{{user_is_logged_in|js_string}}'),
preferredSiteLanguageCode: JSON.parse('{{preferred_site_language_code|js_string}}'),
SUPPORTED_SITE_LANGUAGES: JSON.parse('{{SUPPORTED_SITE_LANGUAGES|js_string}}')
};
{% if additional_angular_modules %}
GLOBALS.ADDITIONAL_ANGULAR_MODULES = JSON.parse('{{additional_angular_modules|js_string}}');
{% endif %}
</script>
{% block header_js %}
{% include 'header_js_libs.html' %}
{% endblock header_js %}
{{BEFORE_END_HEAD_TAG_HOOK}}
</head>
<body>
{% if iframed %}
{{ warnings_and_loader() }}
{% else %}
<div class="oppia-base-container" ng-class="{'oppia-sidebar-menu-open': sidebarIsShown, 'oppia-sidebar-menu-closed': !sidebarIsShown}">
<div class="oppia-content-container">
<div id="wrapper">
<div class="oppia-main-body">
<!-- Top navigation. -->
<nav class="navbar navbar-default oppia-navbar oppia-prevent-selection" role="navigation">
<div class="navbar-container">
<div class="navbar-header protractor-test-navbar-header pull-left">
<a ng-if="windowIsNarrow" ng-click="openSidebar()" class="navbar-brand oppia-navbar-menu oppia-transition-200">
<i class="material-icons oppia-navbar-menu-icon"></i>
</a>
<a class="oppia-navbar-brand-name oppia-transition-200" href="/" focus-on="<[LABEL_FOR_CLEARING_FOCUS]>">
<img src="/images/logo/288x128_logo_white.png" class="oppia-logo" ng-class="windowIsNarrow ? 'oppia-logo-small' : 'oppia-logo-wide'">
</a>
<!-- This is needed for the correct image to appear when an exploration is shared using G+. -->
<a style="display: none;">
<img src="/images/logo/288x128_logo_mint.png" itemprop="image">
</a>
</div>
{% if nav_mode != 'signup' %}
<div ng-cloak class="navbar-header pull-right">
<ul class="nav oppia-navbar-nav oppia-navbar-profile">
{% if username %}
<li class="dropdown pull-right">
<a class="dropdown-toggle oppia-navbar-dropdown-toggle" data-toggle="dropdown" ng-mouseover="onMouseoverProfilePictureOrDropdown($event)" ng-mouseleave="onMouseoutProfilePictureOrDropdown($event)">
<div class="oppia-navbar-profile-picture-container" ng-cloak>
{% if profile_picture_data_url %}
<img src="{{profile_picture_data_url}}" class="oppia-navbar-profile-picture img-circle">
<span class="caret" style="margin-top: 10px;"></span>
{% else %}
<i class="material-icons md-40" style="margin-top: -1px;"></i>
<span class="caret" style="margin-top: -26px;"></span>
{% endif %}
<div class="oppia-navbar-dashboard-indicator ng-cloak" ng-if="numUnseenNotifications > 0">
<span class="oppia-navbar-dashboard-indicator-text">
<[numUnseenNotifications]>
</span>
</div>
<div style="display: none;" class="oppia-user-email">
{{user_email}}
</div>
{% if is_admin or is_moderator %}
<div class="oppia-navbar-role-indicator">
{% if is_admin %}
<!-- "right: 4px;" is necessary here but not in moderator to prevent 'A' from appearing off-center because 'A' is slightly thinner than 'M' in this font -->
<span class="oppia-navbar-role-text" style="right: 4px;">A</span>
{% elif is_moderator %}
<span class="oppia-navbar-role-text">M</span>
{% endif %}
</div>
{% endif %}
</div>
</a>
<ul class="dropdown-menu ng-cloak oppia-navbar-dropdown" role="menu" ng-mouseover="onMouseoverProfilePictureOrDropdown($event)" ng-mouseleave="onMouseoutProfilePictureOrDropdown($event)" ng-show="profileDropdownIsActive">
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/profile/{{username}}">
<strong>{{username}}</strong>
</a>
</li>
<hr class="oppia-top-right-menu-item-separator">
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/dashboard">
<span translate="I18N_TOPNAV_DASHBOARD"></span>
</a>
</li>
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/notifications_dashboard">
<span translate="I18N_TOPNAV_NOTIFICATIONS"></span>
<span ng-if="numUnseenNotifications > 0">
(<[numUnseenNotifications]>)
</span>
</a>
</li>
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/preferences">
<span translate="I18N_TOPNAV_PREFERENCES"></span>
</a>
</li>
{% if is_moderator %}
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/moderator" target="_blank">
<span translate="I18N_TOPNAV_MODERATOR_PAGE"></span>
</a>
</li>
{% endif %}
{% if is_super_admin %}
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="/admin" target="_blank">
<span translate="I18N_TOPNAV_ADMIN_PAGE"></span>
</a>
</li>
{% endif %}
<hr class="oppia-top-right-menu-item-separator">
<li>
<a ng-click="onMouseoutProfilePictureOrDropdown($event)" href="{{logout_url}}">
<span translate="I18N_TOPNAV_LOGOUT"></span>
</a>
</li>
</ul>
</li>
{% else %}
<li class="dropdown oppia-navbar-clickable-dropdown pull-right">
<div class="oppia-navbar-button-container" style="margin-right: 10px;">
<button class="btn oppia-navbar-button"
ng-click="onLoginButtonClicked('{{login_url}}')">
<span translate="I18N_TOPNAV_SIGN_IN"></span>
<span class="caret"></span>
</button>
</div>
<ul class="dropdown-menu oppia-navbar-dropdown" role="menu" style="margin-right: 15px; padding: 0;" ng-mouseover="onMouseoverDropdownMenu($event)" ng-mouseleave="onMouseoutDropdownMenu($event)">
<li>
<a href style="padding: 0; width: 200px;" ng-click="onLoginButtonClicked('{{login_url}}')">
<img src="/images/signin/Red-signin-Long-base-44dp.png">
</a>
</li>
</ul>
</li>
{% endif %}
</ul>
<ul class="nav oppia-navbar-nav">
{% if nav_mode != 'create' and nav_mode != 'explore' %}
<ul ng-if="windowIsNarrow" class="nav oppia-navbar-tabs-narrow">
<create-activity-button></create-activity-button>
</ul>
<ul ng-if="!windowIsNarrow" class="nav oppia-navbar-tabs">
<create-activity-button></create-activity-button>
{% if SHOW_CUSTOM_PAGES %}
<li class="oppia-clickable-navbar-element pull-right">
<a class="oppia-navbar-tab" href="/forum" translate="I18N_TOPNAV_FORUM">
</a>
</li>
{% endif %}
<li class="dropdown oppia-navbar-clickable-dropdown pull-right">
<a class="oppia-navbar-tab">
<span translate="I18N_TOPNAV_ABOUT"></span>
<span class="caret"></span>
</a>
<ul class="dropdown-menu oppia-navbar-dropdown" ng-mouseover="onMouseoverDropdownMenu($event)" ng-mouseleave="onMouseoutDropdownMenu($event)">
<li><a href="/about" translate="I18N_TOPNAV_ABOUT_OPPIA"></a></li>
<li><a href="/teach" translate="I18N_TOPNAV_TEACH_WITH_OPPIA"></a></li>
{% for additional_link in SIDEBAR_MENU_ADDITIONAL_LINKS %}
<li><a href="{{additional_link['link']}}" target="_blank">{{additional_link['name']}}</a></li>
{% endfor %}
{% if SHOW_CUSTOM_PAGES %}
<li><a href="/terms" translate="I18N_TOPNAV_TERMS_OF_SERVICE"></a></li>
<li><a href="/privacy" translate="I18N_TOPNAV_PRIVACY_POLICY"></a></li>
{% endif %}
</ul>
</li>
<li class="oppia-clickable-navbar-element pull-right">
<a class="oppia-navbar-tab" href="/library" translate="I18N_TOPNAV_LIBRARY"></a>
</li>
</ul>
{% endif %}
</ul>
</div>
{% endif %}
<div class="collapse navbar-collapse ng-cloak">
{% block navbar_breadcrumb %}
{% endblock navbar_breadcrumb %}
{% block local_top_nav_options %}
{% endblock %}
</div>
</div>
</nav>
<div class="oppia-top-of-page-padding">
</div>
{{ warnings_and_loader() }}
</div>
<noscript>
<div class="oppia-page-cards-container">
<div class="md-default-theme oppia-page-card oppia-long-text">
<h2>
<span translate="I18N_SPLASH_JAVASCRIPT_ERROR_TITLE"></span>
<i class="material-icons"></i>
</h2>
<p translate="I18N_SPLASH_JAVASCRIPT_ERROR_DESCRIPTION"
translate-values="{hrefUrl: 'http://www.enable-javascript.com/'}"></p>
<p translate="I18N_SPLASH_JAVASCRIPT_ERROR_THANKS"></p>
</div>
</div>
</noscript>
{% include 'side_nav.html' %}
</div>
</div>
</div>
{% if DEV_MODE %}
<div class="oppia-dev-mode">
Dev Mode
</div>
{% endif %}
{% if SITE_FEEDBACK_FORM_URL %}
<a href="{{SITE_FEEDBACK_FORM_URL}}" target="_blank"
class="oppia-site-feedback oppia-transition-200">
<i class="material-icons md-18" style="vertical-align: middle;"></i>
<span translate="I18N_SPLASH_SITE_FEEDBACK"></span>
</a>
{% endif %}
{% endif %}
{% include 'directives.html' %}
{% include 'forms/form_builder_templates.html' %}
{% include 'footer_js_libs.html' %}
{% include 'components/collection_summary_tile_directive.html' %}
{% include 'components/exploration_summary_tile_directive.html' %}
{% include 'components/rating_display.html' %}
{% include 'components/create_activity_button_directive.html' %}
{% include 'components/activity_tiles_infinity_grid_directive.html' %}
{% include 'components/loading_dots_directive.html' %}
<script>
{{ include_js_file('app.js') }}
{{ include_js_file('base.js') }}
{{ include_js_file('directives.js') }}
{{ include_js_file('filters.js') }}
{{ include_js_file('i18n.js') }}
{{ include_js_file('forms/formBuilder.js') }}
{{ include_js_file('services/alertsService.js') }}
{{ include_js_file('services/explorationContextService.js') }}
{{ include_js_file('services/autoplayedVideosService.js') }}
{{ include_js_file('services/searchService.js') }}
{{ include_js_file('components/ActivityTilesInfinityGridDirective.js') }}
{{ include_js_file('components/AlertMessageDirective.js') }}
{{ include_js_file('components/CollectionCreationService.js') }}
{{ include_js_file('components/CollectionSummaryTileDirective.js') }}
{{ include_js_file('components/CreateActivityButtonDirective.js') }}
{{ include_js_file('components/ExplorationCreationService.js') }}
{{ include_js_file('components/ExplorationSummaryTileDirective.js') }}
{{ include_js_file('components/LoadingDotsDirective.js') }}
{{ include_js_file('components/ObjectEditorDirective.js') }}
{{ include_js_file('components/RatingComputationService.js')}}
{{ include_js_file('components/RatingDisplayDirective.js')}}
{{ include_js_file('components/Select2DropdownDirective.js') }}
{{ include_js_file('domain/utilities/UrlInterpolationService.js') }}
{{ include_js_file('expressions/expressionSyntaxTree.js') }}
{{ include_js_file('expressions/evaluator.js') }}
{{ include_js_file('expressions/parser.js') }}
{{ include_js_file('domain/utilities/UrlInterpolationService.js') }}
{{ OBJECT_EDITORS_JS }}
</script>
{% block footer_js %}
{% endblock footer_js %}
{{BEFORE_END_BODY_TAG_HOOK}}
</body>
</html>
| Java |
<!DOCTYPE html><html lang="en"><head><title>src/Parser</title></head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"><meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0"><meta name="groc-relative-root" content="../"><meta name="groc-document-path" content="src/Parser.coffee"><meta name="groc-project-path" content="src/Parser.coffee"><meta name="groc-github-url" content="https://github.com/sjorek/goatee-rules.js"><link rel="stylesheet" type="text/css" media="all" href="../assets/style.css"><script type="text/javascript" src="../assets/behavior.js"></script><body><div id="meta"><div class="file-path"><a href="https://github.com/sjorek/goatee-rules.js/blob/master/src/Parser.coffee">src/Parser.coffee</a></div></div><div id="document"><div class="segment"><div class="code folded"><div class="wrapper marker"><span class="c1">### </span></div><div class="wrapper"><span class="hljs-comment">###
BSD 3-Clause License
Copyright (c) 2017, Stephan Jorek
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
###</span>
<span class="hljs-keyword">try</span>
exports = <span class="hljs-built_in">require</span> <span class="hljs-string">'./ParserImpl'</span>
<span class="hljs-keyword">catch</span>
exports = <span class="hljs-literal">null</span>
<span class="hljs-keyword">if</span> exports <span class="hljs-keyword">is</span> <span class="hljs-literal">null</span>
Grammar = <span class="hljs-built_in">require</span> <span class="hljs-string">'./Grammar'</span>
exports = <span class="hljs-built_in">module</span>?.exports ? <span class="hljs-keyword">this</span></div></div></div><div class="segment"><div class="comments "><div class="wrapper"><h1 id="parser">Parser</h1>
<hr>
<p>A thin compatibillity layer providing an
“on-the-fly” generated goatee-rules parser.</p></div></div></div><div class="segment"><div class="comments doc-section doc-section-static"><div class="wrapper"><p><span class='doc-section-header'>Static property parser of type <em>Parser</em></span></p>
<hr></div></div><div class="code"><div class="wrapper"> exports.parser = parser = Grammar.createParser()</div></div></div><div class="segment"><div class="comments doc-section"><div class="wrapper"><p><span class='doc-section-header'> class Parser and namespace GoateeScript</span></p>
<hr></div></div><div class="code"><div class="wrapper"> exports.Parser = parser.Parser</div></div></div><div class="segment"><div class="comments doc-section doc-section-static"><div class="wrapper"><p><span class='doc-section-header'>Static function parse</span></p>
<hr></div></div><div class="code"><div class="wrapper"> exports.parse = <span class="hljs-function"><span class="hljs-params">()</span> -></span> parser.parse.apply(parser, arguments)</div></div></div><div class="segment"><div class="comments doc-section doc-section-static"><div class="wrapper"><p><span class='doc-section-header'>Static function main</span></p>
<hr>
<p>Parameters:</p>
<ul>
<li><strong>args must be an Array.</strong></li>
</ul></div></div><div class="code"><div class="wrapper"> exports.main = <span class="hljs-function"><span class="hljs-params">(args)</span> -></span>
<span class="hljs-keyword">if</span> <span class="hljs-keyword">not</span> args[<span class="hljs-number">1</span>]
<span class="hljs-built_in">console</span>.log <span class="hljs-string">"Usage: <span class="hljs-subst">#{args[<span class="hljs-number">0</span>]}</span> FILE"</span>
process.exit <span class="hljs-number">1</span>
source = <span class="hljs-built_in">require</span>(<span class="hljs-string">'fs'</span>).readFileSync(
<span class="hljs-built_in">require</span>(<span class="hljs-string">'path'</span>).normalize(args[<span class="hljs-number">1</span>]), <span class="hljs-string">"utf8"</span>
)
parser.parse(source)
<span class="hljs-built_in">module</span>.exports = exports</div></div></div><div class="segment"><div class="comments "><div class="wrapper"><p>execute main automatically</p></div></div><div class="code"><div class="wrapper"><span class="hljs-keyword">if</span> (<span class="hljs-built_in">module</span> <span class="hljs-keyword">isnt</span> <span class="hljs-literal">undefined</span> && <span class="hljs-built_in">require</span>.main <span class="hljs-keyword">is</span> <span class="hljs-built_in">module</span>)
exports.main process.argv.slice(<span class="hljs-number">1</span>)</div></div></div></div></body></html> | Java |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_22) on Wed Sep 14 22:21:32 CEST 2011 -->
<META http-equiv="Content-Type" content="text/html; charset=ISO-8859-1">
<TITLE>
net.sourceforge.pmd.typeresolution.rules.imports Class Hierarchy (PMD 4.2.6 API)
</TITLE>
<META NAME="date" CONTENT="2011-09-14">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="net.sourceforge.pmd.typeresolution.rules.imports Class Hierarchy (PMD 4.2.6 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/rules/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/visitors/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?net/sourceforge/pmd/typeresolution/rules/imports/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
Hierarchy For Package net.sourceforge.pmd.typeresolution.rules.imports
</H2>
</CENTER>
<DL>
<DT><B>Package Hierarchies:</B><DD><A HREF="../../../../../../overview-tree.html">All Packages</A></DL>
<HR>
<H2>
Class Hierarchy
</H2>
<UL>
<LI TYPE="circle">java.lang.Object<UL>
<LI TYPE="circle">net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/CommonAbstractRule.html" title="class in net.sourceforge.pmd"><B>CommonAbstractRule</B></A> (implements net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/Rule.html" title="interface in net.sourceforge.pmd">Rule</A>)
<UL>
<LI TYPE="circle">net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/AbstractJavaRule.html" title="class in net.sourceforge.pmd"><B>AbstractJavaRule</B></A> (implements net.sourceforge.pmd.ast.<A HREF="../../../../../../net/sourceforge/pmd/ast/JavaParserVisitor.html" title="interface in net.sourceforge.pmd.ast">JavaParserVisitor</A>)
<UL>
<LI TYPE="circle">net.sourceforge.pmd.<A HREF="../../../../../../net/sourceforge/pmd/AbstractRule.html" title="class in net.sourceforge.pmd"><B>AbstractRule</B></A><UL>
<LI TYPE="circle">net.sourceforge.pmd.rules.imports.<A HREF="../../../../../../net/sourceforge/pmd/rules/imports/UnusedImportsRule.html" title="class in net.sourceforge.pmd.rules.imports"><B>UnusedImportsRule</B></A><UL>
<LI TYPE="circle">net.sourceforge.pmd.typeresolution.rules.imports.<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/rules/imports/UnusedImports.html" title="class in net.sourceforge.pmd.typeresolution.rules.imports"><B>UnusedImports</B></A></UL>
</UL>
</UL>
</UL>
</UL>
</UL>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Class</FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <FONT CLASS="NavBarFont1">Use</FONT> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Tree</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/rules/package-tree.html"><B>PREV</B></A>
<A HREF="../../../../../../net/sourceforge/pmd/typeresolution/visitors/package-tree.html"><B>NEXT</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../index.html?net/sourceforge/pmd/typeresolution/rules/imports/package-tree.html" target="_top"><B>FRAMES</B></A>
<A HREF="package-tree.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2002-2011 InfoEther. All Rights Reserved.
</BODY>
</HTML>
| Java |
/*
* Copyright 2021 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "Firestore/core/src/local/memory_persistence.h"
#include "Firestore/core/test/unit/local/bundle_cache_test.h"
#include "Firestore/core/test/unit/local/persistence_testing.h"
namespace firebase {
namespace firestore {
namespace local {
namespace {
std::unique_ptr<Persistence> PersistenceFactory() {
return MemoryPersistenceWithEagerGcForTesting();
}
} // namespace
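// Instantiates the shared BundleCacheTest suite with a factory that builds
// eager-GC in-memory persistence, so every parameterized test case in the
// suite runs against MemoryPersistence.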
INSTANTIATE_TEST_SUITE_P(MemoryBundleCacheTest,
BundleCacheTest,
testing::Values(PersistenceFactory));
} // namespace local
} // namespace firestore
} // namespace firebase
| Java |
<?php
/**
* Copyright 2011 Crucial Web Studio, LLC or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* https://raw.githubusercontent.com/chargely/chargify-sdk-php/master/LICENSE.md
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
namespace Crucial\Service;
use GuzzleHttp\Client;
use GuzzleHttp\Psr7;
use GuzzleHttp\Psr7\Request;
use GuzzleHttp\Psr7\Response;
use GuzzleHttp\Exception\RequestException;
use GuzzleHttp\HandlerStack;
use Crucial\Service\Chargify\Exception\BadMethodCallException;
use Crucial\Service\Chargify\Adjustment;
use Crucial\Service\Chargify\Charge;
use Crucial\Service\Chargify\Component;
use Crucial\Service\Chargify\Coupon;
use Crucial\Service\Chargify\Customer;
use Crucial\Service\Chargify\Event;
use Crucial\Service\Chargify\Product;
use Crucial\Service\Chargify\Refund;
use Crucial\Service\Chargify\Statement;
use Crucial\Service\Chargify\Stats;
use Crucial\Service\Chargify\Subscription;
use Crucial\Service\Chargify\Transaction;
use Crucial\Service\Chargify\Webhook;
class Chargify
{
/**
* Version
*/
const VERSION = '0.1.1';
/**
* Guzzle http client
*
* @var Client
*/
private $httpClient;
/**
* The complete hostname; e.g. "my-app-subdomain.chargify.com",
* not just "my-app-subdomain"
*
* @var string
*/
protected $hostname;
/**
* Your http authentication password. The password is always "x".
*
* @var string
*/
protected $password = 'x';
/**
* Your api key
*
* @var string
*/
protected $apiKey;
/**
* Shared key
*
* @var string
*/
protected $sharedKey;
/**
     * Request timeout in seconds
*
* @var int
*/
protected $timeout = 10;
    /**
     * Request/response body format; used for the URL path extension and the
     * Content-Type header. Currently only 'json' is used.
     *
     * @var string
     */
protected $format = 'json';
/**
* Config used in constructor.
*
* @var array
*/
protected $config;
/**
* @var Response|false|null
*/
protected $lastResponse;
/**
* Initialize the service
*
* @param array $config
*/
public function __construct($config)
{
// store a copy
$this->config = $config;
// set individual properties
$this->hostname = trim($config['hostname'], '/');
$this->apiKey = $config['api_key'];
$this->sharedKey = $config['shared_key'];
if (!empty($config['timeout'])) {
$this->timeout = $config['timeout'];
}
$this->httpClient = new Client([
'base_uri' => 'https://' . $this->hostname . '/',
'handler' => HandlerStack::create(),
'timeout' => $this->timeout,
'allow_redirects' => false,
'auth' => [$this->apiKey, $this->password],
'headers' => [
'User-Agent' => 'chargify-sdk-php/' . self::VERSION . ' (https://github.com/chargely/chargify-sdk-php)',
'Content-Type' => 'application/' . $this->format
]
]);
}
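    /*
     * Usage sketch (hostname, keys and request path below are placeholders,
     * not real credentials or a confirmed endpoint):
     *
     *     $chargify = new Chargify([
     *         'hostname'   => 'example-subdomain.chargify.com',
     *         'api_key'    => 'your-api-key',
     *         'shared_key' => 'your-shared-key',
     *         'timeout'    => 10,
     *     ]);
     *     $response = $chargify->request('/customers', 'GET');
     */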
/**
* @return Client
*/
public function getHttpClient()
{
return $this->httpClient;
}
/**
* Returns config sent in constructor
*
* @return array
*/
public function getConfig()
{
return $this->config;
}
/**
* Send the request to Chargify
*
* @param string $path URL path we are requesting such as: /subscriptions/<subscription_id>/adjustments
* @param string $method GET, POST, PUT, DELETE
* @param string $rawData
* @param array $params
*
* @return Response|FALSE Response object or FALSE if there was no response (networking error, timeout, etc.)
*/
public function request($path, $method, $rawData = null, $params = [])
{
$method = strtoupper($method);
$path = ltrim($path, '/');
$path = $path . '.' . $this->format;
$client = $this->getHttpClient();
$options = [
'query' => $params,
'body' => null,
];
$request = new Request($method, $path);
if (in_array($method, array('POST', 'PUT'))) {
if (null === $rawData) {
throw new BadMethodCallException('You must send raw data in a POST or PUT request');
}
}
if (!empty($rawData)) {
$options['body'] = Psr7\stream_for($rawData);
}
try {
$response = $client->send($request, $options);
} catch (RequestException $e) {
if ($e->hasResponse()) {
$response = $e->getResponse();
} else {
$response = false;
}
}
$this->lastResponse = $response;
return $response;
}
/**
* @return Response
*/
public function getLastResponse()
{
return $this->lastResponse;
}
/**
* Helper for instantiating an instance of Customer
*
* @return Customer
*/
public function customer()
{
return new Customer($this);
}
/**
* Helper for instantiating an instance of Subscription
*
* @return Subscription
*/
public function subscription()
{
return new Subscription($this);
}
/**
* Helper for instantiating an instance of Product
*
* @return Product
*/
public function product()
{
return new Product($this);
}
/**
* Helper for instantiating an instance of Adjustment
*
* @return Adjustment
*/
public function adjustment()
{
return new Adjustment($this);
}
/**
* Helper for instantiating an instance of Charge
*
* @return Charge
*/
public function charge()
{
return new Charge($this);
}
/**
* Helper for instantiating an instance of Component
*
* @return Component
*/
public function component()
{
return new Component($this);
}
/**
* Helper for instantiating an instance of Coupon
*
* @return Coupon
*/
public function coupon()
{
return new Coupon($this);
}
/**
* Helper for instantiating an instance of Transaction
*
* @return Transaction
*/
public function transaction()
{
return new Transaction($this);
}
/**
* Helper for instantiating an instance of Refund
*
* @return Refund
*/
public function refund()
{
return new Refund($this);
}
/**
* Helper for instantiating an instance of Statement
*
* @return Statement
*/
public function statement()
{
return new Statement($this);
}
/**
* Helper for instantiating an instance of Event
*
* @return Event
*/
public function event()
{
return new Event($this);
}
/**
* Helper for instantiating an instance of Webhook
*
* @return Webhook
*/
public function webhook()
{
return new Webhook($this);
}
/**
* Helper for instantiating an instance of Stats
*
* @return Stats
*/
public function stats()
{
return new Stats($this);
}
}
| Java |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.exprtree;
/**
* Container of nodes representing operators.
*
* <p> Important: Do not use outside of Soy code (treat as superpackage-private).
*
* @author Kai Huang
*/
public class OperatorNodes {
private OperatorNodes() {}
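  // Each nested node class below pairs one Operator constant with a Kind value,
  // and supplies a copy constructor plus clone() so operator nodes can be
  // deep-copied when expression trees are cloned.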
/**
* Node representing the unary '-' (negative) operator.
*/
public static class NegativeOpNode extends AbstractOperatorNode {
public NegativeOpNode() { super(Operator.NEGATIVE); }
protected NegativeOpNode(NegativeOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.NEGATIVE_OP_NODE; }
@Override public NegativeOpNode clone() { return new NegativeOpNode(this); }
}
/**
* Node representing the 'not' operator.
*/
public static class NotOpNode extends AbstractOperatorNode {
public NotOpNode() { super(Operator.NOT); }
protected NotOpNode(NotOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.NOT_OP_NODE; }
@Override public NotOpNode clone() { return new NotOpNode(this); }
}
/**
* Node representing the '*' (times) operator.
*/
public static class TimesOpNode extends AbstractOperatorNode {
public TimesOpNode() { super(Operator.TIMES); }
protected TimesOpNode(TimesOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.TIMES_OP_NODE; }
@Override public TimesOpNode clone() { return new TimesOpNode(this); }
}
/**
   * Node representing the '/' (divide by) operator.
*/
public static class DivideByOpNode extends AbstractOperatorNode {
public DivideByOpNode() { super(Operator.DIVIDE_BY); }
protected DivideByOpNode(DivideByOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.DIVIDE_BY_OP_NODE; }
@Override public DivideByOpNode clone() { return new DivideByOpNode(this); }
}
/**
* Node representing the '%' (mod) operator.
*/
public static class ModOpNode extends AbstractOperatorNode {
public ModOpNode() { super(Operator.MOD); }
protected ModOpNode(ModOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.MOD_OP_NODE; }
@Override public ModOpNode clone() { return new ModOpNode(this); }
}
/**
* Node representing the '+' (plus) operator.
*/
public static class PlusOpNode extends AbstractOperatorNode {
public PlusOpNode() { super(Operator.PLUS); }
protected PlusOpNode(PlusOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.PLUS_OP_NODE; }
@Override public PlusOpNode clone() { return new PlusOpNode(this); }
}
/**
* Node representing the binary '-' (minus) operator.
*/
public static class MinusOpNode extends AbstractOperatorNode {
public MinusOpNode() { super(Operator.MINUS); }
protected MinusOpNode(MinusOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.MINUS_OP_NODE; }
@Override public MinusOpNode clone() { return new MinusOpNode(this); }
}
/**
* Node representing the '<' (less than) operator.
*/
public static class LessThanOpNode extends AbstractOperatorNode {
public LessThanOpNode() { super(Operator.LESS_THAN); }
protected LessThanOpNode(LessThanOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.LESS_THAN_OP_NODE; }
@Override public LessThanOpNode clone() { return new LessThanOpNode(this); }
}
/**
* Node representing the '>' (greater than) operator.
*/
public static class GreaterThanOpNode extends AbstractOperatorNode {
public GreaterThanOpNode() { super(Operator.GREATER_THAN); }
protected GreaterThanOpNode(GreaterThanOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.GREATER_THAN_OP_NODE; }
@Override public GreaterThanOpNode clone() { return new GreaterThanOpNode(this); }
}
/**
* Node representing the '<=' (less than or equal) operator.
*/
public static class LessThanOrEqualOpNode extends AbstractOperatorNode {
public LessThanOrEqualOpNode() { super(Operator.LESS_THAN_OR_EQUAL); }
protected LessThanOrEqualOpNode(LessThanOrEqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.LESS_THAN_OR_EQUAL_OP_NODE; }
@Override public LessThanOrEqualOpNode clone() { return new LessThanOrEqualOpNode(this); }
}
/**
* Node representing the '>=' (greater than or equal) operator.
*/
public static class GreaterThanOrEqualOpNode extends AbstractOperatorNode {
public GreaterThanOrEqualOpNode() { super(Operator.GREATER_THAN_OR_EQUAL); }
protected GreaterThanOrEqualOpNode(GreaterThanOrEqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.GREATER_THAN_OR_EQUAL_OP_NODE; }
@Override public GreaterThanOrEqualOpNode clone() { return new GreaterThanOrEqualOpNode(this); }
}
/**
* Node representing the '==' (equal) operator.
*/
public static class EqualOpNode extends AbstractOperatorNode {
public EqualOpNode() { super(Operator.EQUAL); }
protected EqualOpNode(EqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.EQUAL_OP_NODE; }
@Override public EqualOpNode clone() { return new EqualOpNode(this); }
}
/**
* Node representing the '!=' (not equal) operator.
*/
public static class NotEqualOpNode extends AbstractOperatorNode {
public NotEqualOpNode() { super(Operator.NOT_EQUAL); }
protected NotEqualOpNode(NotEqualOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.NOT_EQUAL_OP_NODE; }
@Override public NotEqualOpNode clone() { return new NotEqualOpNode(this); }
}
/**
* Node representing the 'and' operator.
*/
public static class AndOpNode extends AbstractOperatorNode {
public AndOpNode() { super(Operator.AND); }
protected AndOpNode(AndOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.AND_OP_NODE; }
@Override public AndOpNode clone() { return new AndOpNode(this); }
}
/**
* Node representing the 'or' operator.
*/
public static class OrOpNode extends AbstractOperatorNode {
public OrOpNode() { super(Operator.OR); }
protected OrOpNode(OrOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.OR_OP_NODE; }
@Override public OrOpNode clone() { return new OrOpNode(this); }
}
/**
* Node representing the ternary '? :' (conditional) operator.
*/
public static class ConditionalOpNode extends AbstractOperatorNode {
public ConditionalOpNode() { super(Operator.CONDITIONAL); }
protected ConditionalOpNode(ConditionalOpNode orig) { super(orig); }
@Override public Kind getKind() { return Kind.CONDITIONAL_OP_NODE; }
@Override public ConditionalOpNode clone() { return new ConditionalOpNode(this); }
}
}
| Java |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.remote.work.artifact;
import com.thoughtworks.go.plugin.access.artifact.ArtifactExtensionConstants;
import com.thoughtworks.go.plugin.api.request.GoApiRequest;
import com.thoughtworks.go.plugin.api.response.DefaultGoApiResponse;
import com.thoughtworks.go.plugin.api.response.GoApiResponse;
import com.thoughtworks.go.plugin.infra.GoPluginApiRequestProcessor;
import com.thoughtworks.go.plugin.infra.plugininfo.GoPluginDescriptor;
import com.thoughtworks.go.remote.work.artifact.ConsoleLogMessage.LogLevel;
import com.thoughtworks.go.util.command.*;
import com.thoughtworks.go.work.GoPublisher;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import static java.lang.String.format;
public class ArtifactRequestProcessor implements GoPluginApiRequestProcessor {
private static final List<String> goSupportedVersions = ArtifactExtensionConstants.SUPPORTED_VERSIONS;
private final SafeOutputStreamConsumer safeOutputStreamConsumer;
private final ProcessType processType;
private enum ProcessType {
FETCH, PUBLISH
}
private static final Map<LogLevel, String> FETCH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
put(LogLevel.INFO, TaggedStreamConsumer.OUT);
put(LogLevel.ERROR, TaggedStreamConsumer.ERR);
}};
private static final Map<LogLevel, String> PUBLISH_ARTIFACT_LOG_LEVEL_TAG = new HashMap<LogLevel, String>() {{
put(LogLevel.INFO, TaggedStreamConsumer.PUBLISH);
put(LogLevel.ERROR, TaggedStreamConsumer.PUBLISH_ERR);
}};
private ArtifactRequestProcessor(GoPublisher publisher, ProcessType processType, EnvironmentVariableContext environmentVariableContext) {
CompositeConsumer errorStreamConsumer = new CompositeConsumer(CompositeConsumer.ERR, publisher);
CompositeConsumer outputStreamConsumer = new CompositeConsumer(CompositeConsumer.OUT, publisher);
this.safeOutputStreamConsumer = new SafeOutputStreamConsumer(new ProcessOutputStreamConsumer(errorStreamConsumer, outputStreamConsumer));
safeOutputStreamConsumer.addSecrets(environmentVariableContext.secrets());
this.processType = processType;
}
public static ArtifactRequestProcessor forFetchArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
return new ArtifactRequestProcessor(goPublisher, ProcessType.FETCH, environmentVariableContext);
}
public static ArtifactRequestProcessor forPublishArtifact(GoPublisher goPublisher, EnvironmentVariableContext environmentVariableContext) {
return new ArtifactRequestProcessor(goPublisher, ProcessType.PUBLISH, environmentVariableContext);
}
@Override
public GoApiResponse process(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
validatePluginRequest(request);
switch (Request.fromString(request.api())) {
case CONSOLE_LOG:
return processConsoleLogRequest(pluginDescriptor, request);
default:
return DefaultGoApiResponse.error("Illegal api request");
}
}
private GoApiResponse processConsoleLogRequest(GoPluginDescriptor pluginDescriptor, GoApiRequest request) {
final ConsoleLogMessage consoleLogMessage = ConsoleLogMessage.fromJSON(request.requestBody());
final String message = format("[%s] %s", pluginDescriptor.id(), consoleLogMessage.getMessage());
Optional<String> parsedTag = parseTag(processType, consoleLogMessage.getLogLevel());
if (parsedTag.isPresent()) {
safeOutputStreamConsumer.taggedStdOutput(parsedTag.get(), message);
return DefaultGoApiResponse.success(null);
}
return DefaultGoApiResponse.error(format("Unsupported log level `%s`.", consoleLogMessage.getLogLevel()));
}
private Optional<String> parseTag(ProcessType requestType, LogLevel logLevel) {
switch (requestType) {
case FETCH:
return Optional.ofNullable(FETCH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
case PUBLISH:
return Optional.ofNullable(PUBLISH_ARTIFACT_LOG_LEVEL_TAG.get(logLevel));
}
return Optional.empty();
}
private void validatePluginRequest(GoApiRequest goPluginApiRequest) {
if (!goSupportedVersions.contains(goPluginApiRequest.apiVersion())) {
throw new RuntimeException(format("Unsupported '%s' API version: %s. Supported versions: %s", goPluginApiRequest.api(), goPluginApiRequest.apiVersion(), goSupportedVersions));
}
}
public enum Request {
CONSOLE_LOG("go.processor.artifact.console-log");
private final String requestName;
Request(String requestName) {
this.requestName = requestName;
}
public static Request fromString(String requestName) {
if (requestName != null) {
for (Request request : Request.values()) {
if (requestName.equalsIgnoreCase(request.requestName)) {
return request;
}
}
}
return null;
}
public String requestName() {
return requestName;
}
}
}
| Java |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!-- NewPage -->
<html lang="en">
<head>
<!-- Generated by javadoc -->
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>com.cloudera.oryx.api.serving (Oryx 2.8.0 API)</title>
<link rel="stylesheet" type="text/css" href="../../../../../stylesheet.css" title="Style">
<script type="text/javascript" src="../../../../../script.js"></script>
</head>
<body>
<h1 class="bar"><a href="../../../../../com/cloudera/oryx/api/serving/package-summary.html" target="classFrame">com.cloudera.oryx.api.serving</a></h1>
<div class="indexContainer">
<h2 title="Interfaces">Interfaces</h2>
<ul title="Interfaces">
<li><a href="HasCSV.html" title="interface in com.cloudera.oryx.api.serving" target="classFrame"><span class="interfaceName">HasCSV</span></a></li>
<li><a href="ServingModel.html" title="interface in com.cloudera.oryx.api.serving" target="classFrame"><span class="interfaceName">ServingModel</span></a></li>
<li><a href="ServingModelManager.html" title="interface in com.cloudera.oryx.api.serving" target="classFrame"><span class="interfaceName">ServingModelManager</span></a></li>
</ul>
<h2 title="Classes">Classes</h2>
<ul title="Classes">
<li><a href="AbstractServingModelManager.html" title="class in com.cloudera.oryx.api.serving" target="classFrame">AbstractServingModelManager</a></li>
<li><a href="OryxResource.html" title="class in com.cloudera.oryx.api.serving" target="classFrame">OryxResource</a></li>
</ul>
<h2 title="Exceptions">Exceptions</h2>
<ul title="Exceptions">
<li><a href="OryxServingException.html" title="class in com.cloudera.oryx.api.serving" target="classFrame">OryxServingException</a></li>
</ul>
</div>
</body>
</html>
| Java |
package com.senseidb.search.node.impl;
import org.json.JSONObject;
import com.senseidb.search.node.SenseiQueryBuilder;
import com.senseidb.search.node.SenseiQueryBuilderFactory;
import com.senseidb.search.req.SenseiQuery;
import com.senseidb.util.JSONUtil.FastJSONObject;
public abstract class AbstractJsonQueryBuilderFactory implements SenseiQueryBuilderFactory {
@Override
public SenseiQueryBuilder getQueryBuilder(SenseiQuery query) throws Exception {
JSONObject jsonQuery = null;
if (query != null) {
byte[] bytes = query.toBytes();
jsonQuery = new FastJSONObject(new String(bytes, SenseiQuery.utf8Charset));
}
return buildQueryBuilder(jsonQuery);
}
public abstract SenseiQueryBuilder buildQueryBuilder(JSONObject jsonQuery);
}
| Java |
/*
* Copyright (c) 2018 STMicroelectronics
*
* SPDX-License-Identifier: Apache-2.0
*/
#include <zephyr.h>
#include <misc/printk.h>
#include <board.h>
#include <gpio.h>
#include <i2c.h>
#include <spi.h>
#include <sensor.h>
/* #define ARGONKEY_TEST_LOG 1 */
#define WHOAMI_REG 0x0F
#define WHOAMI_ALT_REG 0x4F
static inline float out_ev(struct sensor_value *val)
{
return (val->val1 + (float)val->val2 / 1000000);
}
static int lsm6dsl_trig_cnt;
#ifdef CONFIG_LSM6DSL_TRIGGER
static void lsm6dsl_trigger_handler(struct device *dev,
struct sensor_trigger *trig)
{
#ifdef ARGONKEY_TEST_LOG
char out_str[64];
#endif
struct sensor_value accel_x, accel_y, accel_z;
struct sensor_value gyro_x, gyro_y, gyro_z;
#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
struct sensor_value magn_x, magn_y, magn_z;
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
struct sensor_value press, temp;
#endif
lsm6dsl_trig_cnt++;
sensor_sample_fetch_chan(dev, SENSOR_CHAN_ACCEL_XYZ);
sensor_channel_get(dev, SENSOR_CHAN_ACCEL_X, &accel_x);
sensor_channel_get(dev, SENSOR_CHAN_ACCEL_Y, &accel_y);
sensor_channel_get(dev, SENSOR_CHAN_ACCEL_Z, &accel_z);
#ifdef ARGONKEY_TEST_LOG
sprintf(out_str, "accel (%f %f %f) m/s2", out_ev(&accel_x),
out_ev(&accel_y),
out_ev(&accel_z));
printk("TRIG %s\n", out_str);
#endif
/* lsm6dsl gyro */
sensor_sample_fetch_chan(dev, SENSOR_CHAN_GYRO_XYZ);
sensor_channel_get(dev, SENSOR_CHAN_GYRO_X, &gyro_x);
sensor_channel_get(dev, SENSOR_CHAN_GYRO_Y, &gyro_y);
sensor_channel_get(dev, SENSOR_CHAN_GYRO_Z, &gyro_z);
#ifdef ARGONKEY_TEST_LOG
sprintf(out_str, "gyro (%f %f %f) dps", out_ev(&gyro_x),
out_ev(&gyro_y),
out_ev(&gyro_z));
printk("TRIG %s\n", out_str);
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
/* lsm6dsl magn */
sensor_sample_fetch_chan(dev, SENSOR_CHAN_MAGN_XYZ);
sensor_channel_get(dev, SENSOR_CHAN_MAGN_X, &magn_x);
sensor_channel_get(dev, SENSOR_CHAN_MAGN_Y, &magn_y);
sensor_channel_get(dev, SENSOR_CHAN_MAGN_Z, &magn_z);
#ifdef ARGONKEY_TEST_LOG
sprintf(out_str, "magn (%f %f %f) gauss", out_ev(&magn_x),
out_ev(&magn_y),
out_ev(&magn_z));
printk("TRIG %s\n", out_str);
#endif
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
/* lsm6dsl press/temp */
sensor_sample_fetch_chan(dev, SENSOR_CHAN_PRESS);
sensor_channel_get(dev, SENSOR_CHAN_PRESS, &press);
sensor_sample_fetch_chan(dev, SENSOR_CHAN_AMBIENT_TEMP);
sensor_channel_get(dev, SENSOR_CHAN_AMBIENT_TEMP, &temp);
#ifdef ARGONKEY_TEST_LOG
sprintf(out_str, "press (%f) kPa - temp (%f) deg", out_ev(&press),
out_ev(&temp));
printk("%s\n", out_str);
#endif
#endif
}
#endif
void main(void)
{
int cnt = 0;
char out_str[64];
static struct device *led0, *led1;
int i, on = 1;
led0 = device_get_binding(LED0_GPIO_CONTROLLER);
gpio_pin_configure(led0, LED0_GPIO_PIN, GPIO_DIR_OUT);
gpio_pin_write(led0, LED0_GPIO_PIN, 1);
led1 = device_get_binding(LED1_GPIO_CONTROLLER);
gpio_pin_configure(led1, LED1_GPIO_PIN, GPIO_DIR_OUT);
for (i = 0; i < 5; i++) {
gpio_pin_write(led1, LED1_GPIO_PIN, on);
k_sleep(200);
on = (on == 1) ? 0 : 1;
}
printk("ArgonKey test!!\n");
#ifdef CONFIG_LPS22HB
struct device *baro_dev = device_get_binding(CONFIG_LPS22HB_DEV_NAME);
if (!baro_dev) {
printk("Could not get pointer to %s sensor\n",
CONFIG_LPS22HB_DEV_NAME);
return;
}
#endif
#ifdef CONFIG_HTS221
struct device *hum_dev = device_get_binding(CONFIG_HTS221_NAME);
if (!hum_dev) {
printk("Could not get pointer to %s sensor\n",
CONFIG_HTS221_NAME);
return;
}
#endif
#ifdef CONFIG_LSM6DSL
struct device *accel_dev = device_get_binding(CONFIG_LSM6DSL_DEV_NAME);
if (!accel_dev) {
printk("Could not get pointer to %s sensor\n",
CONFIG_LSM6DSL_DEV_NAME);
return;
}
#if defined(CONFIG_LSM6DSL_ACCEL_ODR) && (CONFIG_LSM6DSL_ACCEL_ODR == 0)
struct sensor_value a_odr_attr;
/* set sampling frequency to 104Hz for accel */
a_odr_attr.val1 = 104;
a_odr_attr.val2 = 0;
if (sensor_attr_set(accel_dev, SENSOR_CHAN_ACCEL_XYZ,
SENSOR_ATTR_SAMPLING_FREQUENCY, &a_odr_attr) < 0) {
printk("Cannot set sampling frequency for accelerometer.\n");
return;
}
#endif
#if defined(CONFIG_LSM6DSL_ACCEL_FS) && (CONFIG_LSM6DSL_ACCEL_FS == 0)
struct sensor_value a_fs_attr;
/* set full scale to 16g for accel */
sensor_g_to_ms2(16, &a_fs_attr);
if (sensor_attr_set(accel_dev, SENSOR_CHAN_ACCEL_XYZ,
SENSOR_ATTR_FULL_SCALE, &a_fs_attr) < 0) {
printk("Cannot set fs for accelerometer.\n");
return;
}
#endif
#if defined(CONFIG_LSM6DSL_GYRO_ODR) && (CONFIG_LSM6DSL_GYRO_ODR == 0)
struct sensor_value g_odr_attr;
	/* set sampling frequency to 104Hz for gyro */
g_odr_attr.val1 = 104;
g_odr_attr.val2 = 0;
if (sensor_attr_set(accel_dev, SENSOR_CHAN_GYRO_XYZ,
SENSOR_ATTR_SAMPLING_FREQUENCY, &g_odr_attr) < 0) {
printk("Cannot set sampling frequency for gyro.\n");
return;
}
#endif
#if defined(CONFIG_LSM6DSL_GYRO_FS) && (CONFIG_LSM6DSL_GYRO_FS == 0)
struct sensor_value g_fs_attr;
	/* set full scale to 245dps for gyro */
sensor_g_to_ms2(245, &g_fs_attr);
if (sensor_attr_set(accel_dev, SENSOR_CHAN_GYRO_XYZ,
SENSOR_ATTR_FULL_SCALE, &g_fs_attr) < 0) {
printk("Cannot set fs for gyroscope.\n");
return;
}
#endif
#endif
#ifdef CONFIG_VL53L0X
struct device *tof_dev = device_get_binding(CONFIG_VL53L0X_NAME);
if (!tof_dev) {
printk("Could not get pointer to %s sensor\n",
CONFIG_VL53L0X_NAME);
return;
}
#endif
#ifdef CONFIG_LSM6DSL_TRIGGER
struct sensor_trigger trig;
trig.type = SENSOR_TRIG_DATA_READY;
trig.chan = SENSOR_CHAN_ACCEL_XYZ;
sensor_trigger_set(accel_dev, &trig, lsm6dsl_trigger_handler);
#endif
while (1) {
#ifdef CONFIG_LPS22HB
struct sensor_value temp, press;
#endif
#ifdef CONFIG_HTS221
struct sensor_value humidity;
#endif
#ifdef CONFIG_LSM6DSL
struct sensor_value accel_x, accel_y, accel_z;
struct sensor_value gyro_x, gyro_y, gyro_z;
#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
struct sensor_value magn_x, magn_y, magn_z;
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
struct sensor_value press, temp;
#endif
#endif
#ifdef CONFIG_VL53L0X
struct sensor_value prox;
#endif
#ifdef CONFIG_VL53L0X
sensor_sample_fetch(tof_dev);
sensor_channel_get(tof_dev, SENSOR_CHAN_PROX, &prox);
printk("proxy: %d ;\n", prox.val1);
sensor_channel_get(tof_dev, SENSOR_CHAN_DISTANCE, &prox);
printk("distance: %d -- %3d mm;\n", prox.val1, prox.val2);
#endif
#ifdef CONFIG_LPS22HB
sensor_sample_fetch(baro_dev);
sensor_channel_get(baro_dev, SENSOR_CHAN_AMBIENT_TEMP, &temp);
sensor_channel_get(baro_dev, SENSOR_CHAN_PRESS, &press);
printk("temp: %d.%02d C; press: %d.%06d\n",
temp.val1, temp.val2, press.val1, press.val2);
#endif
#ifdef CONFIG_HTS221
sensor_sample_fetch(hum_dev);
sensor_channel_get(hum_dev, SENSOR_CHAN_HUMIDITY, &humidity);
printk("humidity: %d.%06d\n",
humidity.val1, humidity.val2);
#endif
#ifdef CONFIG_LSM6DSL
/* lsm6dsl accel */
sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_ACCEL_XYZ);
sensor_channel_get(accel_dev, SENSOR_CHAN_ACCEL_X, &accel_x);
sensor_channel_get(accel_dev, SENSOR_CHAN_ACCEL_Y, &accel_y);
sensor_channel_get(accel_dev, SENSOR_CHAN_ACCEL_Z, &accel_z);
sprintf(out_str, "accel (%f %f %f) m/s2", out_ev(&accel_x),
out_ev(&accel_y),
out_ev(&accel_z));
printk("%s\n", out_str);
/* lsm6dsl gyro */
sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_GYRO_XYZ);
sensor_channel_get(accel_dev, SENSOR_CHAN_GYRO_X, &gyro_x);
sensor_channel_get(accel_dev, SENSOR_CHAN_GYRO_Y, &gyro_y);
sensor_channel_get(accel_dev, SENSOR_CHAN_GYRO_Z, &gyro_z);
sprintf(out_str, "gyro (%f %f %f) dps", out_ev(&gyro_x),
out_ev(&gyro_y),
out_ev(&gyro_z));
printk("%s\n", out_str);
#if defined(CONFIG_LSM6DSL_EXT0_LIS2MDL)
/* lsm6dsl magn */
sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_MAGN_XYZ);
sensor_channel_get(accel_dev, SENSOR_CHAN_MAGN_X, &magn_x);
sensor_channel_get(accel_dev, SENSOR_CHAN_MAGN_Y, &magn_y);
sensor_channel_get(accel_dev, SENSOR_CHAN_MAGN_Z, &magn_z);
sprintf(out_str, "magn (%f %f %f) gauss", out_ev(&magn_x),
out_ev(&magn_y),
out_ev(&magn_z));
printk("%s\n", out_str);
#endif
#if defined(CONFIG_LSM6DSL_EXT0_LPS22HB)
/* lsm6dsl press/temp */
sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_PRESS);
sensor_channel_get(accel_dev, SENSOR_CHAN_PRESS, &press);
sensor_sample_fetch_chan(accel_dev, SENSOR_CHAN_AMBIENT_TEMP);
sensor_channel_get(accel_dev, SENSOR_CHAN_AMBIENT_TEMP, &temp);
sprintf(out_str, "press (%f) kPa - temp (%f) deg",
out_ev(&press), out_ev(&temp));
printk("%s\n", out_str);
#endif
#endif /* CONFIG_LSM6DSL */
printk("- (%d) (trig_cnt: %d)\n\n", ++cnt, lsm6dsl_trig_cnt);
k_sleep(2000);
}
}
| Java |
var searchData=
[
['value',['value',['../structguac__pool__int.html#af76ff5f21c6e0f69d95cdd1385ea24a4',1,'guac_pool_int']]],
['vguac_5fclient_5fabort',['vguac_client_abort',['../client_8h.html#a4c0eccd7d0ed3dbf3e7941ce297e0224',1,'client.h']]],
['vguac_5fclient_5flog',['vguac_client_log',['../client_8h.html#a37a0fa9cfc4c02236085e3852972f494',1,'client.h']]],
['vguac_5fprotocol_5fsend_5flog',['vguac_protocol_send_log',['../protocol_8h.html#a3a783d771e1727ba2a82b2298acf4ee4',1,'protocol.h']]],
['video_5fmimetypes',['video_mimetypes',['../structguac__client__info.html#aa58dc4ee1e3b8801e9b0abbf9135d8b6',1,'guac_client_info']]]
];
| Java |
package org.vertexium.util;
import org.vertexium.Authorizations;
import org.vertexium.Direction;
import org.vertexium.Vertex;
import java.util.Iterator;
public class VerticesToEdgeIdsIterable implements Iterable<String> {
private final Iterable<? extends Vertex> vertices;
private final Authorizations authorizations;
public VerticesToEdgeIdsIterable(Iterable<? extends Vertex> vertices, Authorizations authorizations) {
this.vertices = vertices;
this.authorizations = authorizations;
}
@Override
public Iterator<String> iterator() {
return new SelectManyIterable<Vertex, String>(this.vertices) {
@Override
public Iterable<String> getIterable(Vertex vertex) {
return vertex.getEdgeIds(Direction.BOTH, authorizations);
}
}.iterator();
}
}
| Java |
package com.kit.db;
public class Obj {
}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.test.functions.recompile;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import org.apache.sysds.conf.CompilerConfig;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
import org.apache.sysds.utils.Statistics;
public class FunctionRecompileTest extends AutomatedTestBase
{
private final static String TEST_NAME1 = "funct_recompile";
private final static String TEST_DIR = "functions/recompile/";
private final static String TEST_CLASS_DIR = TEST_DIR + FunctionRecompileTest.class.getSimpleName() + "/";
private final static double eps = 1e-10;
private final static int rows = 20;
private final static int cols = 10;
private final static double sparsity = 1.0;
@Override
public void setUp() {
TestUtils.clearAssertionInformation();
addTestConfiguration(TEST_NAME1,
new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "Rout" }) );
}
@Test
public void testFunctionWithoutRecompileWithoutIPA() {
runFunctionTest(false, false);
}
@Test
public void testFunctionWithoutRecompileWithIPA() {
runFunctionTest(false, true);
}
@Test
public void testFunctionWithRecompileWithoutIPA() {
runFunctionTest(true, false);
}
@Test
public void testFunctionWithRecompileWithIPA() {
runFunctionTest(true, true);
}
private void runFunctionTest( boolean recompile, boolean IPA )
{
boolean oldFlagRecompile = CompilerConfig.FLAG_DYN_RECOMPILE;
boolean oldFlagIPA = OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS;
try
{
TestConfiguration config = getTestConfiguration(TEST_NAME1);
config.addVariable("rows", rows);
config.addVariable("cols", cols);
loadTestConfiguration(config);
String HOME = SCRIPT_DIR + TEST_DIR;
fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
programArgs = new String[]{"-args", input("V"),
Integer.toString(rows), Integer.toString(cols), output("R") };
fullRScriptName = HOME + TEST_NAME1 + ".R";
rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();
long seed = System.nanoTime();
double[][] V = getRandomMatrix(rows, cols, 0, 1, sparsity, seed);
writeInputMatrix("V", V, true);
CompilerConfig.FLAG_DYN_RECOMPILE = recompile;
OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = IPA;
boolean exceptionExpected = false;
runTest(true, exceptionExpected, null, -1);
runRScript(true);
//note: change from previous version due to fix in op selection (unknown size XtX and mapmult)
//CHECK compiled MR jobs
int expectNumCompiled = -1;
if( IPA ) expectNumCompiled = 1; //reblock
			else expectNumCompiled = 5; //reblock, GMR,GMR,GMR,GMR (last two should be piggybacked)
Assert.assertEquals("Unexpected number of compiled MR jobs.",
expectNumCompiled, Statistics.getNoOfCompiledSPInst());
//CHECK executed MR jobs
int expectNumExecuted = -1;
if( recompile ) expectNumExecuted = 0;
else if( IPA ) expectNumExecuted = 1; //reblock
			else expectNumExecuted = 41; //reblock, 10*(GMR,GMR,GMR,GMR) (last two should be piggybacked)
Assert.assertEquals("Unexpected number of executed MR jobs.",
expectNumExecuted, Statistics.getNoOfExecutedSPInst());
//compare matrices
HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromOutputDir("R");
HashMap<CellIndex, Double> rfile = readRMatrixFromExpectedDir("Rout");
TestUtils.compareMatrices(dmlfile, rfile, eps, "DML", "R");
}
finally {
CompilerConfig.FLAG_DYN_RECOMPILE = oldFlagRecompile;
OptimizerUtils.ALLOW_INTER_PROCEDURAL_ANALYSIS = oldFlagIPA;
}
}
}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.jobmanager;
import akka.actor.ActorSystem;
import org.apache.flink.configuration.ConfigConstants;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.akka.AkkaUtils;
import org.apache.flink.runtime.akka.ListeningBehaviour;
import org.apache.flink.runtime.blob.BlobClient;
import org.apache.flink.runtime.blob.BlobKey;
import org.apache.flink.runtime.client.JobExecutionException;
import org.apache.flink.runtime.instance.ActorGateway;
import org.apache.flink.runtime.jobgraph.JobGraph;
import org.apache.flink.runtime.jobgraph.JobVertex;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings;
import org.apache.flink.runtime.jobgraph.tasks.ExternalizedCheckpointSettings;
import org.apache.flink.runtime.jobgraph.tasks.JobCheckpointingSettings;
import org.apache.flink.runtime.leaderretrieval.LeaderRetrievalService;
import org.apache.flink.runtime.messages.JobManagerMessages;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.testtasks.NoOpInvokable;
import org.apache.flink.runtime.util.LeaderRetrievalUtils;
import org.apache.flink.util.NetUtils;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import scala.Tuple2;
import scala.concurrent.Await;
import scala.concurrent.Future;
import scala.concurrent.duration.FiniteDuration;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* Tests that the JobManager handles Jobs correctly that fail in
* the initialization during the submit phase.
*/
public class JobSubmitTest {
private static final FiniteDuration timeout = new FiniteDuration(60000, TimeUnit.MILLISECONDS);
private static ActorSystem jobManagerSystem;
private static ActorGateway jmGateway;
private static Configuration jmConfig;
@BeforeClass
public static void setupJobManager() {
jmConfig = new Configuration();
int port = NetUtils.getAvailablePort();
jmConfig.setString(ConfigConstants.JOB_MANAGER_IPC_ADDRESS_KEY, "localhost");
jmConfig.setInteger(ConfigConstants.JOB_MANAGER_IPC_PORT_KEY, port);
scala.Option<Tuple2<String, Object>> listeningAddress = scala.Option.apply(new Tuple2<String, Object>("localhost", port));
jobManagerSystem = AkkaUtils.createActorSystem(jmConfig, listeningAddress);
// only start JobManager (no ResourceManager)
JobManager.startJobManagerActors(
jmConfig,
jobManagerSystem,
TestingUtils.defaultExecutor(),
TestingUtils.defaultExecutor(),
JobManager.class,
MemoryArchivist.class)._1();
try {
LeaderRetrievalService lrs = LeaderRetrievalUtils.createLeaderRetrievalService(jmConfig);
jmGateway = LeaderRetrievalUtils.retrieveLeaderGateway(
lrs,
jobManagerSystem,
timeout
);
} catch (Exception e) {
fail("Could not retrieve the JobManager gateway. " + e.getMessage());
}
}
@AfterClass
public static void teardownJobmanager() {
if (jobManagerSystem != null) {
jobManagerSystem.shutdown();
}
}
@Test
public void testFailureWhenJarBlobsMissing() {
try {
// create a simple job graph
JobVertex jobVertex = new JobVertex("Test Vertex");
jobVertex.setInvokableClass(NoOpInvokable.class);
JobGraph jg = new JobGraph("test job", jobVertex);
// request the blob port from the job manager
Future<Object> future = jmGateway.ask(JobManagerMessages.getRequestBlobManagerPort(), timeout);
int blobPort = (Integer) Await.result(future, timeout);
			// upload two dummy byte arrays and add their keys to the job graph as dependencies
BlobKey key1, key2;
BlobClient bc = new BlobClient(new InetSocketAddress("localhost", blobPort), jmConfig);
try {
key1 = bc.put(new byte[10]);
key2 = bc.put(new byte[10]);
				// delete one of the blobs so that the subsequent job submission fails
bc.delete(key2);
}
finally {
bc.close();
}
jg.addBlob(key1);
jg.addBlob(key2);
// submit the job
Future<Object> submitFuture = jmGateway.ask(
new JobManagerMessages.SubmitJob(
jg,
ListeningBehaviour.EXECUTION_RESULT),
timeout);
try {
Await.result(submitFuture, timeout);
}
catch (JobExecutionException e) {
// that is what we expect
assertTrue(e.getCause() instanceof IOException);
}
catch (Exception e) {
fail("Wrong exception type");
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
/**
* Verifies a correct error message when vertices with master initialization
* (input formats / output formats) fail.
*/
@Test
public void testFailureWhenInitializeOnMasterFails() {
try {
// create a simple job graph
JobVertex jobVertex = new JobVertex("Vertex that fails in initializeOnMaster") {
private static final long serialVersionUID = -3540303593784587652L;
@Override
public void initializeOnMaster(ClassLoader loader) throws Exception {
throw new RuntimeException("test exception");
}
};
jobVertex.setInvokableClass(NoOpInvokable.class);
JobGraph jg = new JobGraph("test job", jobVertex);
// submit the job
Future<Object> submitFuture = jmGateway.ask(
new JobManagerMessages.SubmitJob(
jg,
ListeningBehaviour.EXECUTION_RESULT),
timeout);
try {
Await.result(submitFuture, timeout);
}
catch (JobExecutionException e) {
// that is what we expect
// test that the exception nesting is not too deep
assertTrue(e.getCause() instanceof RuntimeException);
}
catch (Exception e) {
fail("Wrong exception type");
}
}
catch (Exception e) {
e.printStackTrace();
fail(e.getMessage());
}
}
@Test
public void testAnswerFailureWhenSavepointReadFails() throws Exception {
// create a simple job graph
JobGraph jg = createSimpleJobGraph();
jg.setSavepointRestoreSettings(SavepointRestoreSettings.forPath("pathThatReallyDoesNotExist..."));
// submit the job
Future<Object> submitFuture = jmGateway.ask(
new JobManagerMessages.SubmitJob(jg, ListeningBehaviour.DETACHED), timeout);
Object result = Await.result(submitFuture, timeout);
assertEquals(JobManagerMessages.JobResultFailure.class, result.getClass());
}
private JobGraph createSimpleJobGraph() {
JobVertex jobVertex = new JobVertex("Vertex");
jobVertex.setInvokableClass(NoOpInvokable.class);
List<JobVertexID> vertexIdList = Collections.singletonList(jobVertex.getID());
JobGraph jg = new JobGraph("test job", jobVertex);
jg.setSnapshotSettings(new JobCheckpointingSettings(vertexIdList, vertexIdList, vertexIdList,
5000, 5000, 0L, 10, ExternalizedCheckpointSettings.none(), null, true));
return jg;
}
}
| Java |
#import "DockSquadImporter.h"
@interface DockSquadImporteriOS : DockSquadImporter
@end
| Java |
/*
* Copyright 2010 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.drools.core.base.accumulators;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.io.Serializable;
/**
 * An implementation of an accumulator capable of counting occurrences
*/
public class CountAccumulateFunction extends AbstractAccumulateFunction<CountAccumulateFunction.CountData> {
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
}
public void writeExternal(ObjectOutput out) throws IOException {
}
protected static class CountData implements Externalizable {
public long count = 0;
public CountData() {}
public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
count = in.readLong();
}
public void writeExternal(ObjectOutput out) throws IOException {
out.writeLong(count);
}
}
/* (non-Javadoc)
* @see org.kie.base.accumulators.AccumulateFunction#createContext()
*/
public CountData createContext() {
return new CountData();
}
/* (non-Javadoc)
* @see org.kie.base.accumulators.AccumulateFunction#init(java.lang.Object)
*/
public void init(CountData data) {
data.count = 0;
}
/* (non-Javadoc)
* @see org.kie.base.accumulators.AccumulateFunction#accumulate(java.lang.Object, java.lang.Object)
*/
public void accumulate(CountData data,
Object value) {
data.count++;
}
/* (non-Javadoc)
* @see org.kie.base.accumulators.AccumulateFunction#reverse(java.lang.Object, java.lang.Object)
*/
public void reverse(CountData data,
Object value) {
data.count--;
}
/* (non-Javadoc)
* @see org.kie.base.accumulators.AccumulateFunction#getResult(java.lang.Object)
*/
public Object getResult(CountData data) {
return new Long( data.count );
}
/* (non-Javadoc)
* @see org.kie.base.accumulators.AccumulateFunction#supportsReverse()
*/
public boolean supportsReverse() {
return true;
}
/**
* {@inheritDoc}
*/
public Class< ? > getResultType() {
return Long.class;
}
}
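// Illustrative use of the "count" accumulate function from a DRL rule
// (sketch only -- the fact type and bindings below are assumptions, not part
// of this class):
//
//   rule "count pending orders"
//   when
//     $n : Number() from accumulate( $o : Order( status == "PENDING" ), count( $o ) )
//   then
//     // react to $n
//   end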
| Java |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_51) on Fri Jul 19 02:59:04 EDT 2013 -->
<META http-equiv="Content-Type" content="text/html; charset=utf-8">
<TITLE>
Uses of Class org.apache.solr.client.solrj.response.RangeFacet.Count (Solr 4.4.0 API)
</TITLE>
<META NAME="date" CONTENT="2013-07-19">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.solr.client.solrj.response.RangeFacet.Count (Solr 4.4.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/solr/client/solrj/response//class-useRangeFacet.Count.html" target="_top"><B>FRAMES</B></A>
<A HREF="RangeFacet.Count.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.solr.client.solrj.response.RangeFacet.Count</B></H2>
</CENTER>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Packages that use <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD><A HREF="#org.apache.solr.client.solrj.response"><B>org.apache.solr.client.solrj.response</B></A></TD>
<TD>Convenience classes for dealing with various types of Solr responses. </TD>
</TR>
</TABLE>
<P>
<A NAME="org.apache.solr.client.solrj.response"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
Uses of <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A> in <A HREF="../../../../../../../org/apache/solr/client/solrj/response/package-summary.html">org.apache.solr.client.solrj.response</A></FONT></TH>
</TR>
</TABLE>
<P>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableSubHeadingColor">
<TH ALIGN="left" COLSPAN="2">Methods in <A HREF="../../../../../../../org/apache/solr/client/solrj/response/package-summary.html">org.apache.solr.client.solrj.response</A> that return types with arguments of type <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> <A HREF="http://download.oracle.com/javase/6/docs/api/java/util/List.html?is-external=true" title="class or interface in java.util">List</A><<A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response">RangeFacet.Count</A>></CODE></FONT></TD>
<TD><CODE><B>RangeFacet.</B><B><A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.html#getCounts()">getCounts</A></B>()</CODE>
<BR>
</TD>
</TR>
</TABLE>
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/solr/client/solrj/response/RangeFacet.Count.html" title="class in org.apache.solr.client.solrj.response"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/solr/client/solrj/response//class-useRangeFacet.Count.html" target="_top"><B>FRAMES</B></A>
<A HREF="RangeFacet.Count.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
<i>Copyright © 2000-2013 Apache Software Foundation. All Rights Reserved.</i>
<script src='../../../../../../../prettify.js' type='text/javascript'></script>
<script type='text/javascript'>
(function(){
var oldonload = window.onload;
if (typeof oldonload != 'function') {
window.onload = prettyPrint;
} else {
window.onload = function() {
oldonload();
prettyPrint();
}
}
})();
</script>
</BODY>
</HTML>
| Java |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.operator.scalar.AbstractTestFunctions;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilder;
import com.google.common.net.InetAddresses;
import io.airlift.slice.Slices;
import org.testng.annotations.Test;
import static com.facebook.presto.spi.function.OperatorType.HASH_CODE;
import static com.facebook.presto.spi.function.OperatorType.INDETERMINATE;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
import static com.facebook.presto.spi.type.VarcharType.VARCHAR;
import static com.facebook.presto.type.IpAddressType.IPADDRESS;
import static com.facebook.presto.type.IpPrefixType.IPPREFIX;
import static java.lang.System.arraycopy;
public class TestIpPrefixOperators
extends AbstractTestFunctions
{
@Test
public void testVarcharToIpPrefixCast()
{
assertFunction("CAST('::ffff:1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
assertFunction("CAST('192.168.0.0/24' AS IPPREFIX)", IPPREFIX, "192.168.0.0/24");
assertFunction("CAST('255.2.3.4/0' AS IPPREFIX)", IPPREFIX, "0.0.0.0/0");
assertFunction("CAST('255.2.3.4/1' AS IPPREFIX)", IPPREFIX, "128.0.0.0/1");
assertFunction("CAST('255.2.3.4/2' AS IPPREFIX)", IPPREFIX, "192.0.0.0/2");
assertFunction("CAST('255.2.3.4/4' AS IPPREFIX)", IPPREFIX, "240.0.0.0/4");
assertFunction("CAST('1.2.3.4/8' AS IPPREFIX)", IPPREFIX, "1.0.0.0/8");
assertFunction("CAST('1.2.3.4/16' AS IPPREFIX)", IPPREFIX, "1.2.0.0/16");
assertFunction("CAST('1.2.3.4/24' AS IPPREFIX)", IPPREFIX, "1.2.3.0/24");
assertFunction("CAST('1.2.3.255/25' AS IPPREFIX)", IPPREFIX, "1.2.3.128/25");
assertFunction("CAST('1.2.3.255/26' AS IPPREFIX)", IPPREFIX, "1.2.3.192/26");
assertFunction("CAST('1.2.3.255/28' AS IPPREFIX)", IPPREFIX, "1.2.3.240/28");
assertFunction("CAST('1.2.3.255/30' AS IPPREFIX)", IPPREFIX, "1.2.3.252/30");
assertFunction("CAST('1.2.3.255/32' AS IPPREFIX)", IPPREFIX, "1.2.3.255/32");
assertFunction("CAST('2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
assertFunction("CAST('2001:db8::ff00:42:8329/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
assertFunction("CAST('2001:db8:0:0:1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
assertFunction("CAST('2001:db8:0:0:1::1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
assertFunction("CAST('2001:db8::1:0:0:1/128' AS IPPREFIX)", IPPREFIX, "2001:db8::1:0:0:1/128");
assertFunction("CAST('2001:DB8::FF00:ABCD:12EF/128' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:abcd:12ef/128");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/0' AS IPPREFIX)", IPPREFIX, "::/0");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/1' AS IPPREFIX)", IPPREFIX, "8000::/1");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/2' AS IPPREFIX)", IPPREFIX, "c000::/2");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/4' AS IPPREFIX)", IPPREFIX, "f000::/4");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/8' AS IPPREFIX)", IPPREFIX, "ff00::/8");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/16' AS IPPREFIX)", IPPREFIX, "ffff::/16");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/32' AS IPPREFIX)", IPPREFIX, "ffff:ffff::/32");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/48' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff::/48");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/64' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff::/64");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/80' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff::/80");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/96' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff::/96");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/112' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:0/112");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/120' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00/120");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/124' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0/124");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/126' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffc/126");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/127' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe/127");
assertFunction("CAST('ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128' AS IPPREFIX)", IPPREFIX, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff/128");
assertFunction("IPPREFIX '10.0.0.0/32'", IPPREFIX, "10.0.0.0/32");
assertFunction("IPPREFIX '64:ff9b::10.0.0.0/128'", IPPREFIX, "64:ff9b::a00:0/128");
assertInvalidCast("CAST('facebook.com/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: facebook.com/32");
assertInvalidCast("CAST('localhost/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: localhost/32");
assertInvalidCast("CAST('2001:db8::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:db8::1::1/128");
assertInvalidCast("CAST('2001:zxy::1::1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 2001:zxy::1::1/128");
assertInvalidCast("CAST('789.1.1.1/32' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 789.1.1.1/32");
assertInvalidCast("CAST('192.1.1.1' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1");
assertInvalidCast("CAST('192.1.1.1/128' AS IPPREFIX)", "Cannot cast value to IPPREFIX: 192.1.1.1/128");
}
@Test
public void testIpPrefixToVarcharCast()
{
assertFunction("CAST(IPPREFIX '::ffff:1.2.3.4/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
assertFunction("CAST(IPPREFIX '::ffff:102:304/32' AS VARCHAR)", VARCHAR, "1.2.3.4/32");
assertFunction("CAST(IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS VARCHAR)", VARCHAR, "2001:db8::ff00:42:8329/128");
assertFunction("CAST(IPPREFIX '2001:db8:0:0:1:0:0:1/128' AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
assertFunction("CAST(CAST('1.2.3.4/32' AS IPPREFIX) AS VARCHAR)", VARCHAR, "1.2.3.4/32");
assertFunction("CAST(CAST('2001:db8:0:0:1::1/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "2001:db8::1:0:0:1/128");
assertFunction("CAST(CAST('64:ff9b::10.0.0.0/128' AS IPPREFIX) AS VARCHAR)", VARCHAR, "64:ff9b::a00:0/128");
}
@Test
public void testIpPrefixToIpAddressCast()
{
assertFunction("CAST(IPPREFIX '1.2.3.4/32' AS IPADDRESS)", IPADDRESS, "1.2.3.4");
assertFunction("CAST(IPPREFIX '1.2.3.4/24' AS IPADDRESS)", IPADDRESS, "1.2.3.0");
assertFunction("CAST(IPPREFIX '::1/128' AS IPADDRESS)", IPADDRESS, "::1");
assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/128' AS IPADDRESS)", IPADDRESS, "2001:db8::ff00:42:8329");
assertFunction("CAST(IPPREFIX '2001:db8::ff00:42:8329/64' AS IPADDRESS)", IPADDRESS, "2001:db8::");
}
@Test
public void testIpAddressToIpPrefixCast()
{
assertFunction("CAST(IPADDRESS '1.2.3.4' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
assertFunction("CAST(IPADDRESS '::ffff:102:304' AS IPPREFIX)", IPPREFIX, "1.2.3.4/32");
assertFunction("CAST(IPADDRESS '::1' AS IPPREFIX)", IPPREFIX, "::1/128");
assertFunction("CAST(IPADDRESS '2001:db8::ff00:42:8329' AS IPPREFIX)", IPPREFIX, "2001:db8::ff00:42:8329/128");
}
@Test
public void testEquals()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' = IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '192.168.0.0/32' = IPPREFIX '::ffff:192.168.0.0/32'", BOOLEAN, true);
assertFunction("IPPREFIX '10.0.0.0/32' = IPPREFIX '::ffff:a00:0/32'", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/24' AS IPPREFIX) = IPPREFIX '1.2.3.5/24'", BOOLEAN, true);
assertFunction("IPPREFIX '2001:db8::ff00:42:8329/128' = IPPREFIX '2001:db8::ff00:42:8300/128'", BOOLEAN, false);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) = IPPREFIX '1.2.3.5/32'", BOOLEAN, false);
assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) = IPPREFIX '1.2.0.0/25'", BOOLEAN, false);
}
@Test
public void testDistinctFrom()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, false);
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' IS DISTINCT FROM CAST(NULL AS IPPREFIX)", BOOLEAN, true);
assertFunction("CAST(NULL AS IPPREFIX) IS DISTINCT FROM IPPREFIX '2001:db8::ff00:42:8328/128'", BOOLEAN, true);
}
@Test
public void testNotEquals()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' != IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) != IPPREFIX '1.2.3.4/32'", BOOLEAN, false);
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' <> IPPREFIX '2001:db8::ff00:42:8329/128'", BOOLEAN, false);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) <> CAST('::ffff:1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
}
@Test
public void testOrderOperators()
{
assertFunction("IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128' > IPPREFIX '1.2.3.4/32'", BOOLEAN, true);
assertFunction("IPPREFIX '1.2.3.4/32' > IPPREFIX '2001:0db8:0000:0000:0000:ff00:0042:8329/128'", BOOLEAN, false);
assertFunction("CAST('1.2.3.4/32' AS IPPREFIX) < CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("CAST('1.2.3.5/32' AS IPPREFIX) < CAST('1.2.3.4/32' AS IPPREFIX)", BOOLEAN, false);
assertFunction("CAST('1.2.0.0/24' AS IPPREFIX) < CAST('1.2.0.0/25' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '::1/128' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '1.2.3.5/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, true);
assertFunction("IPPREFIX '1.2.3.6/32' <= CAST('1.2.3.5/32' AS IPPREFIX)", BOOLEAN, false);
assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::/128'", BOOLEAN, true);
assertFunction("IPPREFIX '::1/128' >= IPPREFIX '::1/128'", BOOLEAN, true);
assertFunction("IPPREFIX '::/128' >= IPPREFIX '::1/128'", BOOLEAN, false);
assertFunction("IPPREFIX '::1/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, true);
assertFunction("IPPREFIX '::2222/128' BETWEEN IPPREFIX '::/128' AND IPPREFIX '::1234/128'", BOOLEAN, false);
}
@Test
public void testIndeterminate()
{
assertOperator(INDETERMINATE, "CAST(null AS IPPREFIX)", BOOLEAN, true);
assertOperator(INDETERMINATE, "IPPREFIX '::2222/128'", BOOLEAN, false);
}
@Test
public void testHash()
{
assertOperator(HASH_CODE, "CAST(null AS IPPREFIX)", BIGINT, null);
assertOperator(HASH_CODE, "IPPREFIX '::2222/128'", BIGINT, hashFromType("::2222/128"));
}
private static long hashFromType(String address)
{
BlockBuilder blockBuilder = IPPREFIX.createBlockBuilder(null, 1);
String[] parts = address.split("/");
byte[] bytes = new byte[IPPREFIX.getFixedSize()];
byte[] addressBytes = InetAddresses.forString(parts[0]).getAddress();
arraycopy(addressBytes, 0, bytes, 0, 16);
bytes[IPPREFIX.getFixedSize() - 1] = (byte) Integer.parseInt(parts[1]);
IPPREFIX.writeSlice(blockBuilder, Slices.wrappedBuffer(bytes));
Block block = blockBuilder.build();
return IPPREFIX.hash(block, 0);
}
}
| Java |
---
external help file: Microsoft.Azure.Commands.HDInsight.dll-Help.xml
ms.assetid: 4ED47646-542B-4983-B46B-B603BE33D499
online version:
schema: 2.0.0
---
# New-AzureRmHDInsightSqoopJobDefinition
## SYNOPSIS
Creates a Sqoop job object.
## SYNTAX
```
New-AzureRmHDInsightSqoopJobDefinition [-Files <String[]>] [-StatusFolder <String>] [-File <String>]
[-Command <String>] [-LibDir <String>] [<CommonParameters>]
```
## DESCRIPTION
The **New-AzureRmHDInsightSqoopJobDefinition** cmdlet defines a Sqoop job object for use with an Azure HDInsight cluster.
## EXAMPLES
### Example 1: Create a Sqoop job definition
```
PS C:\># Cluster info
PS C:\>$clusterName = "your-hadoop-001"
PS C:\>$clusterCreds = Get-Credential
PS C:\>New-AzureRmHDInsightSqoopJobDefinition -StatusFolder $statusFolder `
-Command $sqoopCommand `
| Start-AzureRmHDInsightJob -ClusterName $clusterName `
-ClusterCredential $clusterCreds
```
This command creates a Sqoop job definition.
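The example assumes that `$statusFolder` and `$sqoopCommand` were assigned beforehand. A minimal sketch of plausible values follows; the status folder name, JDBC connection string, credentials, and table name are placeholders, not values from the original example:

```
PS C:\>$statusFolder = "sqoopstatus"
PS C:\>$sqoopCommand = "export --connect jdbc:sqlserver://your-sqlserver.database.windows.net:1433;database=sampledb;user=admin;password=<password> --table log4jlogs --export-dir /example/data/sample.log"
```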
## PARAMETERS
### -Command
Specifies the Sqoop command.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -File
Specifies the path to a file that contains the Sqoop command to run.
The file must be available on the Storage account associated with the cluster.
You can use this parameter instead of the *Command* parameter.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -Files
Specifies a collection of files that are associated with a Sqoop job.
```yaml
Type: String[]
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -LibDir
Specifies the library directory for the Sqoop job.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### -StatusFolder
Specifies the location of the folder that contains standard outputs and error outputs for a job.
```yaml
Type: String
Parameter Sets: (All)
Aliases:
Required: False
Position: Named
Default value: None
Accept pipeline input: False
Accept wildcard characters: False
```
### CommonParameters
This cmdlet supports the common parameters: -Debug, -ErrorAction, -ErrorVariable, -InformationAction, -InformationVariable, -OutVariable, -OutBuffer, -PipelineVariable, -Verbose, -WarningAction, and -WarningVariable. For more information, see about_CommonParameters (http://go.microsoft.com/fwlink/?LinkID=113216).
## INPUTS
## OUTPUTS
### Microsoft.Azure.Commands.HDInsight.Models.AzureHDInsightSqoopJobDefinition
## NOTES
## RELATED LINKS
[Start-AzureRmHDInsightJob](./Start-AzureRmHDInsightJob.md)
| Java |
"""
IP Types
"""
import logging
from ipaddress import ip_address
from socket import AF_INET, AF_INET6
from vpp_papi import VppEnum
from vpp_object import VppObject
try:
text_type = unicode
except NameError:
text_type = str
_log = logging.getLogger(__name__)
class DpoProto:
DPO_PROTO_IP4 = 0
DPO_PROTO_IP6 = 1
DPO_PROTO_MPLS = 2
DPO_PROTO_ETHERNET = 3
DPO_PROTO_BIER = 4
DPO_PROTO_NSH = 5
INVALID_INDEX = 0xffffffff
def get_dpo_proto(addr):
if ip_address(addr).version == 6:
return DpoProto.DPO_PROTO_IP6
else:
return DpoProto.DPO_PROTO_IP4
class VppIpAddressUnion():
def __init__(self, addr):
self.addr = addr
self.ip_addr = ip_address(text_type(self.addr))
def encode(self):
if self.version == 6:
return {'ip6': self.ip_addr}
else:
return {'ip4': self.ip_addr}
@property
def version(self):
return self.ip_addr.version
@property
def address(self):
return self.addr
@property
def length(self):
return self.ip_addr.max_prefixlen
@property
def bytes(self):
return self.ip_addr.packed
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.ip_addr == other.ip_addr
elif hasattr(other, "ip4") and hasattr(other, "ip6"):
# vl_api_address_union_t
if 4 == self.version:
return self.ip_addr == other.ip4
else:
return self.ip_addr == other.ip6
else:
raise Exception("Comparing VppIpAddressUnions:%s"
" with incomparable type: %s",
self, other)
def __ne__(self, other):
return not (self == other)
def __str__(self):
return str(self.ip_addr)
class VppIpMPrefix():
def __init__(self, saddr, gaddr, glen):
self.saddr = saddr
self.gaddr = gaddr
self.glen = glen
if ip_address(self.saddr).version != \
ip_address(self.gaddr).version:
raise ValueError('Source and group addresses must be of the '
'same address family.')
def encode(self):
return {
'af': ip_address(self.gaddr).vapi_af,
'grp_address': {
ip_address(self.gaddr).vapi_af_name: self.gaddr
},
'src_address': {
ip_address(self.saddr).vapi_af_name: self.saddr
},
'grp_address_length': self.glen,
}
@property
def length(self):
return self.glen
@property
def version(self):
return ip_address(self.gaddr).version
def __str__(self):
return "(%s,%s)/%d" % (self.saddr, self.gaddr, self.glen)
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.glen == other.glen and
                    self.gaddr == other.gaddr and
self.saddr == other.saddr)
elif (hasattr(other, "grp_address_length") and
hasattr(other, "grp_address") and
hasattr(other, "src_address")):
# vl_api_mprefix_t
if 4 == self.version:
return (self.glen == other.grp_address_length and
self.gaddr == str(other.grp_address.ip4) and
self.saddr == str(other.src_address.ip4))
else:
return (self.glen == other.grp_address_length and
self.gaddr == str(other.grp_address.ip6) and
self.saddr == str(other.src_address.ip6))
return NotImplemented
class VppIpPuntPolicer(VppObject):
def __init__(self, test, policer_index, is_ip6=False):
self._test = test
self._policer_index = policer_index
self._is_ip6 = is_ip6
def add_vpp_config(self):
self._test.vapi.ip_punt_police(policer_index=self._policer_index,
is_ip6=self._is_ip6, is_add=True)
def remove_vpp_config(self):
self._test.vapi.ip_punt_police(policer_index=self._policer_index,
is_ip6=self._is_ip6, is_add=False)
def query_vpp_config(self):
NotImplemented
class VppIpPuntRedirect(VppObject):
def __init__(self, test, rx_index, tx_index, nh_addr):
self._test = test
self._rx_index = rx_index
self._tx_index = tx_index
self._nh_addr = ip_address(nh_addr)
def encode(self):
return {"rx_sw_if_index": self._rx_index,
"tx_sw_if_index": self._tx_index, "nh": self._nh_addr}
def add_vpp_config(self):
self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=True)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.ip_punt_redirect(punt=self.encode(), is_add=False)
def get_vpp_config(self):
is_ipv6 = True if self._nh_addr.version == 6 else False
return self._test.vapi.ip_punt_redirect_dump(
sw_if_index=self._rx_index, is_ipv6=is_ipv6)
def query_vpp_config(self):
if self.get_vpp_config():
return True
return False
class VppIpPathMtu(VppObject):
def __init__(self, test, nh, pmtu, table_id=0):
self._test = test
self.nh = nh
self.pmtu = pmtu
self.table_id = table_id
def add_vpp_config(self):
self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh,
'table_id': self.table_id,
'path_mtu': self.pmtu})
self._test.registry.register(self, self._test.logger)
return self
def modify(self, pmtu):
self.pmtu = pmtu
self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh,
'table_id': self.table_id,
'path_mtu': self.pmtu})
return self
def remove_vpp_config(self):
self._test.vapi.ip_path_mtu_update(pmtu={'nh': self.nh,
'table_id': self.table_id,
'path_mtu': 0})
def query_vpp_config(self):
ds = list(self._test.vapi.vpp.details_iter(
self._test.vapi.ip_path_mtu_get))
for d in ds:
if self.nh == str(d.pmtu.nh) \
and self.table_id == d.pmtu.table_id \
and self.pmtu == d.pmtu.path_mtu:
return True
return False
def object_id(self):
return ("ip-path-mtu-%d-%s-%d" % (self.table_id,
self.nh,
self.pmtu))
def __str__(self):
return self.object_id()
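# Illustrative usage sketch (not part of the original module), assuming a
# VppTestCase instance named "self" as elsewhere in the test framework:
#
#   pmtu = VppIpPathMtu(self, "10.0.0.2", 1300).add_vpp_config()
#   pmtu.modify(1400)                     # raise the path MTU in place
#   self.assertTrue(pmtu.query_vpp_config())
#   pmtu.remove_vpp_config()              # clears the entry (path_mtu 0)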
| Java |
package org.plasma.provisioning.rdb.mysql.v5_5.query;
import org.plasma.provisioning.rdb.mysql.v5_5.TableColumnConstraint;
import org.plasma.query.DataProperty;
import org.plasma.query.Expression;
import org.plasma.query.dsl.DataNode;
import org.plasma.query.dsl.DomainRoot;
import org.plasma.query.dsl.PathNode;
import org.plasma.sdo.helper.PlasmaTypeHelper;
/**
* Generated Domain Specific Language (DSL) implementation class representing
* the domain model entity <b>TableColumnConstraint</b>.
*
* <p>
* </p>
* <b>Data Store Mapping:</b> Corresponds to the physical data store entity
* <b>REFERENTIAL_CONSTRAINTS</b>.
*
*/
public class QTableColumnConstraint extends DomainRoot {
private QTableColumnConstraint() {
super(PlasmaTypeHelper.INSTANCE.getType(TableColumnConstraint.class));
}
/**
* Constructor which instantiates a domain query path node. A path may span
   * multiple namespaces and therefore Java implementation packages based on the
   * <a href=
   * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
   * >Configuration</a>. Note: while this constructor is public, it is not for
* application use!
*
* @param source
* the source path node
* @param sourceProperty
* the source property logical name
*/
public QTableColumnConstraint(PathNode source, String sourceProperty) {
super(source, sourceProperty);
}
/**
* Constructor which instantiates a domain query path node. A path may span
   * multiple namespaces and therefore Java implementation packages based on the
   * <a href=
   * "http://docs.plasma-sdo.org/api/org/plasma/config/PlasmaConfiguration.html"
   * >Configuration</a>. Note: while this constructor is public, it is not for
* application use!
*
* @param source
* the source path node
* @param sourceProperty
* the source property logical name
* @param expr
* the path predicate expression
*/
public QTableColumnConstraint(PathNode source, String sourceProperty, Expression expr) {
super(source, sourceProperty, expr);
}
/**
* Returns a new DSL query for <a
* href="http://docs.plasma-sdo.org/api/org/plasma/sdo/PlasmaType.html"
* >Type</a> <b>TableColumnConstraint</b> which can be used either as a query
* root or as the start (entry point) for a new path predicate expression.
*
* @return a new DSL query
*/
public static QTableColumnConstraint newQuery() {
return new QTableColumnConstraint();
}
/**
* Returns a DSL data element for property, <b>name</b>.
*
* @return a DSL data element for property, <b>name</b>.
*/
public DataProperty name() {
return new DataNode(this, TableColumnConstraint.PROPERTY.name.name());
}
/**
* Returns a DSL data element for property, <b>owner</b>.
*
* @return a DSL data element for property, <b>owner</b>.
*/
public DataProperty owner() {
return new DataNode(this, TableColumnConstraint.PROPERTY.owner.name());
}
/**
* Returns a DSL query element for reference property, <b>table</b>.
*
* @return a DSL query element for reference property, <b>table</b>.
*/
public QTable table() {
return new QTable(this, TableColumnConstraint.PROPERTY.table.name());
}
} | Java |
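// Illustrative sketch (not part of the generated class). It assumes the usual
// Plasma DSL where(...)/eq(...) chaining, which is not defined in this file;
// only newQuery(), name(), owner() and table() are generated above.
//
//   QTableColumnConstraint query = QTableColumnConstraint.newQuery();
//   query.where(query.name().eq("fk_order_customer"));
//   // query.table() then walks the path to the constrained table node.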
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: hapi/chart/metadata.proto
package chart
import proto "github.com/golang/protobuf/proto"
import fmt "fmt"
import math "math"
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
type Metadata_Engine int32
const (
Metadata_UNKNOWN Metadata_Engine = 0
Metadata_GOTPL Metadata_Engine = 1
)
var Metadata_Engine_name = map[int32]string{
0: "UNKNOWN",
1: "GOTPL",
}
var Metadata_Engine_value = map[string]int32{
"UNKNOWN": 0,
"GOTPL": 1,
}
func (x Metadata_Engine) String() string {
return proto.EnumName(Metadata_Engine_name, int32(x))
}
func (Metadata_Engine) EnumDescriptor() ([]byte, []int) {
return fileDescriptor_metadata_d6c714c73a051dcb, []int{1, 0}
}
// Maintainer describes a Chart maintainer.
type Maintainer struct {
// Name is a user name or organization name
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// Email is an optional email address to contact the named maintainer
Email string `protobuf:"bytes,2,opt,name=email,proto3" json:"email,omitempty"`
// Url is an optional URL to an address for the named maintainer
Url string `protobuf:"bytes,3,opt,name=url,proto3" json:"url,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Maintainer) Reset() { *m = Maintainer{} }
func (m *Maintainer) String() string { return proto.CompactTextString(m) }
func (*Maintainer) ProtoMessage() {}
func (*Maintainer) Descriptor() ([]byte, []int) {
return fileDescriptor_metadata_d6c714c73a051dcb, []int{0}
}
func (m *Maintainer) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Maintainer.Unmarshal(m, b)
}
func (m *Maintainer) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Maintainer.Marshal(b, m, deterministic)
}
func (dst *Maintainer) XXX_Merge(src proto.Message) {
xxx_messageInfo_Maintainer.Merge(dst, src)
}
func (m *Maintainer) XXX_Size() int {
return xxx_messageInfo_Maintainer.Size(m)
}
func (m *Maintainer) XXX_DiscardUnknown() {
xxx_messageInfo_Maintainer.DiscardUnknown(m)
}
var xxx_messageInfo_Maintainer proto.InternalMessageInfo
func (m *Maintainer) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Maintainer) GetEmail() string {
if m != nil {
return m.Email
}
return ""
}
func (m *Maintainer) GetUrl() string {
if m != nil {
return m.Url
}
return ""
}
// Metadata for a Chart file. This models the structure of a Chart.yaml file.
//
// Spec: https://k8s.io/helm/blob/master/docs/design/chart_format.md#the-chart-file
type Metadata struct {
// The name of the chart
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// The URL to a relevant project page, git repo, or contact person
Home string `protobuf:"bytes,2,opt,name=home,proto3" json:"home,omitempty"`
// Source is the URL to the source code of this chart
Sources []string `protobuf:"bytes,3,rep,name=sources,proto3" json:"sources,omitempty"`
// A SemVer 2 conformant version string of the chart
Version string `protobuf:"bytes,4,opt,name=version,proto3" json:"version,omitempty"`
// A one-sentence description of the chart
Description string `protobuf:"bytes,5,opt,name=description,proto3" json:"description,omitempty"`
// A list of string keywords
Keywords []string `protobuf:"bytes,6,rep,name=keywords,proto3" json:"keywords,omitempty"`
// A list of name and URL/email address combinations for the maintainer(s)
Maintainers []*Maintainer `protobuf:"bytes,7,rep,name=maintainers,proto3" json:"maintainers,omitempty"`
// The name of the template engine to use. Defaults to 'gotpl'.
Engine string `protobuf:"bytes,8,opt,name=engine,proto3" json:"engine,omitempty"`
// The URL to an icon file.
Icon string `protobuf:"bytes,9,opt,name=icon,proto3" json:"icon,omitempty"`
// The API Version of this chart.
ApiVersion string `protobuf:"bytes,10,opt,name=apiVersion,proto3" json:"apiVersion,omitempty"`
// The condition to check to enable chart
Condition string `protobuf:"bytes,11,opt,name=condition,proto3" json:"condition,omitempty"`
// The tags to check to enable chart
Tags string `protobuf:"bytes,12,opt,name=tags,proto3" json:"tags,omitempty"`
// The version of the application enclosed inside of this chart.
AppVersion string `protobuf:"bytes,13,opt,name=appVersion,proto3" json:"appVersion,omitempty"`
// Whether or not this chart is deprecated
Deprecated bool `protobuf:"varint,14,opt,name=deprecated,proto3" json:"deprecated,omitempty"`
// TillerVersion is a SemVer constraints on what version of Tiller is required.
// See SemVer ranges here: https://github.com/Masterminds/semver#basic-comparisons
TillerVersion string `protobuf:"bytes,15,opt,name=tillerVersion,proto3" json:"tillerVersion,omitempty"`
// Annotations are additional mappings uninterpreted by Tiller,
// made available for inspection by other applications.
Annotations map[string]string `protobuf:"bytes,16,rep,name=annotations,proto3" json:"annotations,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
// KubeVersion is a SemVer constraint specifying the version of Kubernetes required.
KubeVersion string `protobuf:"bytes,17,opt,name=kubeVersion,proto3" json:"kubeVersion,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *Metadata) Reset() { *m = Metadata{} }
func (m *Metadata) String() string { return proto.CompactTextString(m) }
func (*Metadata) ProtoMessage() {}
func (*Metadata) Descriptor() ([]byte, []int) {
return fileDescriptor_metadata_d6c714c73a051dcb, []int{1}
}
func (m *Metadata) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_Metadata.Unmarshal(m, b)
}
func (m *Metadata) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_Metadata.Marshal(b, m, deterministic)
}
func (dst *Metadata) XXX_Merge(src proto.Message) {
xxx_messageInfo_Metadata.Merge(dst, src)
}
func (m *Metadata) XXX_Size() int {
return xxx_messageInfo_Metadata.Size(m)
}
func (m *Metadata) XXX_DiscardUnknown() {
xxx_messageInfo_Metadata.DiscardUnknown(m)
}
var xxx_messageInfo_Metadata proto.InternalMessageInfo
func (m *Metadata) GetName() string {
if m != nil {
return m.Name
}
return ""
}
func (m *Metadata) GetHome() string {
if m != nil {
return m.Home
}
return ""
}
func (m *Metadata) GetSources() []string {
if m != nil {
return m.Sources
}
return nil
}
func (m *Metadata) GetVersion() string {
if m != nil {
return m.Version
}
return ""
}
func (m *Metadata) GetDescription() string {
if m != nil {
return m.Description
}
return ""
}
func (m *Metadata) GetKeywords() []string {
if m != nil {
return m.Keywords
}
return nil
}
func (m *Metadata) GetMaintainers() []*Maintainer {
if m != nil {
return m.Maintainers
}
return nil
}
func (m *Metadata) GetEngine() string {
if m != nil {
return m.Engine
}
return ""
}
func (m *Metadata) GetIcon() string {
if m != nil {
return m.Icon
}
return ""
}
func (m *Metadata) GetApiVersion() string {
if m != nil {
return m.ApiVersion
}
return ""
}
func (m *Metadata) GetCondition() string {
if m != nil {
return m.Condition
}
return ""
}
func (m *Metadata) GetTags() string {
if m != nil {
return m.Tags
}
return ""
}
func (m *Metadata) GetAppVersion() string {
if m != nil {
return m.AppVersion
}
return ""
}
func (m *Metadata) GetDeprecated() bool {
if m != nil {
return m.Deprecated
}
return false
}
func (m *Metadata) GetTillerVersion() string {
if m != nil {
return m.TillerVersion
}
return ""
}
func (m *Metadata) GetAnnotations() map[string]string {
if m != nil {
return m.Annotations
}
return nil
}
func (m *Metadata) GetKubeVersion() string {
if m != nil {
return m.KubeVersion
}
return ""
}
func init() {
proto.RegisterType((*Maintainer)(nil), "hapi.chart.Maintainer")
proto.RegisterType((*Metadata)(nil), "hapi.chart.Metadata")
proto.RegisterMapType((map[string]string)(nil), "hapi.chart.Metadata.AnnotationsEntry")
proto.RegisterEnum("hapi.chart.Metadata_Engine", Metadata_Engine_name, Metadata_Engine_value)
}
func init() { proto.RegisterFile("hapi/chart/metadata.proto", fileDescriptor_metadata_d6c714c73a051dcb) }
var fileDescriptor_metadata_d6c714c73a051dcb = []byte{
// 435 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x6c, 0x52, 0x5d, 0x6b, 0xd4, 0x40,
0x14, 0x35, 0xcd, 0x66, 0x77, 0x73, 0x63, 0x35, 0x0e, 0x52, 0xc6, 0x22, 0x12, 0x16, 0x85, 0x7d,
0xda, 0x82, 0xbe, 0x14, 0x1f, 0x04, 0x85, 0x52, 0x41, 0xbb, 0x95, 0xe0, 0x07, 0xf8, 0x36, 0x4d,
0x2e, 0xdd, 0x61, 0x93, 0x99, 0x30, 0x99, 0xad, 0xec, 0xaf, 0xf0, 0x2f, 0xcb, 0xdc, 0x64, 0x9a,
0xac, 0xf4, 0xed, 0x9e, 0x73, 0x66, 0xce, 0xcc, 0xbd, 0xf7, 0xc0, 0x8b, 0x8d, 0x68, 0xe4, 0x59,
0xb1, 0x11, 0xc6, 0x9e, 0xd5, 0x68, 0x45, 0x29, 0xac, 0x58, 0x35, 0x46, 0x5b, 0xcd, 0xc0, 0x49,
0x2b, 0x92, 0x16, 0x9f, 0x01, 0xae, 0x84, 0x54, 0x56, 0x48, 0x85, 0x86, 0x31, 0x98, 0x28, 0x51,
0x23, 0x0f, 0xb2, 0x60, 0x19, 0xe7, 0x54, 0xb3, 0xe7, 0x10, 0x61, 0x2d, 0x64, 0xc5, 0x8f, 0x88,
0xec, 0x00, 0x4b, 0x21, 0xdc, 0x99, 0x8a, 0x87, 0xc4, 0xb9, 0x72, 0xf1, 0x37, 0x82, 0xf9, 0x55,
0xff, 0xd0, 0x83, 0x46, 0x0c, 0x26, 0x1b, 0x5d, 0x63, 0xef, 0x43, 0x35, 0xe3, 0x30, 0x6b, 0xf5,
0xce, 0x14, 0xd8, 0xf2, 0x30, 0x0b, 0x97, 0x71, 0xee, 0xa1, 0x53, 0xee, 0xd0, 0xb4, 0x52, 0x2b,
0x3e, 0xa1, 0x0b, 0x1e, 0xb2, 0x0c, 0x92, 0x12, 0xdb, 0xc2, 0xc8, 0xc6, 0x3a, 0x35, 0x22, 0x75,
0x4c, 0xb1, 0x53, 0x98, 0x6f, 0x71, 0xff, 0x47, 0x9b, 0xb2, 0xe5, 0x53, 0xb2, 0xbd, 0xc7, 0xec,
0x1c, 0x92, 0xfa, 0xbe, 0xe1, 0x96, 0xcf, 0xb2, 0x70, 0x99, 0xbc, 0x3d, 0x59, 0x0d, 0x23, 0x59,
0x0d, 0xf3, 0xc8, 0xc7, 0x47, 0xd9, 0x09, 0x4c, 0x51, 0xdd, 0x4a, 0x85, 0x7c, 0x4e, 0x4f, 0xf6,
0xc8, 0xf5, 0x25, 0x0b, 0xad, 0x78, 0xdc, 0xf5, 0xe5, 0x6a, 0xf6, 0x0a, 0x40, 0x34, 0xf2, 0x67,
0xdf, 0x00, 0x90, 0x32, 0x62, 0xd8, 0x4b, 0x88, 0x0b, 0xad, 0x4a, 0x49, 0x1d, 0x24, 0x24, 0x0f,
0x84, 0x73, 0xb4, 0xe2, 0xb6, 0xe5, 0x8f, 0x3b, 0x47, 0x57, 0x77, 0x8e, 0x8d, 0x77, 0x3c, 0xf6,
0x8e, 0x9e, 0x71, 0x7a, 0x89, 0x8d, 0xc1, 0x42, 0x58, 0x2c, 0xf9, 0x93, 0x2c, 0x58, 0xce, 0xf3,
0x11, 0xc3, 0x5e, 0xc3, 0xb1, 0x95, 0x55, 0x85, 0xc6, 0x5b, 0x3c, 0x25, 0x8b, 0x43, 0x92, 0x5d,
0x42, 0x22, 0x94, 0xd2, 0x56, 0xb8, 0x7f, 0xb4, 0x3c, 0xa5, 0xe9, 0xbc, 0x39, 0x98, 0x8e, 0xcf,
0xd2, 0xc7, 0xe1, 0xdc, 0x85, 0xb2, 0x66, 0x9f, 0x8f, 0x6f, 0xba, 0x25, 0x6d, 0x77, 0x37, 0xe8,
0x1f, 0x7b, 0xd6, 0x2d, 0x69, 0x44, 0x9d, 0x7e, 0x80, 0xf4, 0x7f, 0x0b, 0x97, 0xaa, 0x2d, 0xee,
0xfb, 0xd4, 0xb8, 0xd2, 0xa5, 0xef, 0x4e, 0x54, 0x3b, 0x9f, 0x9a, 0x0e, 0xbc, 0x3f, 0x3a, 0x0f,
0x16, 0x19, 0x4c, 0x2f, 0xba, 0x05, 0x24, 0x30, 0xfb, 0xb1, 0xfe, 0xb2, 0xbe, 0xfe, 0xb5, 0x4e,
0x1f, 0xb1, 0x18, 0xa2, 0xcb, 0xeb, 0xef, 0xdf, 0xbe, 0xa6, 0xc1, 0xa7, 0xd9, 0xef, 0x88, 0xfe,
0x7c, 0x33, 0xa5, 0xdc, 0xbf, 0xfb, 0x17, 0x00, 0x00, 0xff, 0xff, 0x36, 0xf9, 0x0d, 0xa6, 0x14,
0x03, 0x00, 0x00,
}
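// Illustrative sketch (not part of the generated code): building a Metadata
// value with the fields and the nested Maintainer type defined above. The
// field values are made up for the example.
func exampleChartMetadata() *Metadata {
	return &Metadata{
		Name:       "demo-chart",
		Version:    "0.1.0",
		ApiVersion: "v1",
		Engine:     "gotpl",
		Maintainers: []*Maintainer{
			{Name: "jane", Email: "jane@example.com"},
		},
	}
}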
| Java |
/*
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for
* license information.
*
* Code generated by Microsoft (R) AutoRest Code Generator.
* Changes may cause incorrect behavior and will be lost if the code is
* regenerated.
*/
'use strict';
/**
* @class
* Initializes a new instance of the DataSourceListResult class.
* @constructor
* The list data source by workspace operation response.
*
* @member {string} [nextLink] The link (url) to the next page of datasources.
*
*/
class DataSourceListResult extends Array {
constructor() {
super();
}
/**
* Defines the metadata of DataSourceListResult
*
* @returns {object} metadata of DataSourceListResult
*
*/
mapper() {
return {
required: false,
serializedName: 'DataSourceListResult',
type: {
name: 'Composite',
className: 'DataSourceListResult',
modelProperties: {
value: {
required: false,
serializedName: '',
type: {
name: 'Sequence',
element: {
required: false,
serializedName: 'DataSourceElementType',
type: {
name: 'Composite',
className: 'DataSource'
}
}
}
},
nextLink: {
required: false,
serializedName: 'nextLink',
type: {
name: 'String'
}
}
}
}
};
}
}
module.exports = DataSourceListResult;
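// Illustrative sketch (not part of the generated model): the list result is an
// Array subclass, so a consumer iterates it directly and follows nextLink to
// page further. The client call shown is an assumption, not defined here.
//
//   const page = await client.dataSources.listByWorkspace(resourceGroup, workspaceName);
//   page.forEach(ds => console.log(ds.name));
//   if (page.nextLink) { /* request the next page using nextLink */ }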
| Java |
<link rel="import" href="../../../../../bower_components/paper-spinner/paper-spinner.html">
<dom-module id="nav-level-element">
<style>
:host{
display: block;
height: 100%;
}
.circle {
width: 25px;
height: 25px;
-moz-border-radius: 15px;
-webkit-border-radius: 15px;
border-radius: 15px;
}
.empty{
line-height:25px;
}
.current{
background: #87CEFA;
}
.green{
background: #86C67C;
}
.gray{
border: 1px solid black;
}
.wrapper{
text-align:center;
margin: 7px;
}
</style>
<template>
<div class="wrapper" on-click="_onClick">
<div class$="{{divClass}}">
<span class="empty"></span>
</div>
</div>
</template>
<script>
Polymer({
is: 'nav-level-element',
properties:{
divClass: {
type: String,
notify: true,
value: 'circle gray'
},
state: {
value: 'default'
}
},
observers: [
'_stateChanged(state)'
],
_onClick: function(){
this.fire('task-switch', this.index);
},
_stateChanged: function(state){
if(state === 'completed'){
this.divClass = 'circle green';
}else if(state === 'current'){
this.divClass = 'circle current';
}else{
this.divClass = 'circle gray';
}
},
setState: function(state){
this.state = state;
}
});
</script>
</dom-module>
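<!-- Illustrative usage sketch (not part of the element definition): the host
     page stamps one element per task level, drives the colour via setState()
     and listens for the 'task-switch' event fired on tap.

     <nav-level-element id="level1"></nav-level-element>
     <script>
       var el = document.querySelector('#level1');
       el.setState('current');                     // blue "current" circle
       el.addEventListener('task-switch', function (e) {
         console.log('switch to task', e.detail);  // the index passed to fire()
       });
     </script>
-->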
| Java |
#--------------------------------------------------------------------
#----- GRNOC TSDS Aggregation DataService Library
#-----
#----- Copyright(C) 2015 The Trustees of Indiana University
#--------------------------------------------------------------------
#----- $HeadURL: svn+ssh://svn.grnoc.iu.edu/grnoc/tsds/services/trunk/lib/GRNOC/TSDS/DataService/Aggregation.pm $
#----- $Id: Aggregation.pm 35325 2015-02-13 19:15:28Z mj82 $
#-----
#----- This module inherits the base GRNOC::TSDS::DataService class
#----- and provides all of the methods to interact with aggregations
#--------------------------------------------------------------------
package GRNOC::TSDS::DataService::Aggregation;
use strict;
use warnings;
use base 'GRNOC::TSDS::DataService';
use GRNOC::Log;
use GRNOC::TSDS::MongoDB;
use GRNOC::TSDS::Parser;
use Tie::IxHash;
use DateTime;
use DateTime::Format::Strptime;
use Data::Dumper;
use JSON qw( decode_json );
### constants ###
use constant DEFAULT_COLLECTIONS => ['data', 'measurements', 'metadata', 'aggregate', 'expire'];
# this will hold the only actual reference to this object
my $singleton;
sub new {
my $caller = shift;
my $class = ref( $caller );
$class = $caller if ( !$class );
# if we've created this object (singleton) before, just return it
return $singleton if ( defined( $singleton ) );
my $self = $class->SUPER::new( @_ );
bless( $self, $class );
# store our newly created object as the singleton
$singleton = $self;
$self->parser( GRNOC::TSDS::Parser->new( @_ ) );
return $self;
}
# GET METHODS
sub get_aggregations {
my ( $self, %args ) = @_;
my $meta_fields;
my @aggregation_fields;
my $measurement_type = $args{'measurement_type'};
my $aggregate_collection = $self->mongo_ro()->get_collection($measurement_type, "aggregate");
if (! $aggregate_collection ) {
$self->error( 'Invalid Measurement Type.' );
return;
}
my $aggregates = $aggregate_collection->find();
if (! $aggregates ) {
$self->error( 'Invalid Measurement Type: no aggregations found.' );
return;
}
my @aggregate_results = @{$self->_get_agg_exp_fields($aggregates)};
my @new_results = sort by_blanklast ( @aggregate_results );
return \@new_results;
}
sub get_expirations {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $expiration_collection = $self->mongo_ro()->get_collection($measurement_type, "expire");
if (! $expiration_collection ) {
$self->error( 'Invalid Measurement Type.' );
return;
}
my $expirations = $expiration_collection->find();
if (! $expirations ) {
$self->error( 'Invalid Measurement Type: no expirations found.' );
return;
}
my @expiration_results = @{$self->_get_agg_exp_fields($expirations)};
my @new_results = sort by_blanklast ( @expiration_results );
return \@new_results;
}
# UPDATE METHOD
sub update_aggregations {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $meta = $args{'meta'};
my $name = $args{'name'};
my $new_name = $args{'new_name'};
my $max_age = $args{'max_age'};
my $eval_position = $args{'eval_position'};
my $values = $args{'values'};
# convert numeric params to ints
$eval_position = int $eval_position if(defined($eval_position));
$max_age = int $max_age if(defined($max_age));
my $query = {'name'=> $name};
if (!defined($name) || $name eq '') {
$self->error("You must specify a name to update an aggregation/expiration.");
return;
}
if (exists($args{'new_name'}) && (!defined($new_name) || $new_name eq '')) {
$self->error("You must enter text for the new_name field");
return;
}
if (defined($values)){
return if (!$self->_validate_values($values, $measurement_type));
}
# get the aggregate collection
my $agg_col = $self->mongo_rw()->get_collection($measurement_type, "aggregate");
if(!$agg_col){
$self->error($self->mongo_rw()->error());
return;
}
# make sure this aggregate record exists
if(!$self->_agg_exp_exists( col => $agg_col, name => $name )){
$self->error("Aggregation named $name doesn't exist");
return;
}
# reorder eval positions
if(defined($eval_position)){
my $position_res = $self->_update_eval_positions(
collection => $agg_col,
name => $name,
eval_position => $eval_position
);
}
my $set = {};
my $id;
$set->{'meta'} = $meta if(exists($args{'meta'}));
$set->{'values'} = $values if(exists($args{'values'}));
$set->{'name'} = $new_name if(exists($args{'new_name'}));
if(!%$set && !exists($args{'eval_position'})){
$self->error( "You must pass in at least 1 field to update" );
return;
}
if(%$set){
$id = $agg_col->update_one($query, { '$set' => $set } );
if(!$id) {
$self->error( "Error updating values in aggregate with name $name");
return;
}
}
return [{ 'success' => 1 }];
}
sub update_expirations {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $meta = $args{'meta'};
my $name = $args{'name'};
my $new_name = $args{'new_name'};
my $max_age = $args{'max_age'};
my $eval_position = $args{'eval_position'};
my $values = $args{'values'};
# convert numeric params to ints
$eval_position = int $eval_position if(defined($eval_position));
$max_age = int $max_age if(defined($max_age));
if (!defined($name) || $name eq '') {
$self->error("You must specify a name to update an aggregation/expiration.");
return;
}
if (exists($args{'new_name'}) && (!defined($new_name) || $new_name eq '')) {
$self->error("You must enter text for the new_name field");
return;
}
# get the expire collection
my $exp_col = $self->mongo_rw()->get_collection($measurement_type, "expire");
if(!$exp_col){
$self->error($self->mongo_rw()->error());
return;
}
# make sure this aggregate record exists
if(!$self->_agg_exp_exists( col => $exp_col, name => $name )){
$self->error("Expiration named $name doesn't exist");
return;
}
# reorder eval positions
if(defined($eval_position)){
my $position_res = $self->_update_eval_positions(
collection => $exp_col,
name => $name,
eval_position => $eval_position
);
}
    # figure out which fields we're modifying for the expire record
my $set = {};
$set->{'meta'} = $meta if(exists($args{'meta'}));
$set->{'max_age'} = $max_age if(exists($args{'max_age'}));
$set->{'name'} = $new_name if(exists($args{'new_name'}));
# if it's the default expire record don't allow them to edit anything but max_age
if($name eq 'default'){
foreach my $field (keys %$set){
if($field ne 'max_age'){
$self->error( "You can only edit the max_age on the default expire record");
return;
}
}
}
if(%$set){
my $id = $exp_col->update_one({ name => $name }, { '$set' => $set } );
if(!$id) {
$self->error( "Error updating values in expiration with name $name");
return;
}
}
return [{ 'success' => 1 }];
}
# INSERT METHOD
sub add_aggregation {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $interval = $args{'interval'};
my $meta = $args{'meta'};
my $name = $args{'name'};
my $values = $args{'values'};
#sanity checks
if (!defined($name) || $name eq '') {
$self->error("You must specify a name for the aggregation.");
return;
}
if (defined($values)){
return if (! $self->_validate_values($values, $measurement_type));
}
if(!defined($interval)){
$self->error("You must specify an interval to aggregate the data on");
return;
}
my $set = {};
$set->{'interval'} = int($interval) if(defined($interval));
$set->{'name'} = $name if(defined($name));
$set->{'values'} = $values if(defined($values));
# meta might not be passed in, it needs to be set to empty object to avoid problem with deletion
if(defined($meta)) {
$set->{'meta'} = $meta;
}
else {
$set->{'meta'} = "{}";
}
# get the aggregate collections
my $agg_col = $self->mongo_rw()->get_collection($measurement_type, "aggregate");
if(!$agg_col){
$self->error($self->mongo_rw()->error());
return;
}
    # make sure this aggregation doesn't already exist
    if($self->_agg_exp_exists( col => $agg_col, name => $name )){
        $self->error("Aggregation named, $name, already exists");
return;
}
# figure out the highest eval_position currently used (if any)
my $highest_eval_position = $self->_agg_highest_eval_position( col => $agg_col );
my $new_eval_position = $highest_eval_position + 10;
$set->{'eval_position'} = $new_eval_position;
# create the data_[interval] collection
if(!$self->mongo_root()->add_collection_shard( $measurement_type, "data_$interval" , $GRNOC::TSDS::MongoDB::DATA_SHARDING )){
$self->error( "Error adding collection shard for data_$interval measurement_type: ".$self->mongo_rw()->error() );
return;
}
my $agg_data_col = $self->mongo_rw()->get_collection( $measurement_type, "data_$interval", create => 1 );
my $indexes = $agg_data_col->indexes();
$indexes->create_one([start => 1]);
$indexes->create_one([end => 1]);
$indexes->create_one([updated => 1, identifier => 1]);
$indexes->create_one([identifier => 1, start => 1, end => 1]);
my $id = $agg_col->insert_one($set);
if(!$id) {
$self->error( "Error inserting values in aggregate with interval $interval and meta $meta");
return;
}
return [{ 'success' => 1 }];
}
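# Illustrative call (not part of the module): arguments as the webservice layer
# would pass them. The measurement type name is an assumption; 'meta' is a JSON
# string and 'values' uses the hist_res / hist_min_width keys checked by
# _validate_values() below.
#
#   $self->add_aggregation(
#       measurement_type => 'interface',
#       name             => 'one_hour',
#       interval         => 3600,
#       meta             => '{}',
#       values           => { input => { hist_res => 0.1 } },
#   );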
sub add_expiration {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $interval = $args{'interval'};
my $meta = $args{'meta'};
my $name = $args{'name'};
my $max_age = $args{'max_age'};
#sanity checks
if (!defined($name) || $name eq '') {
$self->error("You must specify a name for the expiration.");
return;
}
if(!defined($max_age)){
$self->error("You must specify the max_age of the data of the expiration.");
return;
}
my $set = {};
$set->{'interval'} = int($interval) if(defined($interval));
$set->{'meta'} = $meta if(defined($meta));
$set->{'name'} = $name if(defined($name));
$set->{'max_age'} = int($max_age) if(defined($max_age));
# if they've set an interval make sure an aggregation with the same interval exists
    # (we can't expire aggregated data that doesn't exist)
if(defined($interval)){
my $found_interval = 0;
my $aggregations = $self->get_aggregations( measurement_type => $measurement_type );
foreach my $aggregation (@$aggregations){
next if($aggregation->{'interval'} ne $interval);
$found_interval = 1;
last;
}
if(!$found_interval){
$self->error("Can not add expiration at interval $interval. There must be an aggregation at interval, $interval to expire");
return;
}
}
my $exp_col = $self->mongo_rw()->get_collection($measurement_type, "expire");
if(!$exp_col){
$self->error($self->mongo_rw()->error());
return;
}
    # make sure this expiration doesn't already exist
    if($self->_agg_exp_exists( col => $exp_col, name => $name )){
        $self->error("Expiration named, $name, already exists");
return;
}
# figure out the highest eval_position currently used (if any)
my $highest_eval_position = $self->_agg_highest_eval_position( col => $exp_col );
my $new_eval_position = $highest_eval_position + 10;
$set->{'eval_position'} = $new_eval_position;
my $id = $exp_col->insert_one( $set );
if(!$id) {
$self->error( "Error inserting values in expiration with interval $interval and meta $meta");
return;
}
return [{ 'success' => 1 }];
}
sub _agg_exp_exists {
my ( $self, %args ) = @_;
my $col = $args{'col'};
my $name = $args{'name'};
# make sure a agg doesn't already exist with this name
my $count = $col->count({ name => $name });
return 1 if $count;
return 0;
}
sub _agg_highest_eval_position {
my ( $self, %args ) = @_;
my $col = $args{'col'};
my @aggregates = $col->find( {} )->all();
my $highest_eval_position = 0;
foreach my $aggregate ( @aggregates ) {
my $eval_position = $aggregate->{'eval_position'};
if ( $eval_position && $eval_position > $highest_eval_position ) {
$highest_eval_position = $eval_position;
}
}
return $highest_eval_position;
}
# DELETE METHOD
sub delete_aggregations {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $name = $args{'name'};
# sanity checks
if (!defined($name) || $name eq '') {
$self->error("You must specify a name to delete an aggregation/expiration.");
return;
}
# get the aggregate collection
my $agg_col = $self->mongo_rw()->get_collection($measurement_type, "aggregate");
if(!$agg_col){
$self->error($self->mongo_rw()->error());
return;
}
# make sure the aggregate rule with this name exists
if(!$self->_agg_exp_exists( col => $agg_col, name => $name )){
$self->error("Aggregation named, $name, doesn't exist");
return;
}
    # look up the interval and its data_$interval collection
my $interval = $agg_col->find({ name => $name })->next()->{'interval'};
my $agg_data_col = $self->mongo_rw()->get_collection($measurement_type, "data_$interval");
    # now delete the relevant data from the aggregate data collection and possibly the whole
    # collection if no data is left after the delete
$self->_delete_aggregation_data(
interval => $interval,
measurement_type => $measurement_type,
agg_col => $agg_col,
agg_data_col => $agg_data_col,
name => $name
) || return;
# remove the aggregate rule from the collection
my $id = $agg_col->delete_one({name => $name});
if(!$id) {
$self->error( "Error removing aggregate rule for $name.");
return;
}
# get the related expire rule and remove it from the expire collection
my $exp_col = $self->mongo_rw()->get_collection($measurement_type, "expire");
if(!$exp_col){
$self->error($self->mongo_rw()->error());
return;
}
$id = $exp_col->delete_one({ name => $name });
if(!$id) {
$self->error( "Error removing values from expiration with name $name.");
return;
}
return [{ 'success' => 1 }];
}
sub _delete_aggregation_data {
my ( $self, %args ) = @_;
my $interval = $args{'interval'};
my $measurement_type = $args{'measurement_type'};
my $agg_col = $args{'agg_col'};
my $agg_data_col = $args{'agg_data_col'};
my $name = $args{'name'}; # the name of the aggregation being deleted
# build an array of all of the meta data from the aggregations we're not deleting
# within this interval
my $nor = [];
my $cur = $agg_col->find({});
while (my $agg = $cur->next()) {
        next if($name eq $agg->{'name'}); # skip the aggregation being deleted
next if($interval ne $agg->{'interval'});
my $meta;
eval {
$meta = decode_json( $agg->{'meta'} );
};
if($@){
$self->error("Problem decoding meta scope for aggregate ".$agg->{'name'}.": $@");
return;
}
push(@$nor, $meta);
}
# grab the measurement collection for this measurement_type
my $meas_col = $self->mongo_rw()->get_collection($measurement_type, "aggregate");
if(!$meas_col){
$self->error($self->mongo_rw()->error());
return;
}
# now find all the identifiers that do not match that meta data
# of the remaining aggregations
my $ids = [];
if(@$nor){
$cur = $meas_col->find({ '$nor' => $nor }, { identifier => 1 });
while (my $meas = $cur->next()) {
push(@$ids, $meas->{'identifier'});
}
}
    # if there are other aggregations besides the one we are deleting
# delete everything in data_$interval that doesn't match their metadata scope
if(@$ids){
my $res = $agg_data_col->delete_many({ identifier => { '$in' => $ids } });
if(!$res) {
$self->error( "Error removing values from aggregate with name $name.");
return;
}
}
    # if there's no data left in the agg data collection, drop it
if ($agg_data_col->count({}) == 0) {
$agg_data_col->drop();
}
return 1;
}
sub delete_expirations {
my ( $self, %args ) = @_;
my $measurement_type = $args{'measurement_type'};
my $name = $args{'name'};
# sanity checks
if (!defined($name) || $name eq '') {
$self->error("You must specify a name to delete an aggregation/expiration.");
return;
}
if ($name eq 'default'){
$self->error("You can not delete the default expire rule.");
return;
}
# get the expire rule and remove it from the expire collection
my $exp_col = $self->mongo_rw()->get_collection($measurement_type, "expire");
if(!$exp_col){
$self->error($self->mongo_rw()->error());
return;
}
    # make sure the expire rule with this name exists
    if(!$self->_agg_exp_exists( col => $exp_col, name => $name )){
        $self->error("Expiration named, $name, doesn't exist");
return;
}
my $id = $exp_col->delete_one({name => $name});
if(!$id) {
$self->error( "Error removing values from expiration with name $name.");
return;
}
return [{ 'success' => 1 }];
}
sub _get_agg_exp_fields {
my ($self, $cursor) = @_;
my @results = ();
while (my $doc = $cursor->next()) {
my %row;
foreach my $key (keys %$doc) {
next if $key eq '_id';
my $value = $doc->{$key};
$row{$key} = $value;
}
push @results, \%row;
}
return \@results;
}
sub _update_eval_positions {
my ($self, %args) = @_;
my $col = $args{'collection'};
my $name = $args{'name'};
my $new_eval_position = $args{'eval_position'} || 10;
my $query = {'name' => $name};
my $old_eval_position = $self->_get_eval_position( col => $col, name => $name);
# see if there is another rule with the same eval_position
my $same_eval_position = $self->_eval_position_in_use(
'eval_position' => $new_eval_position,
        'name' => $name,
'col' => $col
);
    # if this eval position isn't in use by another rule and isn't changing, nothing to do
if (!$same_eval_position && ($old_eval_position == $new_eval_position)) {
return { 'success' => 1 };
}
# see if there are values (other than this one) that
# lack eval_positions
my $has_empty_values = $self->_has_null_eval_positions(
'name' => $name,
'col' => $col
);
# if there is no conflict, and there are no other null values,
# just update the current rule
if (!$same_eval_position && !$has_empty_values) {
my $result = $self->_set_eval_position(
'eval_position' => $new_eval_position,
'name' => $name,
'col' => $col
);
# if there is a conflict, we need to reorder
} else {
my $result = $self->_recalculate_eval_positions(
'old_eval_position' => $old_eval_position,
'new_eval_position' => $new_eval_position,
'name' => $name,
'col' => $col
);
}
}
sub _has_null_eval_positions {
my ($self, %args) = @_;
my $name = $args{'name'};
my $col = $args{'col'};
my $query = { 'eval_position' => { '$exists' => 0 }, 'name' => { '$ne' => $name } };
if ($col->count($query)) {
return 1;
}
return 0;
}
sub _recalculate_eval_positions {
my ( $self, %args ) = @_;
my $new_eval_position = $args{'new_eval_position'};
my $old_eval_position = $args{'old_eval_position'};
my $name = $args{'name'};
my $col = $args{'col'};
my $query = { 'name' => $name };
    # these are the other aggregations that didn't get updated / aren't getting their position replaced
my $other_cur = $col->find( { 'eval_position' => {'$ne' => $new_eval_position},
'name' => {'$ne' => $name} } );
my $other_docs = [];
    # detect error from the find() above
    return if ( !defined( $other_cur ) );
while (my $doc = $other_cur->next()) {
push @$other_docs, $doc;
}
    # get the other aggregations in the table that are getting their position replaced
my $replaced_docs = [];
my $replaced_cur = $col->find( {'eval_position' => $new_eval_position, 'name' => {'$ne' => $name} } );
while (my $doc = $replaced_cur->next()) {
        push @$replaced_docs, $doc;
}
# detect error
return if ( !defined( $replaced_cur ) );
    my $updated_doc = $col->find_one( $query );
    # detect error before modifying the document
    return if ( !defined( $updated_doc ) );
    $updated_doc->{'eval_position'} = $new_eval_position;
    # does the updated rule need to go *below* the rule it's taking place of? (dragging down
# or there is no old eval position)
if (defined($old_eval_position) && $new_eval_position > $old_eval_position ) {
push( @$replaced_docs, $updated_doc );
} else {
        # the updated rule needs to go *above* the rule it's taking place of. (dragging up)
unshift( @$replaced_docs, $updated_doc );
}
# generate the new full list in the correct order
my @new_list = sort by_blanklast ( @$other_docs, @$replaced_docs );
# update every rule's eval_position from 10 .. based upon the new order
my $i = 10;
foreach my $rule ( @new_list ) {
#warn 'updating ' . $rule->{'name'} . ' to eval position: ' . $i;
my $update_query = { 'name' => $rule->{'name'} };
my $set = { 'eval_position' => $i };
my $exp_res = $col->update_one($update_query, {'$set' => $set });
$i += 10;
}
}
sub _set_eval_position {
my ( $self, %args ) = @_;
my $eval_position = $args{'eval_position'};
my $name = $args{'name'};
my $col = $args{'col'};
my $query = { 'name' => $name };
my $set = { 'eval_position' => $eval_position };
my $exp_res = $col->update_one($query, { '$set' => $set });
if (!$exp_res) {
return 0;
}
return 1;
}
sub _eval_position_in_use {
my ( $self, %args ) = @_;
my $eval_position = $args{'eval_position'};
my $name = $args{'name'};
my $col = $args{'col'};
my $query = { 'eval_position' => $eval_position, 'name' => {'$ne' => $name} };
    my $in_use = $col->count($query);
return $in_use;
}
sub _get_eval_position {
    my ( $self, %args ) = @_;
my $col = $args{'col'};
my $name = $args{'name'};
# make sure the collection/name exists
my $result = $col->find_one({ name => $name });
if(!$result){
return;
}
my $eval_position = $result->{'eval_position'};
return $eval_position;
}
sub _validate_values {
my $self = shift;
my $obj = shift;
my $type = shift;
if (ref $obj ne 'HASH'){
$self->error("values must be an object");
return;
}
my $metadata = $self->mongo_rw()->get_collection($type, 'metadata');
if (! $metadata){
$self->error($self->mongo_rw()->error());
return;
}
$metadata = $metadata->find_one();
# Make sure each value exists and that the values we're passing
# in for aggregation configuration make sense
foreach my $value_name (keys %$obj){
if (! exists $metadata->{'values'}{$value_name}){
$self->error("Unknown value \"$value_name\"");
return;
}
foreach my $key (keys %{$obj->{$value_name}}){
my $key_value = $obj->{$value_name}{$key};
# Make sure we only passed in keys that we know about
if ($key ne 'hist_res' && $key ne 'hist_min_width'){
$self->error("Unknown value field \"$key\" for value \"$value_name\"");
return;
}
# A null value is okay
if (! defined $key_value || $key_value eq ''){
$obj->{$value_name}{$key} = undef;
}
# Make sure they are numbers
else {
if ($key_value !~ /^\d+(\.\d+)?$/){
$self->error("Value field \"$key\" for value \"$value_name\" must be a number");
return;
}
# Make sure the fields are sane
if ($key eq 'hist_res'){
if ($key_value >= 100 || $key_value <= 0){
$self->error("hist_res for value \"$value_name\" must be between 0 and 100");
return;
}
}
elsif ($key eq 'hist_min_width'){
if ($key_value <= 0){
$self->error("hist_min_width for value \"$value_name\" must be greater than 0");
return;
}
}
}
}
}
return 1;
}
# sort by eval_position, putting the rows that lack an eval_position
# at the bottom
sub by_blanklast {
# if only one object doesn't have an eval position set put the object
# without an eval position at the end
if (!exists($a->{'eval_position'}) ^ !exists($b->{'eval_position'})){
return exists($b->{'eval_position'}) - exists($a->{'eval_position'});
}
# if both objects don't have an eval position set sort by name
elsif(!exists($a->{'eval_position'}) && !exists($b->{'eval_position'})){
return $a->{'name'} cmp $b->{'name'};
}
# otherwise just sort by the eval position
return $a->{'eval_position'} cmp $b->{'eval_position'};
}
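# Worked example (illustrative): rules with eval_position 10 and 30 plus two
# rules without one named "beta" and "alpha" sort as (10, 30, "alpha", "beta"):
# positioned rules first in order, blanks last sorted by name.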
1;
| Java |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.problems;
import org.apache.flex.compiler.common.ISourceLocation;
/**
* This problem gets created when a variable has two Embed meta data tags
* associated with it.
*/
public final class EmbedMultipleMetaTagsProblem extends CompilerProblem
{
public static final String DESCRIPTION =
"A variable can only only have one [${EMBED}] metadata tag";
public static final int errorCode = 1344;
public EmbedMultipleMetaTagsProblem(ISourceLocation site)
{
super(site);
}
// Prevent these from being localized.
public final String EMBED = "Embed";
}
| Java |
/*
* Copyright 2022 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.server.materials;
import com.thoughtworks.go.config.CruiseConfig;
import com.thoughtworks.go.config.PipelineConfig;
import com.thoughtworks.go.config.materials.dependency.DependencyMaterial;
import com.thoughtworks.go.config.remote.ConfigRepoConfig;
import com.thoughtworks.go.domain.materials.Material;
import com.thoughtworks.go.domain.packagerepository.PackageDefinition;
import com.thoughtworks.go.domain.packagerepository.PackageRepository;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.listener.ConfigChangedListener;
import com.thoughtworks.go.listener.EntityConfigChangedListener;
import com.thoughtworks.go.server.service.GoConfigService;
import com.thoughtworks.go.server.service.MaterialConfigConverter;
import com.thoughtworks.go.util.SystemEnvironment;
import org.slf4j.Logger;
import org.joda.time.DateTimeUtils;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
/**
* Provides a list of unique SCMMaterials to be updated which will be consumed by MaterialUpdateService
*/
@Component
public class SCMMaterialSource extends EntityConfigChangedListener<ConfigRepoConfig> implements ConfigChangedListener, MaterialSource, MaterialUpdateCompleteListener {
private static final Logger LOGGER = LoggerFactory.getLogger(SCMMaterialSource.class);
private final GoConfigService goConfigService;
private ConcurrentMap<Material, Long> materialLastUpdateTimeMap = new ConcurrentHashMap<>();
private final MaterialConfigConverter materialConfigConverter;
private final MaterialUpdateService materialUpdateService;
private final long materialUpdateInterval;
private Set<Material> schedulableMaterials;
@Autowired
public SCMMaterialSource(GoConfigService goConfigService, SystemEnvironment systemEnvironment,
MaterialConfigConverter materialConfigConverter, MaterialUpdateService materialUpdateService) {
this.goConfigService = goConfigService;
this.materialConfigConverter = materialConfigConverter;
this.materialUpdateService = materialUpdateService;
this.materialUpdateInterval = systemEnvironment.getMaterialUpdateIdleInterval();
}
public void initialize() {
goConfigService.register(this);
goConfigService.register(new InternalConfigChangeListener() {
@Override
public void onEntityConfigChange(Object entity) {
updateSchedulableMaterials(true);
}
});
materialUpdateService.registerMaterialSources(this);
materialUpdateService.registerMaterialUpdateCompleteListener(this);
}
@Override
public Set<Material> materialsForUpdate() {
updateSchedulableMaterials(false);
return materialsWithUpdateIntervalElapsed();
}
@Override
public void onMaterialUpdate(Material material) {
if (!(material instanceof DependencyMaterial)) {
updateLastUpdateTimeForScmMaterial(material);
}
}
@Override
public void onConfigChange(CruiseConfig newCruiseConfig) {
updateSchedulableMaterials(true);
}
@Override
public void onEntityConfigChange(ConfigRepoConfig entity) {
updateSchedulableMaterials(true);
}
protected EntityConfigChangedListener<PipelineConfig> pipelineConfigChangedListener() {
final SCMMaterialSource self = this;
return new EntityConfigChangedListener<PipelineConfig>() {
@Override
public void onEntityConfigChange(PipelineConfig pipelineConfig) {
self.onConfigChange(null);
}
};
}
private Set<Material> materialsWithUpdateIntervalElapsed() {
Set<Material> materialsForUpdate = new HashSet<>();
for (Material material : schedulableMaterials) {
if (hasUpdateIntervalElapsedForScmMaterial(material)) {
materialsForUpdate.add(material);
}
}
return materialsForUpdate;
}
boolean hasUpdateIntervalElapsedForScmMaterial(Material material) {
Long lastMaterialUpdateTime = materialLastUpdateTimeMap.get(material);
if (lastMaterialUpdateTime != null) {
boolean shouldUpdateMaterial = (DateTimeUtils.currentTimeMillis() - lastMaterialUpdateTime) >= materialUpdateInterval;
if (LOGGER.isDebugEnabled() && !shouldUpdateMaterial) {
LOGGER.debug("[Material Update] Skipping update of material {} which has been last updated at {}", material, new Date(lastMaterialUpdateTime));
}
return shouldUpdateMaterial;
}
return true;
}
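    // Worked example (illustrative): with materialUpdateInterval = 60_000 ms, a
    // material last polled 45 s ago is skipped (returns false), while one last
    // polled 90 s ago -- or never polled at all -- is returned for update.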
private void updateLastUpdateTimeForScmMaterial(Material material) {
materialLastUpdateTimeMap.put(material, DateTimeUtils.currentTimeMillis());
}
private void updateSchedulableMaterials(boolean forceLoad) {
if (forceLoad || schedulableMaterials == null) {
schedulableMaterials = materialConfigConverter.toMaterials(goConfigService.getSchedulableSCMMaterials());
}
}
private abstract class InternalConfigChangeListener extends EntityConfigChangedListener<Object> {
private final List<Class<?>> securityConfigClasses = Arrays.asList(
PipelineConfig.class,
PackageDefinition.class,
PackageRepository.class,
SCM.class
);
@Override
public boolean shouldCareAbout(Object entity) {
return securityConfigClasses.stream().anyMatch(aClass -> aClass.isAssignableFrom(entity.getClass()));
}
}
}
| Java |
#include "config/config.h"
#include <sys/types.h>
#include <sys/wait.h>
#include <stdarg.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <errno.h>
#include <sys/errno.h>
#include <string.h>
#include "dispatch_test.h"
#define _test_print(_file, _line, _desc, \
_expr, _fmt1, _val1, _fmt2, _val2) do { \
const char* _exprstr = _expr ? "PASS" : "FAIL"; \
char _linestr[BUFSIZ]; \
if (!_expr) { \
snprintf(_linestr, sizeof(_linestr), \
" (%s:%ld)", _file, _line); \
} else { \
_linestr[0] = 0; \
} \
if (_fmt2 == 0) { \
printf("\tValue: " _fmt1 "\n" \
"[%s] %s%s\n", \
_val1, \
_exprstr, \
_desc, \
_linestr); \
} else { \
printf("\tActual: " _fmt1 "\n" \
"\tExpected: " _fmt2 "\n" \
"[%s] %s%s\n", \
_val1, \
_val2, \
_exprstr, \
_desc, \
_linestr); \
} \
if (!_expr) { \
printf("\t%s:%ld\n", _file, _line); \
} \
fflush(stdout); \
} while (0);
void
test_start(const char* desc) {
printf("\n==================================================\n");
printf("[TEST] %s\n", desc);
printf("[PID] %d\n", getpid());
printf("==================================================\n\n");
usleep(100000); // give 'gdb --waitfor=' a chance to find this proc
}
#define test_ptr_null(a,b) _test_ptr_null(__FILE__, __LINE__, a, b)
void
_test_ptr_null(const char* file, long line, const char* desc, const void* ptr) {
_test_print(file, line, desc,
(ptr == NULL), "%p", ptr, "%p", (void*)0);
}
#define test_ptr_notnull(a,b) _test_ptr_notnull(__FILE__, __LINE__, a, b)
void
_test_ptr_notnull(const char* file, long line, const char* desc, const void* ptr) {
_test_print(file, line, desc,
(ptr != NULL), "%p", ptr, "%p", ptr ?: (void*)~0);
}
#define test_ptr(a,b,c) _test_ptr(__FILE__, __LINE__, a, b, c)
void
_test_ptr(const char* file, long line, const char* desc, const void* actual, const void* expected) {
_test_print(file, line, desc,
(actual == expected), "%p", actual, "%p", expected);
}
#define test_long(a,b,c) _test_long(__FILE__, __LINE__, a, b, c)
void
_test_long(const char* file, long line, const char* desc, long actual, long expected) {
_test_print(file, line, desc,
(actual == expected), "%ld", actual, "%ld", expected);
}
#define test_long_less_than(a, b, c) _test_long_less_than(__FILE__, __LINE__, a, b, c)
void
_test_long_less_than(const char* file, long line, const char* desc, long actual, long expected_max) {
_test_print(file, line, desc, (actual < expected_max), "%ld", actual, "<%ld", expected_max);
}
#define test_double_less_than(d, v, m) _test_double_less_than(__FILE__, __LINE__, d, v, m)
void
_test_double_less_than(const char* file, long line, const char* desc, double val, double max_expected) {
_test_print(file, line, desc, (val < max_expected), "%f", val, "<%f", max_expected);
}
#define test_double_less_than_or_equal(d, v, m) _test_double_less_than_or_equal(__FILE__, __LINE__, d, v, m)
void
_test_double_less_than_or_equal(const char* file, long line, const char* desc, double val, double max_expected) {
	_test_print(file, line, desc, (val <= max_expected), "%f", val, "<=%f", max_expected);
}
#define test_errno(a,b,c) _test_errno(__FILE__, __LINE__, a, b, c)
void
_test_errno(const char* file, long line, const char* desc, long actual, long expected) {
char* actual_str;
char* expected_str;
asprintf(&actual_str, "%ld\t%s", actual, actual ? strerror(actual) : "");
asprintf(&expected_str, "%ld\t%s", expected, expected ? strerror(expected) : "");
_test_print(file, line, desc,
(actual == expected), "%s", actual_str, "%s", expected_str);
free(actual_str);
free(expected_str);
}
//#include <spawn.h>
extern char **environ;
void
test_stop(void) {
test_stop_after_delay((void *)(intptr_t)0);
}
void
test_stop_after_delay(void *delay) {
#if HAVE_LEAKS
int res;
pid_t pid;
char pidstr[10];
#endif
if (delay != NULL) {
sleep((int)(intptr_t)delay);
}
#if HAVE_LEAKS
if (getenv("NOLEAKS")) _exit(EXIT_SUCCESS);
/* leaks doesn't work against debug variant malloc */
if (getenv("DYLD_IMAGE_SUFFIX")) _exit(EXIT_SUCCESS);
snprintf(pidstr, sizeof(pidstr), "%d", getpid());
char* args[] = { "./leaks-wrapper", pidstr, NULL };
res = posix_spawnp(&pid, args[0], NULL, NULL, args, environ);
if (res == 0 && pid > 0) {
int status;
waitpid(pid, &status, 0);
test_long("Leaks", status, 0);
} else {
perror(args[0]);
}
#endif
_exit(EXIT_SUCCESS);
}
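/*
 * Illustrative sketch (not part of the original file): how a test translation
 * unit typically drives the helpers above. The values are made up; note that
 * test_stop() does not return because it calls _exit().
 */
static void __attribute__((unused))
example_test(void)
{
	test_start("example");
	test_long("arithmetic", 2 + 2, 4);          // prints actual vs expected
	test_ptr_notnull("stdout stream", stdout);  // passes for any non-NULL pointer
	test_errno("close bad fd", close(-1) == -1 ? errno : 0, EBADF);
	test_stop();                                // reports leaks (if enabled) and exits
}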
| Java |
/* ---------------------------------------------------------------------------- */
/* Atmel Microcontroller Software Support */
/* SAM Software Package License */
/* ---------------------------------------------------------------------------- */
/* Copyright (c) 2014, Atmel Corporation */
/* */
/* All rights reserved. */
/* */
/* Redistribution and use in source and binary forms, with or without */
/* modification, are permitted provided that the following condition is met: */
/* */
/* - Redistributions of source code must retain the above copyright notice, */
/* this list of conditions and the disclaimer below. */
/* */
/* Atmel's name may not be used to endorse or promote products derived from */
/* this software without specific prior written permission. */
/* */
/* DISCLAIMER: THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR */
/* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE */
/* DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR ANY DIRECT, INDIRECT, */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT */
/* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, */
/* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING */
/* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, */
/* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */
/* ---------------------------------------------------------------------------- */
#ifndef _SAM4E_HSMCI_COMPONENT_
#define _SAM4E_HSMCI_COMPONENT_
/* ============================================================================= */
/** SOFTWARE API DEFINITION FOR High Speed MultiMedia Card Interface */
/* ============================================================================= */
/** \addtogroup SAM4E_HSMCI High Speed MultiMedia Card Interface */
/*@{*/
#if !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
/** \brief Hsmci hardware registers */
typedef struct {
__O uint32_t HSMCI_CR; /**< \brief (Hsmci Offset: 0x00) Control Register */
__IO uint32_t HSMCI_MR; /**< \brief (Hsmci Offset: 0x04) Mode Register */
__IO uint32_t HSMCI_DTOR; /**< \brief (Hsmci Offset: 0x08) Data Timeout Register */
__IO uint32_t HSMCI_SDCR; /**< \brief (Hsmci Offset: 0x0C) SD/SDIO Card Register */
__IO uint32_t HSMCI_ARGR; /**< \brief (Hsmci Offset: 0x10) Argument Register */
__O uint32_t HSMCI_CMDR; /**< \brief (Hsmci Offset: 0x14) Command Register */
__IO uint32_t HSMCI_BLKR; /**< \brief (Hsmci Offset: 0x18) Block Register */
__IO uint32_t HSMCI_CSTOR; /**< \brief (Hsmci Offset: 0x1C) Completion Signal Timeout Register */
__I uint32_t HSMCI_RSPR[4]; /**< \brief (Hsmci Offset: 0x20) Response Register */
__I uint32_t HSMCI_RDR; /**< \brief (Hsmci Offset: 0x30) Receive Data Register */
__O uint32_t HSMCI_TDR; /**< \brief (Hsmci Offset: 0x34) Transmit Data Register */
__I uint32_t Reserved1[2];
__I uint32_t HSMCI_SR; /**< \brief (Hsmci Offset: 0x40) Status Register */
__O uint32_t HSMCI_IER; /**< \brief (Hsmci Offset: 0x44) Interrupt Enable Register */
__O uint32_t HSMCI_IDR; /**< \brief (Hsmci Offset: 0x48) Interrupt Disable Register */
__I uint32_t HSMCI_IMR; /**< \brief (Hsmci Offset: 0x4C) Interrupt Mask Register */
__I uint32_t Reserved2[1];
__IO uint32_t HSMCI_CFG; /**< \brief (Hsmci Offset: 0x54) Configuration Register */
__I uint32_t Reserved3[35];
__IO uint32_t HSMCI_WPMR; /**< \brief (Hsmci Offset: 0xE4) Write Protection Mode Register */
__I uint32_t HSMCI_WPSR; /**< \brief (Hsmci Offset: 0xE8) Write Protection Status Register */
__I uint32_t Reserved4[5];
__IO uint32_t HSMCI_RPR; /**< \brief (Hsmci Offset: 0x100) Receive Pointer Register */
__IO uint32_t HSMCI_RCR; /**< \brief (Hsmci Offset: 0x104) Receive Counter Register */
__IO uint32_t HSMCI_TPR; /**< \brief (Hsmci Offset: 0x108) Transmit Pointer Register */
__IO uint32_t HSMCI_TCR; /**< \brief (Hsmci Offset: 0x10C) Transmit Counter Register */
__IO uint32_t HSMCI_RNPR; /**< \brief (Hsmci Offset: 0x110) Receive Next Pointer Register */
__IO uint32_t HSMCI_RNCR; /**< \brief (Hsmci Offset: 0x114) Receive Next Counter Register */
__IO uint32_t HSMCI_TNPR; /**< \brief (Hsmci Offset: 0x118) Transmit Next Pointer Register */
__IO uint32_t HSMCI_TNCR; /**< \brief (Hsmci Offset: 0x11C) Transmit Next Counter Register */
__O uint32_t HSMCI_PTCR; /**< \brief (Hsmci Offset: 0x120) Transfer Control Register */
__I uint32_t HSMCI_PTSR; /**< \brief (Hsmci Offset: 0x124) Transfer Status Register */
__I uint32_t Reserved5[54];
__IO uint32_t HSMCI_FIFO[256]; /**< \brief (Hsmci Offset: 0x200) FIFO Memory Aperture0 */
} Hsmci;
#endif /* !(defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
/* -------- HSMCI_CR : (HSMCI Offset: 0x00) Control Register -------- */
#define HSMCI_CR_MCIEN (0x1u << 0) /**< \brief (HSMCI_CR) Multi-Media Interface Enable */
#define HSMCI_CR_MCIDIS (0x1u << 1) /**< \brief (HSMCI_CR) Multi-Media Interface Disable */
#define HSMCI_CR_PWSEN (0x1u << 2) /**< \brief (HSMCI_CR) Power Save Mode Enable */
#define HSMCI_CR_PWSDIS (0x1u << 3) /**< \brief (HSMCI_CR) Power Save Mode Disable */
#define HSMCI_CR_SWRST (0x1u << 7) /**< \brief (HSMCI_CR) Software Reset */
/* -------- HSMCI_MR : (HSMCI Offset: 0x04) Mode Register -------- */
#define HSMCI_MR_CLKDIV_Pos 0
#define HSMCI_MR_CLKDIV_Msk (0xffu << HSMCI_MR_CLKDIV_Pos) /**< \brief (HSMCI_MR) Clock Divider */
#define HSMCI_MR_CLKDIV(value) ((HSMCI_MR_CLKDIV_Msk & ((value) << HSMCI_MR_CLKDIV_Pos)))
#define HSMCI_MR_PWSDIV_Pos 8
#define HSMCI_MR_PWSDIV_Msk (0x7u << HSMCI_MR_PWSDIV_Pos) /**< \brief (HSMCI_MR) Power Saving Divider */
#define HSMCI_MR_PWSDIV(value) ((HSMCI_MR_PWSDIV_Msk & ((value) << HSMCI_MR_PWSDIV_Pos)))
#define HSMCI_MR_RDPROOF (0x1u << 11) /**< \brief (HSMCI_MR) Read Proof Enable */
#define HSMCI_MR_WRPROOF (0x1u << 12) /**< \brief (HSMCI_MR) Write Proof Enable */
#define HSMCI_MR_FBYTE (0x1u << 13) /**< \brief (HSMCI_MR) Force Byte Transfer */
#define HSMCI_MR_PADV (0x1u << 14) /**< \brief (HSMCI_MR) Padding Value */
#define HSMCI_MR_PDCMODE (0x1u << 15) /**< \brief (HSMCI_MR) PDC-oriented Mode */
#define HSMCI_MR_CLKODD (0x1u << 16) /**< \brief (HSMCI_MR) Clock divider is odd */
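/* Illustrative use of the HSMCI_MR field macros (not part of the original header):
 * composing a Mode Register value with a clock divider of 2, read/write proof and
 * forced byte transfers enabled. `HSMCI` is assumed to be the peripheral instance
 * pointer defined elsewhere in the device headers.
 *
 *   HSMCI->HSMCI_MR = HSMCI_MR_CLKDIV(2) | HSMCI_MR_RDPROOF | HSMCI_MR_WRPROOF | HSMCI_MR_FBYTE;
 */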
/* -------- HSMCI_DTOR : (HSMCI Offset: 0x08) Data Timeout Register -------- */
#define HSMCI_DTOR_DTOCYC_Pos 0
#define HSMCI_DTOR_DTOCYC_Msk (0xfu << HSMCI_DTOR_DTOCYC_Pos) /**< \brief (HSMCI_DTOR) Data Timeout Cycle Number */
#define HSMCI_DTOR_DTOCYC(value) ((HSMCI_DTOR_DTOCYC_Msk & ((value) << HSMCI_DTOR_DTOCYC_Pos)))
#define HSMCI_DTOR_DTOMUL_Pos 4
#define HSMCI_DTOR_DTOMUL_Msk (0x7u << HSMCI_DTOR_DTOMUL_Pos) /**< \brief (HSMCI_DTOR) Data Timeout Multiplier */
#define HSMCI_DTOR_DTOMUL_1 (0x0u << 4) /**< \brief (HSMCI_DTOR) DTOCYC */
#define HSMCI_DTOR_DTOMUL_16 (0x1u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 16 */
#define HSMCI_DTOR_DTOMUL_128 (0x2u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 128 */
#define HSMCI_DTOR_DTOMUL_256 (0x3u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 256 */
#define HSMCI_DTOR_DTOMUL_1024 (0x4u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 1024 */
#define HSMCI_DTOR_DTOMUL_4096 (0x5u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 4096 */
#define HSMCI_DTOR_DTOMUL_65536 (0x6u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 65536 */
#define HSMCI_DTOR_DTOMUL_1048576 (0x7u << 4) /**< \brief (HSMCI_DTOR) DTOCYC x 1048576 */
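/* Illustrative note (an assumption about usage, not datasheet text): the data timeout is
 * DTOCYC x DTOMUL clock cycles, e.g. HSMCI_DTOR_DTOCYC(2) | HSMCI_DTOR_DTOMUL_1048576
 * requests a timeout of 2 x 1048576 = 2097152 cycles. */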
/* -------- HSMCI_SDCR : (HSMCI Offset: 0x0C) SD/SDIO Card Register -------- */
#define HSMCI_SDCR_SDCSEL_Pos 0
#define HSMCI_SDCR_SDCSEL_Msk (0x3u << HSMCI_SDCR_SDCSEL_Pos) /**< \brief (HSMCI_SDCR) SDCard/SDIO Slot */
#define HSMCI_SDCR_SDCSEL_SLOTA (0x0u << 0) /**< \brief (HSMCI_SDCR) Slot A is selected. */
#define HSMCI_SDCR_SDCSEL_SLOTB (0x1u << 0) /**< \brief (HSMCI_SDCR) Slot B is selected. */
#define HSMCI_SDCR_SDCSEL_SLOTC (0x2u << 0) /**< \brief (HSMCI_SDCR) Slot C is selected. */
#define HSMCI_SDCR_SDCSEL_SLOTD (0x3u << 0) /**< \brief (HSMCI_SDCR) Slot D is selected. */
#define HSMCI_SDCR_SDCBUS_Pos 6
#define HSMCI_SDCR_SDCBUS_Msk (0x3u << HSMCI_SDCR_SDCBUS_Pos) /**< \brief (HSMCI_SDCR) SDCard/SDIO Bus Width */
#define HSMCI_SDCR_SDCBUS_1 (0x0u << 6) /**< \brief (HSMCI_SDCR) 1 bit */
#define HSMCI_SDCR_SDCBUS_4 (0x2u << 6) /**< \brief (HSMCI_SDCR) 4 bits */
#define HSMCI_SDCR_SDCBUS_8 (0x3u << 6) /**< \brief (HSMCI_SDCR) 8 bits */
/* -------- HSMCI_ARGR : (HSMCI Offset: 0x10) Argument Register -------- */
#define HSMCI_ARGR_ARG_Pos 0
#define HSMCI_ARGR_ARG_Msk (0xffffffffu << HSMCI_ARGR_ARG_Pos) /**< \brief (HSMCI_ARGR) Command Argument */
#define HSMCI_ARGR_ARG(value) ((HSMCI_ARGR_ARG_Msk & ((value) << HSMCI_ARGR_ARG_Pos)))
/* -------- HSMCI_CMDR : (HSMCI Offset: 0x14) Command Register -------- */
#define HSMCI_CMDR_CMDNB_Pos 0
#define HSMCI_CMDR_CMDNB_Msk (0x3fu << HSMCI_CMDR_CMDNB_Pos) /**< \brief (HSMCI_CMDR) Command Number */
#define HSMCI_CMDR_CMDNB(value) ((HSMCI_CMDR_CMDNB_Msk & ((value) << HSMCI_CMDR_CMDNB_Pos)))
#define HSMCI_CMDR_RSPTYP_Pos 6
#define HSMCI_CMDR_RSPTYP_Msk (0x3u << HSMCI_CMDR_RSPTYP_Pos) /**< \brief (HSMCI_CMDR) Response Type */
#define HSMCI_CMDR_RSPTYP_NORESP (0x0u << 6) /**< \brief (HSMCI_CMDR) No response */
#define HSMCI_CMDR_RSPTYP_48_BIT (0x1u << 6) /**< \brief (HSMCI_CMDR) 48-bit response */
#define HSMCI_CMDR_RSPTYP_136_BIT (0x2u << 6) /**< \brief (HSMCI_CMDR) 136-bit response */
#define HSMCI_CMDR_RSPTYP_R1B (0x3u << 6) /**< \brief (HSMCI_CMDR) R1b response type */
#define HSMCI_CMDR_SPCMD_Pos 8
#define HSMCI_CMDR_SPCMD_Msk (0x7u << HSMCI_CMDR_SPCMD_Pos) /**< \brief (HSMCI_CMDR) Special Command */
#define HSMCI_CMDR_SPCMD_STD (0x0u << 8) /**< \brief (HSMCI_CMDR) Not a special CMD. */
#define HSMCI_CMDR_SPCMD_INIT (0x1u << 8) /**< \brief (HSMCI_CMDR) Initialization CMD: 74 clock cycles for initialization sequence. */
#define HSMCI_CMDR_SPCMD_SYNC (0x2u << 8) /**< \brief (HSMCI_CMDR) Synchronized CMD: Wait for the end of the current data block transfer before sending the pending command. */
#define HSMCI_CMDR_SPCMD_CE_ATA (0x3u << 8) /**< \brief (HSMCI_CMDR) CE-ATA Completion Signal disable Command. The host cancels the ability for the device to return a command completion signal on the command line. */
#define HSMCI_CMDR_SPCMD_IT_CMD (0x4u << 8) /**< \brief (HSMCI_CMDR) Interrupt command: Corresponds to the Interrupt Mode (CMD40). */
#define HSMCI_CMDR_SPCMD_IT_RESP (0x5u << 8) /**< \brief (HSMCI_CMDR) Interrupt response: Corresponds to the Interrupt Mode (CMD40). */
#define HSMCI_CMDR_SPCMD_BOR (0x6u << 8) /**< \brief (HSMCI_CMDR) Boot Operation Request. Start a boot operation mode, the host processor can read boot data from the MMC device directly. */
#define HSMCI_CMDR_SPCMD_EBO (0x7u << 8) /**< \brief (HSMCI_CMDR) End Boot Operation. This command allows the host processor to terminate the boot operation mode. */
#define HSMCI_CMDR_OPDCMD (0x1u << 11) /**< \brief (HSMCI_CMDR) Open Drain Command */
#define HSMCI_CMDR_OPDCMD_PUSHPULL (0x0u << 11) /**< \brief (HSMCI_CMDR) Push pull command. */
#define HSMCI_CMDR_OPDCMD_OPENDRAIN (0x1u << 11) /**< \brief (HSMCI_CMDR) Open drain command. */
#define HSMCI_CMDR_MAXLAT (0x1u << 12) /**< \brief (HSMCI_CMDR) Max Latency for Command to Response */
#define HSMCI_CMDR_MAXLAT_5 (0x0u << 12) /**< \brief (HSMCI_CMDR) 5-cycle max latency. */
#define HSMCI_CMDR_MAXLAT_64 (0x1u << 12) /**< \brief (HSMCI_CMDR) 64-cycle max latency. */
#define HSMCI_CMDR_TRCMD_Pos 16
#define HSMCI_CMDR_TRCMD_Msk (0x3u << HSMCI_CMDR_TRCMD_Pos) /**< \brief (HSMCI_CMDR) Transfer Command */
#define HSMCI_CMDR_TRCMD_NO_DATA (0x0u << 16) /**< \brief (HSMCI_CMDR) No data transfer */
#define HSMCI_CMDR_TRCMD_START_DATA (0x1u << 16) /**< \brief (HSMCI_CMDR) Start data transfer */
#define HSMCI_CMDR_TRCMD_STOP_DATA (0x2u << 16) /**< \brief (HSMCI_CMDR) Stop data transfer */
#define HSMCI_CMDR_TRDIR (0x1u << 18) /**< \brief (HSMCI_CMDR) Transfer Direction */
#define HSMCI_CMDR_TRDIR_WRITE (0x0u << 18) /**< \brief (HSMCI_CMDR) Write. */
#define HSMCI_CMDR_TRDIR_READ (0x1u << 18) /**< \brief (HSMCI_CMDR) Read. */
#define HSMCI_CMDR_TRTYP_Pos 19
#define HSMCI_CMDR_TRTYP_Msk (0x7u << HSMCI_CMDR_TRTYP_Pos) /**< \brief (HSMCI_CMDR) Transfer Type */
#define HSMCI_CMDR_TRTYP_SINGLE (0x0u << 19) /**< \brief (HSMCI_CMDR) MMC/SD Card Single Block */
#define HSMCI_CMDR_TRTYP_MULTIPLE (0x1u << 19) /**< \brief (HSMCI_CMDR) MMC/SD Card Multiple Block */
#define HSMCI_CMDR_TRTYP_STREAM (0x2u << 19) /**< \brief (HSMCI_CMDR) MMC Stream */
#define HSMCI_CMDR_TRTYP_BYTE (0x4u << 19) /**< \brief (HSMCI_CMDR) SDIO Byte */
#define HSMCI_CMDR_TRTYP_BLOCK (0x5u << 19) /**< \brief (HSMCI_CMDR) SDIO Block */
#define HSMCI_CMDR_IOSPCMD_Pos 24
#define HSMCI_CMDR_IOSPCMD_Msk (0x3u << HSMCI_CMDR_IOSPCMD_Pos) /**< \brief (HSMCI_CMDR) SDIO Special Command */
#define HSMCI_CMDR_IOSPCMD_STD (0x0u << 24) /**< \brief (HSMCI_CMDR) Not an SDIO Special Command */
#define HSMCI_CMDR_IOSPCMD_SUSPEND (0x1u << 24) /**< \brief (HSMCI_CMDR) SDIO Suspend Command */
#define HSMCI_CMDR_IOSPCMD_RESUME (0x2u << 24) /**< \brief (HSMCI_CMDR) SDIO Resume Command */
#define HSMCI_CMDR_ATACS (0x1u << 26) /**< \brief (HSMCI_CMDR) ATA with Command Completion Signal */
#define HSMCI_CMDR_ATACS_NORMAL (0x0u << 26) /**< \brief (HSMCI_CMDR) Normal operation mode. */
#define HSMCI_CMDR_ATACS_COMPLETION (0x1u << 26) /**< \brief (HSMCI_CMDR) This bit indicates that a completion signal is expected within a programmed amount of time (HSMCI_CSTOR). */
#define HSMCI_CMDR_BOOT_ACK (0x1u << 27) /**< \brief (HSMCI_CMDR) Boot Operation Acknowledge */
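/* Illustrative use of the HSMCI_CMDR field macros (not part of the original header):
 * one plausible encoding of a single-block read command (CMD17) with a 48-bit response
 * and 64-cycle maximum latency; the exact flag set depends on the card protocol in use.
 *
 *   uint32_t cmd = HSMCI_CMDR_CMDNB(17) | HSMCI_CMDR_RSPTYP_48_BIT | HSMCI_CMDR_SPCMD_STD
 *                | HSMCI_CMDR_MAXLAT_64 | HSMCI_CMDR_TRCMD_START_DATA
 *                | HSMCI_CMDR_TRDIR_READ | HSMCI_CMDR_TRTYP_SINGLE;
 */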
/* -------- HSMCI_BLKR : (HSMCI Offset: 0x18) Block Register -------- */
#define HSMCI_BLKR_BCNT_Pos 0
#define HSMCI_BLKR_BCNT_Msk (0xffffu << HSMCI_BLKR_BCNT_Pos) /**< \brief (HSMCI_BLKR) MMC/SDIO Block Count - SDIO Byte Count */
#define HSMCI_BLKR_BCNT(value) ((HSMCI_BLKR_BCNT_Msk & ((value) << HSMCI_BLKR_BCNT_Pos)))
#define HSMCI_BLKR_BLKLEN_Pos 16
#define HSMCI_BLKR_BLKLEN_Msk (0xffffu << HSMCI_BLKR_BLKLEN_Pos) /**< \brief (HSMCI_BLKR) Data Block Length */
#define HSMCI_BLKR_BLKLEN(value) ((HSMCI_BLKR_BLKLEN_Msk & ((value) << HSMCI_BLKR_BLKLEN_Pos)))
/* -------- HSMCI_CSTOR : (HSMCI Offset: 0x1C) Completion Signal Timeout Register -------- */
#define HSMCI_CSTOR_CSTOCYC_Pos 0
#define HSMCI_CSTOR_CSTOCYC_Msk (0xfu << HSMCI_CSTOR_CSTOCYC_Pos) /**< \brief (HSMCI_CSTOR) Completion Signal Timeout Cycle Number */
#define HSMCI_CSTOR_CSTOCYC(value) ((HSMCI_CSTOR_CSTOCYC_Msk & ((value) << HSMCI_CSTOR_CSTOCYC_Pos)))
#define HSMCI_CSTOR_CSTOMUL_Pos 4
#define HSMCI_CSTOR_CSTOMUL_Msk (0x7u << HSMCI_CSTOR_CSTOMUL_Pos) /**< \brief (HSMCI_CSTOR) Completion Signal Timeout Multiplier */
#define HSMCI_CSTOR_CSTOMUL_1 (0x0u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 1 */
#define HSMCI_CSTOR_CSTOMUL_16 (0x1u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 16 */
#define HSMCI_CSTOR_CSTOMUL_128 (0x2u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 128 */
#define HSMCI_CSTOR_CSTOMUL_256 (0x3u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 256 */
#define HSMCI_CSTOR_CSTOMUL_1024 (0x4u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 1024 */
#define HSMCI_CSTOR_CSTOMUL_4096 (0x5u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 4096 */
#define HSMCI_CSTOR_CSTOMUL_65536 (0x6u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 65536 */
#define HSMCI_CSTOR_CSTOMUL_1048576 (0x7u << 4) /**< \brief (HSMCI_CSTOR) CSTOCYC x 1048576 */
/* -------- HSMCI_RSPR[4] : (HSMCI Offset: 0x20) Response Register -------- */
#define HSMCI_RSPR_RSP_Pos 0
#define HSMCI_RSPR_RSP_Msk (0xffffffffu << HSMCI_RSPR_RSP_Pos) /**< \brief (HSMCI_RSPR[4]) Response */
/* -------- HSMCI_RDR : (HSMCI Offset: 0x30) Receive Data Register -------- */
#define HSMCI_RDR_DATA_Pos 0
#define HSMCI_RDR_DATA_Msk (0xffffffffu << HSMCI_RDR_DATA_Pos) /**< \brief (HSMCI_RDR) Data to Read */
/* -------- HSMCI_TDR : (HSMCI Offset: 0x34) Transmit Data Register -------- */
#define HSMCI_TDR_DATA_Pos 0
#define HSMCI_TDR_DATA_Msk (0xffffffffu << HSMCI_TDR_DATA_Pos) /**< \brief (HSMCI_TDR) Data to Write */
#define HSMCI_TDR_DATA(value) ((HSMCI_TDR_DATA_Msk & ((value) << HSMCI_TDR_DATA_Pos)))
/* -------- HSMCI_SR : (HSMCI Offset: 0x40) Status Register -------- */
#define HSMCI_SR_CMDRDY (0x1u << 0) /**< \brief (HSMCI_SR) Command Ready */
#define HSMCI_SR_RXRDY (0x1u << 1) /**< \brief (HSMCI_SR) Receiver Ready */
#define HSMCI_SR_TXRDY (0x1u << 2) /**< \brief (HSMCI_SR) Transmit Ready */
#define HSMCI_SR_BLKE (0x1u << 3) /**< \brief (HSMCI_SR) Data Block Ended */
#define HSMCI_SR_DTIP (0x1u << 4) /**< \brief (HSMCI_SR) Data Transfer in Progress */
#define HSMCI_SR_NOTBUSY (0x1u << 5) /**< \brief (HSMCI_SR) HSMCI Not Busy */
#define HSMCI_SR_ENDRX (0x1u << 6) /**< \brief (HSMCI_SR) End of RX Buffer */
#define HSMCI_SR_ENDTX (0x1u << 7) /**< \brief (HSMCI_SR) End of TX Buffer */
#define HSMCI_SR_SDIOIRQA (0x1u << 8) /**< \brief (HSMCI_SR) SDIO Interrupt for Slot A */
#define HSMCI_SR_SDIOWAIT (0x1u << 12) /**< \brief (HSMCI_SR) SDIO Read Wait Operation Status */
#define HSMCI_SR_CSRCV (0x1u << 13) /**< \brief (HSMCI_SR) CE-ATA Completion Signal Received */
#define HSMCI_SR_RXBUFF (0x1u << 14) /**< \brief (HSMCI_SR) RX Buffer Full */
#define HSMCI_SR_TXBUFE (0x1u << 15) /**< \brief (HSMCI_SR) TX Buffer Empty */
#define HSMCI_SR_RINDE (0x1u << 16) /**< \brief (HSMCI_SR) Response Index Error */
#define HSMCI_SR_RDIRE (0x1u << 17) /**< \brief (HSMCI_SR) Response Direction Error */
#define HSMCI_SR_RCRCE (0x1u << 18) /**< \brief (HSMCI_SR) Response CRC Error */
#define HSMCI_SR_RENDE (0x1u << 19) /**< \brief (HSMCI_SR) Response End Bit Error */
#define HSMCI_SR_RTOE (0x1u << 20) /**< \brief (HSMCI_SR) Response Time-out Error */
#define HSMCI_SR_DCRCE (0x1u << 21) /**< \brief (HSMCI_SR) Data CRC Error */
#define HSMCI_SR_DTOE (0x1u << 22) /**< \brief (HSMCI_SR) Data Time-out Error */
#define HSMCI_SR_CSTOE (0x1u << 23) /**< \brief (HSMCI_SR) Completion Signal Time-out Error */
#define HSMCI_SR_FIFOEMPTY (0x1u << 26) /**< \brief (HSMCI_SR) FIFO empty flag */
#define HSMCI_SR_XFRDONE (0x1u << 27) /**< \brief (HSMCI_SR) Transfer Done flag */
#define HSMCI_SR_ACKRCV (0x1u << 28) /**< \brief (HSMCI_SR) Boot Operation Acknowledge Received */
#define HSMCI_SR_ACKRCVE (0x1u << 29) /**< \brief (HSMCI_SR) Boot Operation Acknowledge Error */
#define HSMCI_SR_OVRE (0x1u << 30) /**< \brief (HSMCI_SR) Overrun */
#define HSMCI_SR_UNRE (0x1u << 31) /**< \brief (HSMCI_SR) Underrun */
/* -------- HSMCI_IER : (HSMCI Offset: 0x44) Interrupt Enable Register -------- */
#define HSMCI_IER_CMDRDY (0x1u << 0) /**< \brief (HSMCI_IER) Command Ready Interrupt Enable */
#define HSMCI_IER_RXRDY (0x1u << 1) /**< \brief (HSMCI_IER) Receiver Ready Interrupt Enable */
#define HSMCI_IER_TXRDY (0x1u << 2) /**< \brief (HSMCI_IER) Transmit Ready Interrupt Enable */
#define HSMCI_IER_BLKE (0x1u << 3) /**< \brief (HSMCI_IER) Data Block Ended Interrupt Enable */
#define HSMCI_IER_DTIP (0x1u << 4) /**< \brief (HSMCI_IER) Data Transfer in Progress Interrupt Enable */
#define HSMCI_IER_NOTBUSY (0x1u << 5) /**< \brief (HSMCI_IER) Data Not Busy Interrupt Enable */
#define HSMCI_IER_ENDRX (0x1u << 6) /**< \brief (HSMCI_IER) End of Receive Buffer Interrupt Enable */
#define HSMCI_IER_ENDTX (0x1u << 7) /**< \brief (HSMCI_IER) End of Transmit Buffer Interrupt Enable */
#define HSMCI_IER_SDIOIRQA (0x1u << 8) /**< \brief (HSMCI_IER) SDIO Interrupt for Slot A Interrupt Enable */
#define HSMCI_IER_SDIOWAIT (0x1u << 12) /**< \brief (HSMCI_IER) SDIO Read Wait Operation Status Interrupt Enable */
#define HSMCI_IER_CSRCV (0x1u << 13) /**< \brief (HSMCI_IER) Completion Signal Received Interrupt Enable */
#define HSMCI_IER_RXBUFF (0x1u << 14) /**< \brief (HSMCI_IER) Receive Buffer Full Interrupt Enable */
#define HSMCI_IER_TXBUFE (0x1u << 15) /**< \brief (HSMCI_IER) Transmit Buffer Empty Interrupt Enable */
#define HSMCI_IER_RINDE (0x1u << 16) /**< \brief (HSMCI_IER) Response Index Error Interrupt Enable */
#define HSMCI_IER_RDIRE (0x1u << 17) /**< \brief (HSMCI_IER) Response Direction Error Interrupt Enable */
#define HSMCI_IER_RCRCE (0x1u << 18) /**< \brief (HSMCI_IER) Response CRC Error Interrupt Enable */
#define HSMCI_IER_RENDE (0x1u << 19) /**< \brief (HSMCI_IER) Response End Bit Error Interrupt Enable */
#define HSMCI_IER_RTOE (0x1u << 20) /**< \brief (HSMCI_IER) Response Time-out Error Interrupt Enable */
#define HSMCI_IER_DCRCE (0x1u << 21) /**< \brief (HSMCI_IER) Data CRC Error Interrupt Enable */
#define HSMCI_IER_DTOE (0x1u << 22) /**< \brief (HSMCI_IER) Data Time-out Error Interrupt Enable */
#define HSMCI_IER_CSTOE (0x1u << 23) /**< \brief (HSMCI_IER) Completion Signal Timeout Error Interrupt Enable */
#define HSMCI_IER_FIFOEMPTY (0x1u << 26) /**< \brief (HSMCI_IER) FIFO empty Interrupt enable */
#define HSMCI_IER_XFRDONE (0x1u << 27) /**< \brief (HSMCI_IER) Transfer Done Interrupt enable */
#define HSMCI_IER_ACKRCV (0x1u << 28) /**< \brief (HSMCI_IER) Boot Acknowledge Interrupt Enable */
#define HSMCI_IER_ACKRCVE (0x1u << 29) /**< \brief (HSMCI_IER) Boot Acknowledge Error Interrupt Enable */
#define HSMCI_IER_OVRE (0x1u << 30) /**< \brief (HSMCI_IER) Overrun Interrupt Enable */
#define HSMCI_IER_UNRE (0x1u << 31) /**< \brief (HSMCI_IER) Underrun Interrupt Enable */
/* -------- HSMCI_IDR : (HSMCI Offset: 0x48) Interrupt Disable Register -------- */
#define HSMCI_IDR_CMDRDY (0x1u << 0) /**< \brief (HSMCI_IDR) Command Ready Interrupt Disable */
#define HSMCI_IDR_RXRDY (0x1u << 1) /**< \brief (HSMCI_IDR) Receiver Ready Interrupt Disable */
#define HSMCI_IDR_TXRDY (0x1u << 2) /**< \brief (HSMCI_IDR) Transmit Ready Interrupt Disable */
#define HSMCI_IDR_BLKE (0x1u << 3) /**< \brief (HSMCI_IDR) Data Block Ended Interrupt Disable */
#define HSMCI_IDR_DTIP (0x1u << 4) /**< \brief (HSMCI_IDR) Data Transfer in Progress Interrupt Disable */
#define HSMCI_IDR_NOTBUSY (0x1u << 5) /**< \brief (HSMCI_IDR) Data Not Busy Interrupt Disable */
#define HSMCI_IDR_ENDRX (0x1u << 6) /**< \brief (HSMCI_IDR) End of Receive Buffer Interrupt Disable */
#define HSMCI_IDR_ENDTX (0x1u << 7) /**< \brief (HSMCI_IDR) End of Transmit Buffer Interrupt Disable */
#define HSMCI_IDR_SDIOIRQA (0x1u << 8) /**< \brief (HSMCI_IDR) SDIO Interrupt for Slot A Interrupt Disable */
#define HSMCI_IDR_SDIOWAIT (0x1u << 12) /**< \brief (HSMCI_IDR) SDIO Read Wait Operation Status Interrupt Disable */
#define HSMCI_IDR_CSRCV (0x1u << 13) /**< \brief (HSMCI_IDR) Completion Signal received interrupt Disable */
#define HSMCI_IDR_RXBUFF (0x1u << 14) /**< \brief (HSMCI_IDR) Receive Buffer Full Interrupt Disable */
#define HSMCI_IDR_TXBUFE (0x1u << 15) /**< \brief (HSMCI_IDR) Transmit Buffer Empty Interrupt Disable */
#define HSMCI_IDR_RINDE (0x1u << 16) /**< \brief (HSMCI_IDR) Response Index Error Interrupt Disable */
#define HSMCI_IDR_RDIRE (0x1u << 17) /**< \brief (HSMCI_IDR) Response Direction Error Interrupt Disable */
#define HSMCI_IDR_RCRCE (0x1u << 18) /**< \brief (HSMCI_IDR) Response CRC Error Interrupt Disable */
#define HSMCI_IDR_RENDE (0x1u << 19) /**< \brief (HSMCI_IDR) Response End Bit Error Interrupt Disable */
#define HSMCI_IDR_RTOE (0x1u << 20) /**< \brief (HSMCI_IDR) Response Time-out Error Interrupt Disable */
#define HSMCI_IDR_DCRCE (0x1u << 21) /**< \brief (HSMCI_IDR) Data CRC Error Interrupt Disable */
#define HSMCI_IDR_DTOE (0x1u << 22) /**< \brief (HSMCI_IDR) Data Time-out Error Interrupt Disable */
#define HSMCI_IDR_CSTOE (0x1u << 23) /**< \brief (HSMCI_IDR) Completion Signal Time out Error Interrupt Disable */
#define HSMCI_IDR_FIFOEMPTY (0x1u << 26) /**< \brief (HSMCI_IDR) FIFO empty Interrupt Disable */
#define HSMCI_IDR_XFRDONE (0x1u << 27) /**< \brief (HSMCI_IDR) Transfer Done Interrupt Disable */
#define HSMCI_IDR_ACKRCV (0x1u << 28) /**< \brief (HSMCI_IDR) Boot Acknowledge Interrupt Disable */
#define HSMCI_IDR_ACKRCVE (0x1u << 29) /**< \brief (HSMCI_IDR) Boot Acknowledge Error Interrupt Disable */
#define HSMCI_IDR_OVRE (0x1u << 30) /**< \brief (HSMCI_IDR) Overrun Interrupt Disable */
#define HSMCI_IDR_UNRE (0x1u << 31) /**< \brief (HSMCI_IDR) Underrun Interrupt Disable */
/* -------- HSMCI_IMR : (HSMCI Offset: 0x4C) Interrupt Mask Register -------- */
#define HSMCI_IMR_CMDRDY (0x1u << 0) /**< \brief (HSMCI_IMR) Command Ready Interrupt Mask */
#define HSMCI_IMR_RXRDY (0x1u << 1) /**< \brief (HSMCI_IMR) Receiver Ready Interrupt Mask */
#define HSMCI_IMR_TXRDY (0x1u << 2) /**< \brief (HSMCI_IMR) Transmit Ready Interrupt Mask */
#define HSMCI_IMR_BLKE (0x1u << 3) /**< \brief (HSMCI_IMR) Data Block Ended Interrupt Mask */
#define HSMCI_IMR_DTIP (0x1u << 4) /**< \brief (HSMCI_IMR) Data Transfer in Progress Interrupt Mask */
#define HSMCI_IMR_NOTBUSY (0x1u << 5) /**< \brief (HSMCI_IMR) Data Not Busy Interrupt Mask */
#define HSMCI_IMR_ENDRX (0x1u << 6) /**< \brief (HSMCI_IMR) End of Receive Buffer Interrupt Mask */
#define HSMCI_IMR_ENDTX (0x1u << 7) /**< \brief (HSMCI_IMR) End of Transmit Buffer Interrupt Mask */
#define HSMCI_IMR_SDIOIRQA (0x1u << 8) /**< \brief (HSMCI_IMR) SDIO Interrupt for Slot A Interrupt Mask */
#define HSMCI_IMR_SDIOWAIT (0x1u << 12) /**< \brief (HSMCI_IMR) SDIO Read Wait Operation Status Interrupt Mask */
#define HSMCI_IMR_CSRCV (0x1u << 13) /**< \brief (HSMCI_IMR) Completion Signal Received Interrupt Mask */
#define HSMCI_IMR_RXBUFF (0x1u << 14) /**< \brief (HSMCI_IMR) Receive Buffer Full Interrupt Mask */
#define HSMCI_IMR_TXBUFE (0x1u << 15) /**< \brief (HSMCI_IMR) Transmit Buffer Empty Interrupt Mask */
#define HSMCI_IMR_RINDE (0x1u << 16) /**< \brief (HSMCI_IMR) Response Index Error Interrupt Mask */
#define HSMCI_IMR_RDIRE (0x1u << 17) /**< \brief (HSMCI_IMR) Response Direction Error Interrupt Mask */
#define HSMCI_IMR_RCRCE (0x1u << 18) /**< \brief (HSMCI_IMR) Response CRC Error Interrupt Mask */
#define HSMCI_IMR_RENDE (0x1u << 19) /**< \brief (HSMCI_IMR) Response End Bit Error Interrupt Mask */
#define HSMCI_IMR_RTOE (0x1u << 20) /**< \brief (HSMCI_IMR) Response Time-out Error Interrupt Mask */
#define HSMCI_IMR_DCRCE (0x1u << 21) /**< \brief (HSMCI_IMR) Data CRC Error Interrupt Mask */
#define HSMCI_IMR_DTOE (0x1u << 22) /**< \brief (HSMCI_IMR) Data Time-out Error Interrupt Mask */
#define HSMCI_IMR_CSTOE (0x1u << 23) /**< \brief (HSMCI_IMR) Completion Signal Time-out Error Interrupt Mask */
#define HSMCI_IMR_FIFOEMPTY (0x1u << 26) /**< \brief (HSMCI_IMR) FIFO Empty Interrupt Mask */
#define HSMCI_IMR_XFRDONE (0x1u << 27) /**< \brief (HSMCI_IMR) Transfer Done Interrupt Mask */
#define HSMCI_IMR_ACKRCV (0x1u << 28) /**< \brief (HSMCI_IMR) Boot Operation Acknowledge Received Interrupt Mask */
#define HSMCI_IMR_ACKRCVE (0x1u << 29) /**< \brief (HSMCI_IMR) Boot Operation Acknowledge Error Interrupt Mask */
#define HSMCI_IMR_OVRE (0x1u << 30) /**< \brief (HSMCI_IMR) Overrun Interrupt Mask */
#define HSMCI_IMR_UNRE (0x1u << 31) /**< \brief (HSMCI_IMR) Underrun Interrupt Mask */
/* -------- HSMCI_CFG : (HSMCI Offset: 0x54) Configuration Register -------- */
#define HSMCI_CFG_FIFOMODE (0x1u << 0) /**< \brief (HSMCI_CFG) HSMCI Internal FIFO control mode */
#define HSMCI_CFG_FERRCTRL (0x1u << 4) /**< \brief (HSMCI_CFG) Flow Error flag reset control mode */
#define HSMCI_CFG_HSMODE (0x1u << 8) /**< \brief (HSMCI_CFG) High Speed Mode */
#define HSMCI_CFG_LSYNC (0x1u << 12) /**< \brief (HSMCI_CFG) Synchronize on the last block */
/* -------- HSMCI_WPMR : (HSMCI Offset: 0xE4) Write Protection Mode Register -------- */
#define HSMCI_WPMR_WPEN (0x1u << 0) /**< \brief (HSMCI_WPMR) Write Protect Enable */
#define HSMCI_WPMR_WPKEY_Pos 8
#define HSMCI_WPMR_WPKEY_Msk (0xffffffu << HSMCI_WPMR_WPKEY_Pos) /**< \brief (HSMCI_WPMR) Write Protect Key */
#define HSMCI_WPMR_WPKEY_PASSWD (0x4D4349u << 8) /**< \brief (HSMCI_WPMR) Writing any other value in this field aborts the write operation of the WPEN bit. Always reads as 0. */
/* -------- HSMCI_WPSR : (HSMCI Offset: 0xE8) Write Protection Status Register -------- */
#define HSMCI_WPSR_WPVS (0x1u << 0) /**< \brief (HSMCI_WPSR) Write Protection Violation Status */
#define HSMCI_WPSR_WPVSRC_Pos 8
#define HSMCI_WPSR_WPVSRC_Msk (0xffffu << HSMCI_WPSR_WPVSRC_Pos) /**< \brief (HSMCI_WPSR) Write Protection Violation Source */
/* -------- HSMCI_RPR : (HSMCI Offset: 0x100) Receive Pointer Register -------- */
#define HSMCI_RPR_RXPTR_Pos 0
#define HSMCI_RPR_RXPTR_Msk (0xffffffffu << HSMCI_RPR_RXPTR_Pos) /**< \brief (HSMCI_RPR) Receive Pointer Register */
#define HSMCI_RPR_RXPTR(value) ((HSMCI_RPR_RXPTR_Msk & ((value) << HSMCI_RPR_RXPTR_Pos)))
/* -------- HSMCI_RCR : (HSMCI Offset: 0x104) Receive Counter Register -------- */
#define HSMCI_RCR_RXCTR_Pos 0
#define HSMCI_RCR_RXCTR_Msk (0xffffu << HSMCI_RCR_RXCTR_Pos) /**< \brief (HSMCI_RCR) Receive Counter Register */
#define HSMCI_RCR_RXCTR(value) ((HSMCI_RCR_RXCTR_Msk & ((value) << HSMCI_RCR_RXCTR_Pos)))
/* -------- HSMCI_TPR : (HSMCI Offset: 0x108) Transmit Pointer Register -------- */
#define HSMCI_TPR_TXPTR_Pos 0
#define HSMCI_TPR_TXPTR_Msk (0xffffffffu << HSMCI_TPR_TXPTR_Pos) /**< \brief (HSMCI_TPR) Transmit Pointer Register */
#define HSMCI_TPR_TXPTR(value) ((HSMCI_TPR_TXPTR_Msk & ((value) << HSMCI_TPR_TXPTR_Pos)))
/* -------- HSMCI_TCR : (HSMCI Offset: 0x10C) Transmit Counter Register -------- */
#define HSMCI_TCR_TXCTR_Pos 0
#define HSMCI_TCR_TXCTR_Msk (0xffffu << HSMCI_TCR_TXCTR_Pos) /**< \brief (HSMCI_TCR) Transmit Counter Register */
#define HSMCI_TCR_TXCTR(value) ((HSMCI_TCR_TXCTR_Msk & ((value) << HSMCI_TCR_TXCTR_Pos)))
/* -------- HSMCI_RNPR : (HSMCI Offset: 0x110) Receive Next Pointer Register -------- */
#define HSMCI_RNPR_RXNPTR_Pos 0
#define HSMCI_RNPR_RXNPTR_Msk (0xffffffffu << HSMCI_RNPR_RXNPTR_Pos) /**< \brief (HSMCI_RNPR) Receive Next Pointer */
#define HSMCI_RNPR_RXNPTR(value) ((HSMCI_RNPR_RXNPTR_Msk & ((value) << HSMCI_RNPR_RXNPTR_Pos)))
/* -------- HSMCI_RNCR : (HSMCI Offset: 0x114) Receive Next Counter Register -------- */
#define HSMCI_RNCR_RXNCTR_Pos 0
#define HSMCI_RNCR_RXNCTR_Msk (0xffffu << HSMCI_RNCR_RXNCTR_Pos) /**< \brief (HSMCI_RNCR) Receive Next Counter */
#define HSMCI_RNCR_RXNCTR(value) ((HSMCI_RNCR_RXNCTR_Msk & ((value) << HSMCI_RNCR_RXNCTR_Pos)))
/* -------- HSMCI_TNPR : (HSMCI Offset: 0x118) Transmit Next Pointer Register -------- */
#define HSMCI_TNPR_TXNPTR_Pos 0
#define HSMCI_TNPR_TXNPTR_Msk (0xffffffffu << HSMCI_TNPR_TXNPTR_Pos) /**< \brief (HSMCI_TNPR) Transmit Next Pointer */
#define HSMCI_TNPR_TXNPTR(value) ((HSMCI_TNPR_TXNPTR_Msk & ((value) << HSMCI_TNPR_TXNPTR_Pos)))
/* -------- HSMCI_TNCR : (HSMCI Offset: 0x11C) Transmit Next Counter Register -------- */
#define HSMCI_TNCR_TXNCTR_Pos 0
#define HSMCI_TNCR_TXNCTR_Msk (0xffffu << HSMCI_TNCR_TXNCTR_Pos) /**< \brief (HSMCI_TNCR) Transmit Next Counter */
#define HSMCI_TNCR_TXNCTR(value) ((HSMCI_TNCR_TXNCTR_Msk & ((value) << HSMCI_TNCR_TXNCTR_Pos)))
/* -------- HSMCI_PTCR : (HSMCI Offset: 0x120) Transfer Control Register -------- */
#define HSMCI_PTCR_RXTEN (0x1u << 0) /**< \brief (HSMCI_PTCR) Receiver Transfer Enable */
#define HSMCI_PTCR_RXTDIS (0x1u << 1) /**< \brief (HSMCI_PTCR) Receiver Transfer Disable */
#define HSMCI_PTCR_TXTEN (0x1u << 8) /**< \brief (HSMCI_PTCR) Transmitter Transfer Enable */
#define HSMCI_PTCR_TXTDIS (0x1u << 9) /**< \brief (HSMCI_PTCR) Transmitter Transfer Disable */
/* -------- HSMCI_PTSR : (HSMCI Offset: 0x124) Transfer Status Register -------- */
#define HSMCI_PTSR_RXTEN (0x1u << 0) /**< \brief (HSMCI_PTSR) Receiver Transfer Enable */
#define HSMCI_PTSR_TXTEN (0x1u << 8) /**< \brief (HSMCI_PTSR) Transmitter Transfer Enable */
/*@}*/
#endif /* _SAM4E_HSMCI_COMPONENT_ */
# Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "google/cloud/logging"
module Google
module Cloud
module Logging
##
# Default log name to be used for Stackdriver Logging
DEFAULT_LOG_NAME = Middleware::DEFAULT_LOG_NAME
##
# Railtie
#
# Adds the {Google::Cloud::Logging::Middleware} to Rack in a Rails
# environment. The middleware will set `env['rack.logger']` to a
# {Google::Cloud::Logging::Logger} instance to be used by the Rails
# application.
#
      # The middleware is loaded only when certain conditions are met. These
      # conditions are met when the configuration
      # `Google::Cloud.configure.use_logging` (also available as
      # `Rails.application.config.google_cloud.use_logging` for a Rails
      # application) is set to `true`, or when that configuration is left
      # unset but `Rails.env.production?` is `true`.
#
# When loaded, the {Google::Cloud::Logging::Middleware} will be inserted
      # before the `Rails::Rack::Logger` middleware, which allows it to set the
# `env['rack.logger']` in place of Rails's default logger.
# See the [Configuration
# Guide](https://googleapis.dev/ruby/stackdriver/latest/file.INSTRUMENTATION_CONFIGURATION.html)
# on how to configure the Railtie and Middleware.
#
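      # A minimal sketch of opting in explicitly outside of production; the
      # initializer location is an assumption, not something defined by this
      # file:
      #
      #   # config/environments/staging.rb (hypothetical)
      #   Rails.application.configure do
      #     config.google_cloud.use_logging = true
      #   end
      #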
class Railtie < ::Rails::Railtie
config.google_cloud = ::ActiveSupport::OrderedOptions.new unless
config.respond_to? :google_cloud
config.google_cloud.logging = ::ActiveSupport::OrderedOptions.new
config.google_cloud.logging.monitored_resource =
::ActiveSupport::OrderedOptions.new
config.google_cloud.logging.monitored_resource.labels =
::ActiveSupport::OrderedOptions.new
initializer "Stackdriver.Logging", before: :initialize_logger do |app|
self.class.consolidate_rails_config app.config
self.class.init_middleware app if Cloud.configure.use_logging
end
##
# @private Init Logging integration for Rails. Setup configuration and
# insert the Middleware.
def self.init_middleware app
project_id = Logging.configure.project_id
credentials = Logging.configure.credentials
resource_type = Logging.configure.monitored_resource.type
resource_labels = Logging.configure.monitored_resource.labels
log_name = Logging.configure.log_name
labels = Logging.configure.labels
logging = Google::Cloud::Logging.new project_id: project_id,
credentials: credentials
resource =
Logging::Middleware.build_monitored_resource resource_type,
resource_labels
Middleware.logger = logging.logger log_name, resource, labels
# Set the default Rails logger
if Logging.configure.set_default_logger_on_rails_init
set_default_logger
end
init_callback = -> { set_default_logger }
app.middleware.insert_before Rails::Rack::Logger,
Google::Cloud::Logging::Middleware,
logger: Middleware.logger,
on_init: init_callback
end
##
# This should be called once the application determines that it is safe
# to start background threads and open gRPC connections. It informs the
# middleware system that it is safe to use Google Cloud Logging. This is
# called during Rails initialization when the
# `set_default_logger_on_rails_init` configuration is set.
#
# Generally, this matters if the application forks worker processes;
# this method should be called only after workers are forked, since
# threads and network connections interact badly with fork. For example,
# when running Puma in [clustered
# mode](https://github.com/puma/puma#clustered-mode), this method should
# be called in an `on_worker_boot` block.
#
        # If the application does not fork, this method can be called at any
        # time early in the application initialization process, or it can be
        # invoked automatically by setting the
        # `set_default_logger_on_rails_init` configuration.
#
# If the `set_default_logger_on_rails_init` configuration is not set,
# and {Railtie.set_default_logger} is not called in a post-fork hook,
# the default Rails logger object will not be set to use the Google
# Cloud Logging Logger object. For best results, an application should
# call this method at the appropriate time, such as a post-fork hook.
#
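        # @example Minimal sketch of a post-fork hook for Puma clustered mode
        #   # config/puma.rb (assumed location)
        #   on_worker_boot do
        #     Google::Cloud::Logging::Railtie.set_default_logger
        #   end
        #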
def self.set_default_logger
return if Middleware.logger.nil?
return if Rails.logger.is_a? Google::Cloud::Logging::Logger
# configure the Middleware logger to use the same settings as Rails
Middleware.logger.level = Rails.logger.level
# TODO: are there more settings to be set here?
# Replace the Rails default logger
Rails.application.config.logger = Middleware.logger
Rails.logger = Middleware.logger
end
##
# @private Consolidate Rails configuration into Logging instrumentation
# configuration. Also consolidate the `use_logging` setting by verifying
# credentials and Rails environment. The `use_logging` setting will be
# true if credentials are valid, and the setting is manually set to true
# or Rails is in production environment.
#
# @param [Rails::Railtie::Configuration] config The
# Rails.application.config
#
def self.consolidate_rails_config config
merge_rails_config config
init_default_config
# Done if Google::Cloud.configure.use_logging is explicitly false
return if Google::Cloud.configure.use_logging == false
# Verify credentials and set use_logging to false if
# credentials are invalid
unless valid_credentials? Logging.configure.project_id,
Logging.configure.keyfile
Cloud.configure.use_logging = false
return
end
# Otherwise set use_logging to true if Rails is running in production
Google::Cloud.configure.use_logging ||= Rails.env.production?
end
##
# @private Merge Rails configuration into Logging instrumentation
# configuration.
def self.merge_rails_config rails_config # rubocop:disable AbcSize
gcp_config = rails_config.google_cloud
log_config = gcp_config.logging
if Cloud.configure.use_logging.nil?
Cloud.configure.use_logging = gcp_config.use_logging
end
Logging.configure do |config|
config.project_id ||= config.project
config.project_id ||= log_config.project_id || log_config.project
config.project_id ||= gcp_config.project_id || gcp_config.project
config.credentials ||= config.keyfile
config.credentials ||= log_config.credentials || log_config.keyfile
config.credentials ||= gcp_config.credentials || gcp_config.keyfile
config.log_name ||= log_config.log_name
config.labels ||= log_config.labels
config.log_name_map ||= log_config.log_name_map
config.monitored_resource.type ||=
log_config.monitored_resource.type
config.monitored_resource.labels ||=
log_config.monitored_resource.labels.to_h
if config.set_default_logger_on_rails_init.nil?
config.set_default_logger_on_rails_init = \
log_config.set_default_logger_on_rails_init
end
end
end
##
# Fallback to default config values if config parameters not provided.
def self.init_default_config
Logging.configure.project_id ||= Logging.default_project_id
Logging.configure.log_name ||= Middleware::DEFAULT_LOG_NAME
end
##
# @private Verify credentials
def self.valid_credentials? project_id, credentials
# Try authenticate authorize client API. Return false if unable to
# authorize.
begin
# if credentials is nil, get default
credentials ||= Logging::Credentials.default
# only create a new Credentials object if the val isn't one already
unless credentials.is_a? Google::Auth::Credentials
# if credentials is not a Credentials object, create one
Logging::Credentials.new credentials
end
rescue Exception => e
STDOUT.puts "Note: Google::Cloud::Logging is disabled because " \
"it failed to authorize with the service. (#{e.message}) " \
"Falling back to the default Rails logger."
return false
end
if project_id.to_s.empty?
STDOUT.puts "Note: Google::Cloud::Logging is disabled because " \
"the project ID could not be determined. " \
"Falling back to the default Rails logger."
return false
end
true
end
private_class_method :merge_rails_config,
:init_default_config,
:valid_credentials?
end
end
end
end
/*
* Copyright 2020 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.core.impl.score.stream.drools.quad;
import java.util.function.Function;
import java.util.function.Supplier;
import org.optaplanner.core.api.function.PentaFunction;
import org.optaplanner.core.api.function.QuadFunction;
import org.optaplanner.core.api.score.stream.quad.QuadConstraintCollector;
import org.optaplanner.core.impl.score.stream.drools.common.BiTuple;
import org.optaplanner.core.impl.score.stream.drools.common.DroolsAbstractUniCollectingGroupByCollectorProcessor;
import org.optaplanner.core.impl.score.stream.drools.common.QuadTuple;
import org.optaplanner.core.impl.score.stream.drools.common.TriTuple;
final class DroolsQuadToTriGroupByCollectorProcessor<A, B, C, D, ResultContainer, NewA, NewB, NewC> extends
DroolsAbstractUniCollectingGroupByCollectorProcessor<ResultContainer, QuadTuple<A, B, C, D>, BiTuple<NewA, NewB>, TriTuple<NewA, NewB, NewC>> {
private final QuadFunction<A, B, C, D, NewA> groupKeyAMapping;
private final QuadFunction<A, B, C, D, NewB> groupKeyBMapping;
private final Supplier<ResultContainer> supplier;
private final PentaFunction<ResultContainer, A, B, C, D, Runnable> accumulator;
private final Function<ResultContainer, NewC> finisher;
public DroolsQuadToTriGroupByCollectorProcessor(QuadFunction<A, B, C, D, NewA> groupKeyAMapping,
QuadFunction<A, B, C, D, NewB> groupKeyBMapping,
QuadConstraintCollector<A, B, C, D, ResultContainer, NewC> collector) {
this.groupKeyAMapping = groupKeyAMapping;
this.groupKeyBMapping = groupKeyBMapping;
this.supplier = collector.supplier();
this.accumulator = collector.accumulator();
this.finisher = collector.finisher();
}
@Override
protected BiTuple<NewA, NewB> toKey(QuadTuple<A, B, C, D> abcdQuadTuple) {
return new BiTuple<>(groupKeyAMapping.apply(abcdQuadTuple.a, abcdQuadTuple.b, abcdQuadTuple.c, abcdQuadTuple.d),
groupKeyBMapping.apply(abcdQuadTuple.a, abcdQuadTuple.b, abcdQuadTuple.c, abcdQuadTuple.d));
}
@Override
protected ResultContainer newContainer() {
return supplier.get();
}
@Override
protected Runnable process(QuadTuple<A, B, C, D> abcdQuadTuple, ResultContainer container) {
return accumulator.apply(container, abcdQuadTuple.a, abcdQuadTuple.b, abcdQuadTuple.c, abcdQuadTuple.d);
}
@Override
protected TriTuple<NewA, NewB, NewC> toResult(BiTuple<NewA, NewB> key, ResultContainer container) {
return new TriTuple<>(key.a, key.b, finisher.apply(container));
}
}
#
# Author:: Daniel DeLeo (<[email protected]>)
# Copyright:: Copyright (c) 2013 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "support/shared/integration/integration_helper"
require "chef/mixin/shell_out"
describe "chef-client" do
include IntegrationSupport
include Chef::Mixin::ShellOut
let(:chef_zero_opts) { {:host => "::1"} }
let(:validation_pem) do
<<-END_VALIDATION_PEM
-----BEGIN RSA PRIVATE KEY-----
MIIEogIBAAKCAQEApubutqtYYQ5UiA9QhWP7UvSmsfHsAoPKEVVPdVW/e8Svwpyf
0Xef6OFWVmBE+W442ZjLOe2y6p2nSnaq4y7dg99NFz6X+16mcKiCbj0RCiGqCvCk
NftHhTgO9/RFvCbmKZ1RKNob1YzLrFpxBHaSh9po+DGWhApcd+I+op+ZzvDgXhNn
0nauZu3rZmApI/r7EEAOjFedAXs7VPNXhhtZAiLSAVIrwU3ZajtSzgXOxbNzgj5O
AAAMmThK+71qPdffAdO4J198H6/MY04qgtFo7vumzCq0UCaGZfmeI1UNE4+xQWwP
HJ3pDAP61C6Ebx2snI2kAd9QMx9Y78nIedRHPwIDAQABAoIBAHssRtPM1GacWsom
8zfeN6ZbI4KDlbetZz0vhnqDk9NVrpijWlcOP5dwZXVNitnB/HaqCqFvyPDY9JNB
zI/pEFW4QH59FVDP42mVEt0keCTP/1wfiDDGh1vLqVBYl/ZphscDcNgDTzNkuxMx
k+LFVxKnn3w7rGc59lALSkpeGvbbIDjp3LUMlUeCF8CIFyYZh9ZvXe4OCxYdyjxb
i8tnMLKvJ4Psbh5jMapsu3rHQkfPdqzztQUz8vs0NYwP5vWge46FUyk+WNm/IhbJ
G3YM22nwUS8Eu2bmTtADSJolATbCSkOwQ1D+Fybz/4obfYeGaCdOqB05ttubhenV
ShsAb7ECgYEA20ecRVxw2S7qA7sqJ4NuYOg9TpfGooptYNA1IP971eB6SaGAelEL
awYkGNuu2URmm5ElZpwJFFTDLGA7t2zB2xI1FeySPPIVPvJGSiZoFQOVlIg9WQzK
7jTtFQ/tOMrF+bigEUJh5bP1/7HzqSpuOsPjEUb2aoCTp+tpiRGL7TUCgYEAwtns
g3ysrSEcTzpSv7fQRJRk1lkBhatgNd0oc+ikzf74DaVLhBg1jvSThDhiDCdB59mr
Jh41cnR1XqE8jmdQbCDRiFrI1Pq6TPaDZFcovDVE1gue9x86v3FOH2ukPG4d2/Xy
HevXjThtpMMsWFi0JYXuzXuV5HOvLZiP8sN3lSMCgYANpdxdGM7RRbE9ADY0dWK2
V14ReTLcxP7fyrWz0xLzEeCqmomzkz3BsIUoouu0DCTSw+rvAwExqcDoDylIVlWO
fAifz7SeZHbcDxo+3TsXK7zwnLYsx7YNs2+aIv6hzUUbMNmNmXMcZ+IEwx+mRMTN
lYmZdrA5mr0V83oDFPt/jQKBgC74RVE03pMlZiObFZNtheDiPKSG9Bz6wMh7NWMr
c37MtZLkg52mEFMTlfPLe6ceV37CM8WOhqe+dwSGrYhOU06dYqUR7VOZ1Qr0aZvo
fsNPu/Y0+u7rMkgv0fs1AXQnvz7kvKaF0YITVirfeXMafuKEtJoH7owRbur42cpV
YCAtAoGAP1rHOc+w0RUcBK3sY7aErrih0OPh9U5bvJsrw1C0FIZhCEoDVA+fNIQL
syHLXYFNy0OxMtH/bBAXBGNHd9gf5uOnqh0pYcbe/uRAxumC7Rl0cL509eURiA2T
+vFmf54y9YdnLXaqv+FhJT6B6V7WX7IpU9BMqJY1cJYXHuHG2KA=
-----END RSA PRIVATE KEY-----
END_VALIDATION_PEM
end
let(:cache_path) do
Dir.mktmpdir
end
let(:basic_config_file) do
<<-END_CLIENT_RB
chef_server_url "http://[::1]:8900"
validation_key '#{path_to('config/validator.pem')}'
cache_path '#{cache_path}'
client_key '#{cache_path}/client.pem'
END_CLIENT_RB
end
let(:client_rb_content) do
basic_config_file
end
let(:chef_dir) { File.join(File.dirname(__FILE__), "..", "..", "..", "bin") }
let(:chef_client_cmd) { %Q{ruby '#{chef_dir}/chef-client' --minimal-ohai -c "#{path_to('config/client.rb')}" -lwarn} }
after do
FileUtils.rm_rf(cache_path)
end
# Some Solaris test platforms are too old for IPv6. These tests should not
# otherwise be platform dependent, so exclude solaris
when_the_chef_server "is running on IPv6", :not_supported_on_solaris, :not_supported_on_gce do
when_the_repository "has a cookbook with a no-op recipe" do
before do
cookbook "noop", "1.0.0", { }, "recipes" => {"default.rb" => "#raise 'foo'"}
file "config/client.rb", client_rb_content
file "config/validator.pem", validation_pem
end
it "should complete with success" do
result = shell_out("#{chef_client_cmd} -o 'noop::default'", :cwd => chef_dir)
result.error!
end
end
when_the_repository "has a cookbook that hits server APIs" do
before do
recipe=<<-END_RECIPE
actual_item = data_bag_item("expect_bag", "expect_item")
if actual_item.key?("expect_key") and actual_item["expect_key"] == "expect_value"
Chef::Log.info "lookin good"
else
Chef::Log.error("!" * 80)
raise "unexpected data bag item content \#{actual_item.inspect}"
Chef::Log.error("!" * 80)
end
END_RECIPE
data_bag("expect_bag", { "expect_item" => {"expect_key" => "expect_value"} })
cookbook "api-smoke-test", "1.0.0", { }, "recipes" => {"default.rb" => recipe}
end
before do
file "config/client.rb", client_rb_content
file "config/validator.pem", validation_pem
end
it "should complete with success" do
result = shell_out("#{chef_client_cmd} -o 'api-smoke-test::default'", :cwd => chef_dir)
result.error!
end
end
end
end
/*
* OCILIB - C Driver for Oracle (C Wrapper for Oracle OCI)
*
* Website: http://www.ocilib.net
*
* Copyright (c) 2007-2016 Vincent ROGIER <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "ocilib_internal.h"
/* ********************************************************************************************* *
* STRINGS MESSAGES
* ********************************************************************************************* */
static otext * OCILib_TypeNames[OCI_IPC_COUNT] =
{
OTEXT("Oracle memory"),
OTEXT("boolean pointer"),
OTEXT("generic pointer"),
OTEXT("short pointer"),
OTEXT("int pointer"),
OTEXT("big_int pointer"),
OTEXT("double pointer"),
OTEXT("float pointer"),
OTEXT("string pointer"),
OTEXT("function callback"),
OTEXT("Error handle"),
OTEXT("TypeInfo handle"),
OTEXT("Connection handle"),
OTEXT("Pool handle"),
OTEXT("Transaction handle"),
OTEXT("Statement handle"),
OTEXT("Resultset handle"),
OTEXT("Column handle"),
OTEXT("Date handle"),
OTEXT("Timestamp handle"),
OTEXT("Interval handle"),
OTEXT("Lob handle"),
OTEXT("File handle"),
OTEXT("Long handle"),
OTEXT("Object handle"),
OTEXT("Collection handle"),
OTEXT("Collection iterator handle"),
OTEXT("Collection element handle"),
OTEXT("Hash Table handle"),
OTEXT("Thread handle"),
OTEXT("Mutex handle"),
OTEXT("Bind handle"),
OTEXT("Ref handle"),
OTEXT("Direct Path handle"),
OTEXT("Subscription handle"),
OTEXT("Event handle"),
OTEXT("Array handle"),
OTEXT("Message handle"),
OTEXT("Enqueue handle"),
OTEXT("Dequeue handle"),
OTEXT("Agent handle"),
OTEXT("Internal list handle"),
OTEXT("Internal list item handle"),
OTEXT("Internal array of bind handles"),
OTEXT("Internal define handle"),
OTEXT("Internal array of define handles"),
OTEXT("Internal hash entry handle"),
OTEXT("Internal array of hash entry handles"),
OTEXT("Internal hash value handle"),
OTEXT("Internal thread key handle"),
OTEXT("Internal Oracle date handle"),
OTEXT("Internal C time structure"),
OTEXT("Internal array of resultset handles"),
OTEXT("Internal array of PL/SQL sizes integers"),
OTEXT("Internal array of PL/SQL return codes integers"),
OTEXT("Internal server output handle"),
OTEXT("Internal array of indicator integers"),
OTEXT("Internal array of buffer length integers"),
OTEXT("Internal array of data buffers"),
OTEXT("Internal Long handle data buffer"),
OTEXT("Internal trace info structure"),
OTEXT("Internal array of direct path columns"),
OTEXT("Internal array of batch error objects")
};
#if defined(OCI_CHARSET_WIDE) && !defined(_MSC_VER)
static otext * OCILib_ErrorMsg[OCI_ERR_COUNT] =
{
OTEXT("No error"),
OTEXT("OCILIB has not been initialized"),
OTEXT("Cannot load OCI shared library (%ls)"),
OTEXT("Cannot load OCI symbols from shared library"),
OTEXT("OCILIB has not been initialized in multi threaded mode"),
OTEXT("Memory allocation failure (type %ls, size : %d)"),
OTEXT("Feature not available (%ls) "),
OTEXT("A null %ls has been provided"),
OTEXT("Oracle data type (sql code %d) not supported for this operation "),
OTEXT("Unknown identifier %c while parsing SQL"),
OTEXT("Unknown argument %d while retrieving data"),
OTEXT("Index %d out of bounds"),
OTEXT("Found %d non freed %ls"),
OTEXT("Maximum number of binds (%d) already reached"),
OTEXT("Object attribute '%ls' not found"),
OTEXT("The integer parameter value must be at least %d"),
OTEXT("Elements are not compatible"),
OTEXT("The statement must be %ls to perform this operation"),
OTEXT("The statement is not scrollable"),
OTEXT("Name or position '%ls' already binded to the statement"),
OTEXT("Invalid new size for bind arrays (initial %d, current %d, new %d)"),
OTEXT("Column '%ls' not find in table '%ls'"),
OTEXT("Unable to perform this operation on a %ls direct path process"),
OTEXT("Cannot create OCI environment"),
OTEXT("Name or position '%ls' previously binded with different data type"),
OTEXT("Object '%ls' type does not match the requested object type"),
OTEXT("Item '%ls' (type %d) not found"),
OTEXT("Argument '%ls' : Invalid value %d")
};
#else
static otext * OCILib_ErrorMsg[OCI_ERR_COUNT] =
{
OTEXT("No error"),
OTEXT("OCILIB has not been initialized"),
OTEXT("Cannot load OCI shared library (%s)"),
OTEXT("Cannot load OCI symbols from shared library"),
OTEXT("OCILIB has not been initialized in multi threaded mode"),
OTEXT("Memory allocation failure (type %s, size : %d)"),
OTEXT("Feature not available (%s) "),
OTEXT("A null %s has been provided"),
OTEXT("Oracle data type (sql code %d) not supported for this operation "),
OTEXT("Unknown identifier %c while parsing SQL : "),
OTEXT("Unknown argument %d while retrieving data"),
OTEXT("Index %d out of bounds"),
OTEXT("Found %d non freed %s"),
OTEXT("Maximum number of binds (%d) already reached"),
OTEXT("Object attribute '%s' not found"),
OTEXT("The integer parameter value must be at least %d"),
OTEXT("Elements are not compatible"),
OTEXT("The statement must be %s to perform this operation"),
OTEXT("The statement is not scrollable"),
OTEXT("Name or position '%s' already binded to the statement"),
OTEXT("Invalid new size for bind arrays (initial %d, current %d, new %d)"),
OTEXT("Column '%s' not find in table '%s'"),
OTEXT("Unable to perform this operation on a %s direct path process"),
OTEXT("Cannot create OCI environment"),
OTEXT("Name or position '%s' previously binded with different datatype"),
OTEXT("Object '%s' type does not match the requested object type"),
OTEXT("Item '%s' (type %d) not found"),
OTEXT("Argument '%s' : Invalid value %d")
};
#endif
static otext * OCILib_OraFeatures[OCI_FEATURE_COUNT] =
{
OTEXT("Oracle 9.0 support for Unicode data"),
OTEXT("Oracle 9.0 Timestamps and Intervals"),
OTEXT("Oracle 9.2 Direct path date caching"),
OTEXT("Oracle 9.2 Statement caching"),
OTEXT("Oracle 10g R1 LOBs size extensions"),
OTEXT("Oracle 10g R2 Database change notification"),
OTEXT("Oracle 10g R2 remote database startup/shutdown"),
OTEXT("Oracle 10g R2 High Availability"),
OTEXT("Oracle XA Connections"),
OTEXT("Oracle 12c R1 PL/SQL extended support")
};
typedef struct OCI_StmtStateTable
{
int state;
otext *name;
} OCI_StmtStateTable;
static OCI_StmtStateTable OCILib_StmtStates[OCI_STMT_STATES_COUNT] =
{
{ OCI_STMT_CLOSED, OTEXT("closed") },
{ OCI_STMT_PARSED, OTEXT("parsed") },
{ OCI_STMT_PREPARED, OTEXT("prepared") },
{ OCI_STMT_DESCRIBED, OTEXT("described") },
{ OCI_STMT_EXECUTED, OTEXT("executed") }
};
static otext * OCILib_DirPathStates[OCI_DPS_COUNT] =
{
OTEXT("non prepared"),
OTEXT("prepared"),
OTEXT("converted"),
OTEXT("terminated")
};
static otext * OCILib_HandleNames[OCI_HDLE_COUNT] =
{
OTEXT("OCI handle"),
OTEXT("OCI descriptors"),
OTEXT("OCI Object handles")
};
/* ********************************************************************************************* *
* PRIVATE FUNCTIONS
* ********************************************************************************************* */
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionGetError
* --------------------------------------------------------------------------------------------- */
OCI_Error * OCI_ExceptionGetError
(
void
)
{
OCI_Error *err = OCI_ErrorGet(TRUE);
if (err)
{
OCI_ErrorReset(err);
}
return err;
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionRaise
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionRaise
(
OCI_Error *err
)
{
if (err)
{
err->active = TRUE;
if (OCILib.error_handler)
{
OCILib.error_handler(err);
}
err->active = FALSE;
}
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionOCI
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionOCI
(
OCIError *p_err,
OCI_Connection *con,
OCI_Statement *stmt,
boolean warning
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
int dbsize = (int) (osizeof(err->str) - (size_t) 1);
dbtext *dbstr = OCI_StringGetOracleString(err->str, &dbsize);
err->type = (warning ? OCI_ERR_WARNING : OCI_ERR_ORACLE);
err->con = con;
err->stmt = stmt;
/* get oracle description */
OCIErrorGet((dvoid *) p_err, (ub4) 1, (OraText *) NULL, &err->sqlcode,
(OraText *) dbstr, (ub4) dbsize, (ub4) OCI_HTYPE_ERROR);
OCI_StringCopyOracleStringToNativeString(dbstr, err->str, dbcharcount(dbsize));
OCI_StringReleaseOracleString(dbstr);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionNotInitialized
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionNotInitialized
(
void
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_NOT_INITIALIZED;
ostrncat(err->str, OCILib_ErrorMsg[OCI_ERR_NOT_INITIALIZED], osizeof(err->str) - (size_t) 1);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionLoadingShareLib
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionLoadingSharedLib
(
void
)
{
#ifdef OCI_IMPORT_RUNTIME
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_LOADING_SHARED_LIB;
osprintf(err->str, osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_LOADING_SHARED_LIB],
OCI_DL_META_NAME);
}
OCI_ExceptionRaise(err);
#endif
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionLoadingSymbols
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionLoadingSymbols
(
void
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_LOADING_SYMBOLS;
ostrncat(err->str, OCILib_ErrorMsg[OCI_ERR_LOADING_SYMBOLS], osizeof(err->str) - (size_t) 1);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionNotMultithreaded
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionNotMultithreaded
(
void
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_MULTITHREADED;
ostrncat(err->str, OCILib_ErrorMsg[OCI_ERR_MULTITHREADED], osizeof(err->str) - (size_t) 1);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionNullPointer
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionNullPointer
(
int type
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_NULL_POINTER;
osprintf(err->str, osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_NULL_POINTER],
OCILib_TypeNames[type+1]);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionMemory
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionMemory
(
int type,
size_t nb_bytes,
OCI_Connection *con,
OCI_Statement *stmt
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_MEMORY;
err->con = con;
err->stmt = stmt;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_MEMORY],
OCILib_TypeNames[type+1],
nb_bytes);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionNotAvailable
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionNotAvailable
(
OCI_Connection *con,
int feature
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_NOT_AVAILABLE;
err->con = con;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_NOT_AVAILABLE],
OCILib_OraFeatures[feature-1]);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionDatatypeNotSupported
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionDatatypeNotSupported
(
OCI_Connection *con,
OCI_Statement *stmt,
int code
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_DATATYPE_NOT_SUPPORTED;
err->con = con;
err->stmt = stmt;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_DATATYPE_NOT_SUPPORTED],
code);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionParsingError
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionParsingToken
(
OCI_Connection *con,
OCI_Statement *stmt,
otext token
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_PARSE_TOKEN;
err->con = con;
err->stmt = stmt;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_PARSE_TOKEN],
token);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionMappingArgument
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionMappingArgument
(
OCI_Connection *con,
OCI_Statement *stmt,
int arg
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_MAP_ARGUMENT;
err->con = con;
err->stmt = stmt;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_MAP_ARGUMENT],
arg);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionOutOfBounds
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionOutOfBounds
(
OCI_Connection *con,
int value
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_OUT_OF_BOUNDS;
err->con = con;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_OUT_OF_BOUNDS],
value);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionUnfreedData
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionUnfreedData
(
int type_elem,
int nb_elem
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_UNFREED_DATA;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_UNFREED_DATA],
nb_elem, OCILib_HandleNames[type_elem-1]);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionRuntimeLoading
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionMaxBind
(
OCI_Statement *stmt
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_MAX_BIND;
err->stmt = stmt;
if (stmt)
{
err->con = stmt->con;
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_MAX_BIND],
OCI_BIND_MAX);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionAttributeNotFound
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionAttributeNotFound
(
OCI_Connection *con,
const otext *attr
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_ATTR_NOT_FOUND;
err->con = con;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_ATTR_NOT_FOUND],
attr);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionMinimumValue
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionMinimumValue
(
OCI_Connection *con,
OCI_Statement *stmt,
int min
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_MIN_VALUE;
err->con = con;
err->stmt = stmt;
osprintf(err->str, osizeof(err->str) - (size_t) 1, OCILib_ErrorMsg[OCI_ERR_MIN_VALUE], min);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionTypeNotCompatible
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionTypeNotCompatible
(
OCI_Connection *con
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_NOT_COMPATIBLE;
err->con = con;
ostrncat(err->str, OCILib_ErrorMsg[OCI_ERR_NOT_COMPATIBLE], osizeof(err->str) - (size_t) 1);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionStatementState
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionStatementState
(
OCI_Statement *stmt,
int state
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
int i, index = 0;
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_STMT_STATE;
err->stmt = stmt;
if (stmt)
{
err->con = stmt->con;
}
for(i = 0; i < OCI_STMT_STATES_COUNT; i++)
{
if (state == OCILib_StmtStates[i].state)
{
index = i;
break;
}
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_STMT_STATE],
OCILib_StmtStates[index].name);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionStatementNotScrollable
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionStatementNotScrollable
(
OCI_Statement *stmt
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_STMT_NOT_SCROLLABLE;
err->stmt = stmt;
if (stmt)
{
err->con = stmt->con;
}
ostrncat(err->str, OCILib_ErrorMsg[OCI_ERR_STMT_NOT_SCROLLABLE], osizeof(err->str) - (size_t) 1);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionBindAlreadyUsed
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionBindAlreadyUsed
(
OCI_Statement *stmt,
const otext * bind
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_BIND_ALREADY_USED;
err->stmt = stmt;
if (stmt)
{
err->con = stmt->con;
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_BIND_ALREADY_USED],
bind);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionBindArraySize
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionBindArraySize
(
OCI_Statement *stmt,
unsigned int maxsize,
unsigned int cursize,
unsigned int newsize
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_BIND_ARRAY_SIZE;
err->stmt = stmt;
if (stmt)
{
err->con = stmt->con;
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_BIND_ARRAY_SIZE],
maxsize, cursize, newsize);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionDirPathColNotFound
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionDirPathColNotFound
(
OCI_DirPath *dp,
const otext * column,
const otext *table
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_COLUMN_NOT_FOUND;
err->stmt = NULL;
if (dp)
{
err->con = dp->con;
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_COLUMN_NOT_FOUND],
column,
table);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionDirPathState
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionDirPathState
(
OCI_DirPath *dp,
int state
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_DIRPATH_STATE;
err->stmt = NULL;
if (dp)
{
err->con = dp->con;
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_DIRPATH_STATE],
OCILib_DirPathStates[state-1]);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionOCIEnvironment
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionOCIEnvironment
(
void
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_CREATE_OCI_ENVIRONMENT;
ostrncat(err->str, OCILib_ErrorMsg[OCI_ERR_CREATE_OCI_ENVIRONMENT], osizeof(err->str) - (size_t) 1);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionRebindBadDatatype
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionRebindBadDatatype
(
OCI_Statement *stmt,
const otext * bind
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_REBIND_BAD_DATATYPE;
err->stmt = stmt;
if (stmt)
{
err->con = stmt->con;
}
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_REBIND_BAD_DATATYPE],
bind);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionTypeInfoWrongType
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionTypeInfoWrongType
(
OCI_Connection *con,
const otext * name
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_TYPEINFO_DATATYPE;
err->stmt = NULL;
err->con = con;
osprintf(err->str,
osizeof(err->str) - (size_t) 1,
OCILib_ErrorMsg[OCI_ERR_TYPEINFO_DATATYPE],
name);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionItemNotFound
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionItemNotFound
(
OCI_Connection *con,
OCI_Statement *stmt,
const otext *name,
unsigned int type
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_ITEM_NOT_FOUND;
err->stmt = stmt;
err->con = con;
osprintf(err->str,
osizeof(err->str) - (size_t)1,
OCILib_ErrorMsg[OCI_ERR_ITEM_NOT_FOUND],
name, type);
}
OCI_ExceptionRaise(err);
}
/* --------------------------------------------------------------------------------------------- *
* OCI_ExceptionArgInvalidValue
* --------------------------------------------------------------------------------------------- */
void OCI_ExceptionArgInvalidValue
(
OCI_Connection *con,
OCI_Statement *stmt,
const otext *name,
unsigned int value
)
{
OCI_Error *err = OCI_ExceptionGetError();
if (err)
{
err->type = OCI_ERR_OCILIB;
err->libcode = OCI_ERR_ARG_INVALID_VALUE;
err->stmt = stmt;
err->con = con;
osprintf(err->str,
osizeof(err->str) - (size_t)1,
OCILib_ErrorMsg[OCI_ERR_ARG_INVALID_VALUE],
name, value);
}
OCI_ExceptionRaise(err);
}
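/* --------------------------------------------------------------------------------------------- *
 * Usage sketch (illustrative only, not part of OCILIB): the typical call pattern for the
 * exception helpers above, using OCI_ExceptionMemory() as an example. The wrapper name
 * demo_alloc_or_raise and its ipc_type argument are hypothetical, and <stdlib.h> is assumed
 * to be available for malloc().
 * --------------------------------------------------------------------------------------------- */
static void * demo_alloc_or_raise
(
    int             ipc_type,
    size_t          nb_bytes,
    OCI_Connection *con,
    OCI_Statement  *stmt
)
{
    void *ptr = malloc(nb_bytes);
    if (!ptr)
    {
        /* record the failed allocation and raise it through the library's error handler */
        OCI_ExceptionMemory(ipc_type, nb_bytes, con, stmt);
    }
    return ptr;
}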
| Java |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/lookoutequipment/LookoutEquipment_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Json
{
class JsonValue;
class JsonView;
} // namespace Json
} // namespace Utils
namespace LookoutEquipment
{
namespace Model
{
/**
* <p> Specifies configuration information for the input data for the inference,
* including input data S3 location. </p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/lookoutequipment-2020-12-15/InferenceS3InputConfiguration">AWS
* API Reference</a></p>
*/
class AWS_LOOKOUTEQUIPMENT_API InferenceS3InputConfiguration
{
public:
InferenceS3InputConfiguration();
InferenceS3InputConfiguration(Aws::Utils::Json::JsonView jsonValue);
InferenceS3InputConfiguration& operator=(Aws::Utils::Json::JsonView jsonValue);
Aws::Utils::Json::JsonValue Jsonize() const;
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline const Aws::String& GetBucket() const{ return m_bucket; }
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline bool BucketHasBeenSet() const { return m_bucketHasBeenSet; }
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline void SetBucket(const Aws::String& value) { m_bucketHasBeenSet = true; m_bucket = value; }
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline void SetBucket(Aws::String&& value) { m_bucketHasBeenSet = true; m_bucket = std::move(value); }
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline void SetBucket(const char* value) { m_bucketHasBeenSet = true; m_bucket.assign(value); }
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline InferenceS3InputConfiguration& WithBucket(const Aws::String& value) { SetBucket(value); return *this;}
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline InferenceS3InputConfiguration& WithBucket(Aws::String&& value) { SetBucket(std::move(value)); return *this;}
/**
* <p>The bucket containing the input dataset for the inference. </p>
*/
inline InferenceS3InputConfiguration& WithBucket(const char* value) { SetBucket(value); return *this;}
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline const Aws::String& GetPrefix() const{ return m_prefix; }
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline bool PrefixHasBeenSet() const { return m_prefixHasBeenSet; }
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline void SetPrefix(const Aws::String& value) { m_prefixHasBeenSet = true; m_prefix = value; }
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline void SetPrefix(Aws::String&& value) { m_prefixHasBeenSet = true; m_prefix = std::move(value); }
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline void SetPrefix(const char* value) { m_prefixHasBeenSet = true; m_prefix.assign(value); }
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline InferenceS3InputConfiguration& WithPrefix(const Aws::String& value) { SetPrefix(value); return *this;}
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline InferenceS3InputConfiguration& WithPrefix(Aws::String&& value) { SetPrefix(std::move(value)); return *this;}
/**
* <p>The prefix for the S3 bucket used for the input data for the inference. </p>
*/
inline InferenceS3InputConfiguration& WithPrefix(const char* value) { SetPrefix(value); return *this;}
private:
Aws::String m_bucket;
bool m_bucketHasBeenSet;
Aws::String m_prefix;
bool m_prefixHasBeenSet;
};
} // namespace Model
} // namespace LookoutEquipment
} // namespace Aws
| Java |
/**
* Copyright (C) 2013-2016 The Rythm Engine project
* for LICENSE and other details see:
* https://github.com/rythmengine/rythmengine
*/
package org.rythmengine.cache;
/*-
* #%L
* Rythm Template Engine
* %%
* Copyright (C) 2017 - 2021 OSGL (Open Source General Library)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.rythmengine.extension.ICacheService;
import org.rythmengine.extension.ICacheServiceFactory;
/**
* Created with IntelliJ IDEA.
* User: luog
* Date: 2/12/13
* Time: 8:45 AM
* To change this template use File | Settings | File Templates.
*/
class EhCacheServiceFactory implements ICacheServiceFactory {
@Override
public ICacheService get() {
return EhCacheService.INSTANCE;
}
}
| Java |
/**
* Licensed to the Austrian Association for Software Tool Integration (AASTI)
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. The AASTI licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.openengsb.core.api.security;
/**
* Serves as baseclass for all Security-related Exceptions in the OpenEngSB (similar to
* {@link java.security.GeneralSecurityException}
*
*/
public abstract class OpenEngSBSecurityException extends Exception {
private static final long serialVersionUID = -2939758040088724227L;
public OpenEngSBSecurityException() {
}
public OpenEngSBSecurityException(String message, Throwable cause) {
super(message, cause);
}
public OpenEngSBSecurityException(String message) {
super(message);
}
public OpenEngSBSecurityException(Throwable cause) {
super(cause);
}
}
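/*
 * Usage sketch (illustrative, not part of the OpenEngSB API): concrete security exceptions are
 * expected to extend this abstract base class; the subclass name below is hypothetical.
 */
class ExampleAuthenticationFailedException extends OpenEngSBSecurityException {
    private static final long serialVersionUID = 1L;

    ExampleAuthenticationFailedException(String message) {
        super(message);
    }
}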
| Java |
package org.andidev.applicationname.format.custom;
import java.util.Locale;
import org.andidev.applicationname.format.annotation.CustomFormat;
import org.apache.commons.lang3.StringUtils;
import org.springframework.expression.EvaluationContext;
import org.springframework.expression.ExpressionParser;
import org.springframework.expression.spel.SpelParseException;
import org.springframework.expression.spel.standard.SpelExpressionParser;
import org.springframework.format.Printer;
public class CustomPrinter implements Printer<Object> {
private final String spelExpression;
private final EvaluationContext evaluationContext;
public CustomPrinter(String spelExpression, EvaluationContext evaluationContext) {
this.spelExpression = StringUtils.defaultIfBlank(spelExpression, null);
this.evaluationContext = evaluationContext;
}
@Override
public String print(Object object, Locale locale) {
if (spelExpression == null) {
return null;
}
ExpressionParser parser = new SpelExpressionParser();
try {
Object result = parser.parseExpression(spelExpression).getValue(evaluationContext, object);
return result.toString();
} catch (SpelParseException e) {
throw new CustomFormatException("Could not parse spel expression = \"" + spelExpression + "\" in " + CustomFormat.class.getSimpleName() + " annotation: " + e.getMessage());
}
}
}
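/*
 * Usage sketch (illustrative, not part of this codebase): wires CustomPrinter up with a SpEL
 * expression and a standard evaluation context. The expression and sample value are examples only.
 */
class CustomPrinterUsageExample {
    public static void main(String[] args) {
        // context against which the SpEL expression is evaluated
        org.springframework.expression.spel.support.StandardEvaluationContext context =
                new org.springframework.expression.spel.support.StandardEvaluationContext();
        // "#this" refers to the object passed to print(); here it is upper-cased for display
        CustomPrinter printer = new CustomPrinter("#this.toUpperCase()", context);
        System.out.println(printer.print("hello", java.util.Locale.ENGLISH)); // prints HELLO
    }
}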
| Java |
package com.capitalone.dashboard.model;
import com.capitalone.dashboard.util.FeatureCollectorConstants;
import org.springframework.stereotype.Component;
/**
 * Collector implementation for TestResult that stores system configuration
* settings required for source system data connection (e.g., API tokens, etc.)
*/
@Component
public class TestResultCollector extends Collector {
/**
 * Creates a static prototype of the TestResult Collector, which includes any
* specific settings or configuration required for the use of this
* collector, including settings for connecting to any source systems.
*
* @return A configured TestResult Collector prototype
*/
public static TestResultCollector prototype() {
TestResultCollector protoType = new TestResultCollector();
protoType.setName(FeatureCollectorConstants.JIRA_XRAY);
protoType.setOnline(true);
protoType.setEnabled(true);
protoType.setCollectorType(CollectorType.Test);
protoType.setLastExecuted(System.currentTimeMillis());
return protoType;
}
} | Java |
/*
* Copyright 2015 - 2017 Atlarge Research Team,
* operating at Technische Universiteit Delft
* and Vrije Universiteit Amsterdam, the Netherlands.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package science.atlarge.granula.modeller.rule.derivation.time;
import science.atlarge.granula.modeller.platform.info.BasicInfo;
import science.atlarge.granula.modeller.platform.info.InfoSource;
import science.atlarge.granula.modeller.platform.operation.Operation;
import science.atlarge.granula.modeller.rule.derivation.DerivationRule;
import science.atlarge.granula.modeller.platform.info.Info;
import science.atlarge.granula.modeller.platform.info.Source;
import java.util.ArrayList;
import java.util.List;
public class ParentalEndTimeDerivation extends DerivationRule {
public ParentalEndTimeDerivation(int level) {
super(level);
}
@Override
public boolean execute() {
Operation operation = (Operation) entity;
Operation parent = operation.getParent();
Info sourceInfo = parent.getInfo("EndTime");
long endTime = Long.parseLong(sourceInfo.getValue());
BasicInfo info = new BasicInfo("EndTime");
List<Source> sources = new ArrayList<>();
sources.add(new InfoSource("ParentalEndTime", sourceInfo));
info.setDescription("The [EndTime] of this operation is derived from the [EndTime] of its parent operation.");
info.addInfo(String.valueOf(endTime), sources);
operation.addInfo(info);
return true;
}
}
| Java |
<!DOCTYPE html>
<html lang="en-us">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8"/>
<meta name="viewport" content="width=device-width, initial-scale=1, maximum-scale=1.0, user-scalable=no"/>
<title>Generates</title>
<link href="//fonts.googleapis.com/icon?family=Material+Icons" rel="stylesheet">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/materialize/0.97.3/css/materialize.min.css" type="text/css" media="screen,projection">
<link href="//goswagger.io//css/style.css" type="text/css" rel="stylesheet" media="screen,projection"/>
<link rel="stylesheet" href="//goswagger.io//css/prism.css" />
<style type="text/css">
footer.page-footer{background-image: url(//goswagger.io//images/default.png);}
</style>
</head>
<body>
<ul id="slide-out" class="side-nav">
<li><a href="//goswagger.io/"><i class="mdi-action-home left"></i>Home</a></li>
<li><a href="//goswagger.io//generate/spec/"><i class="mdi-action-loyalty left"></i>Swagger.json</a></li>
<li><a href="//goswagger.io//generate/client/"><i class="mdi-action-polymer left"></i>Generate Client</a></li>
<li><a href="//goswagger.io//generate/server/"><i class="mdi-action-perm-data-setting left"></i>Generate Server</a></li>
<li><a href="//goswagger.io//usage/validate/"><i class="mdi-action-perm-media left"></i>Validate Spec</a></li>
</ul>
<div id="index-banner" class="parallax-container">
<a data-activates="slide-out" class="btn-floating button-collapse" style="top: 5px; left: 5px;"><i class="mdi-navigation-menu"></i></a>
<div class="section no-pad-bot">
<div class="container">
<h1 class="header center teal-text text-lighten-2"><a href="//goswagger.io/">go-swagger toolkit</a></h1>
<div class="row center">
<h5 class="header col s12 light">Swagger 2.0 describes your APIs for you, so you don't have to</h5>
</div>
<div class="row center">
<a href="https://github.com/go-swagger/go-swagger"><img src="//goswagger.io//images/github2-dreamstale35.png"></a>
</div>
</div>
</div>
<div class="parallax">
<img src="//goswagger.io//images/default.png">
</div>
</div>
<div class="container">
<div class="section">
<div class="row">
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/">swagger.json generation</a></h4>
<p>
<a href="//goswagger.io//categories/generate/">generate</a>
</p>
<p><p>The toolkit has a command that will let you generate a swagger spec document from your code.
The command integrates with go doc comments, and makes use of structs when it needs to know of
types.</p>
</p>
<p>
9 Nov 2015
</p>
</div>
</div>
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/client/">Generate an API client</a></h4>
<p>
<a href="//goswagger.io//categories/generate/">generate</a>
<a href="//goswagger.io//categories/client/">client</a>
</p>
<p><p>The toolkit has a command that will let you generate a client.</p>
</p>
<p>
23 Oct 2015
</p>
</div>
</div>
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/server/">Generate a server for a swagger spec</a></h4>
<p>
<a href="//goswagger.io//categories/generate/">generate</a>
<a href="//goswagger.io//categories/server/">server</a>
</p>
<p><p>The toolkit has a command that will let you generate a docker friendly server with support for TLS.
You can configure it through environment variables that are commonly used on PaaS services.</p>
</p>
<p>
22 Nov 2015
</p>
</div>
</div>
</div>
<div class="row">
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/meta/">swagger:meta</a></h4>
<p>
<a href="//goswagger.io//categories/spec/">spec</a>
</p>
<p>The swagger:meta annotation flags a file as source for metadata about the API. This is typically a doc.go file with your package documentation. You can specify a Consumes and Produces key which has a new content type on each line Schemes is a tag that is required and allows for a comma separated string composed of: http, https, ws or wss Host and BasePath can be specified but those values will be defaults, they should get substituted when serving the swagger spec.</p>
<p>
14 Nov 2015
<a href="//goswagger.io//tags/meta-data/">#meta data</a>
</p>
</div>
</div>
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/route/">swagger:route</a></h4>
<p>
<a href="//goswagger.io//categories/spec/">spec</a>
</p>
<p><p>A <strong>swagger:route</strong> annotation links a path to a method.
This operation gets a unique id, which is used in various places as method name.
One such usage is in method names for client generation for example.</p>
<p>Because there are many routers available, this tool does not try to parse the paths
you provided to your routing library of choice. So you have to specify your path pattern
yourself in valid swagger syntax.</p>
</p>
<p>
14 Nov 2015
<a href="//goswagger.io//tags/operations/">#operations</a>
</p>
</div>
</div>
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/params/">swagger:params</a></h4>
<p>
<a href="//goswagger.io//categories/spec/">spec</a>
</p>
<p><p>The <strong>swagger:params</strong> annotation links a struct to one or more operations. The params in the resulting swagger spec can be composed of several structs.
There are no guarantees given on how property name overlaps are resolved when several structs apply to the same operation.
This tag works very similar to the swagger:model tag except that it produces valid parameter objects instead of schema
objects.</p>
</p>
<p>
14 Nov 2015
<a href="//goswagger.io//tags/operations/">#operations</a>
</p>
</div>
</div>
</div>
<div class="row">
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/response/">swagger:response</a></h4>
<p>
<a href="//goswagger.io//categories/spec/">spec</a>
</p>
<p><p>Reads a struct decorated with <strong>swagger:response</strong> and uses that information to fill up the headers and the schema for a response.
A swagger:route can specify a response name for a status code and then the matching response will be used for that operation in the swagger definition.</p>
</p>
<p>
14 Nov 2015
<a href="//goswagger.io//tags/operations/">#operations</a>
</p>
</div>
</div>
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/model/">swagger:model</a></h4>
<p>
<a href="//goswagger.io//categories/spec/">spec</a>
</p>
<p><p>A <strong>swagger:model</strong> annotation optionally gets a model name as extra data on the line.
when this appears anywhere in a comment for a struct, then that struct becomes a schema
in the definitions object of swagger.</p>
</p>
<p>
14 Nov 2015
<a href="//goswagger.io//tags/definitions/">#definitions</a>
</p>
</div>
</div>
<div class="col m4 s12">
<div class="card-panel">
<h4><a href="//goswagger.io/generate/spec/allOf/">swagger:allOf</a></h4>
<p>
<a href="//goswagger.io//categories/spec/">spec</a>
</p>
<p><p>Marks an embedded type as a member for allOf</p>
</p>
<p>
14 Nov 2015
<a href="//goswagger.io//tags/polymorphism/">#polymorphism</a>
</p>
</div>
</div>
</div>
<div class="row">
<div class="col s3 m1">
<a class="btn-floating btn-large disabled"><i class="mdi-navigation-arrow-back"></i></a>
</div>
<div class="col s6 m10 center"> </div>
<div class="col s3 m1">
<a class="btn-floating btn-large waves-effect waves-light" href="/generate/page/2/"><i class="mdi-navigation-arrow-forward"></i></a>
</div>
</div>
</div>
</div>
<footer class="page-footer">
<div class="footer-copyright">
<div class="container">
© 2015 go-swagger contributors
<div class="right">Design <a class="grey-text text-lighten-4" href="http://pdevty.github.io/blog/">pdevty</a></div>
</div>
</div>
</footer>
<script src="//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.4/jquery.min.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/materialize/0.97.3/js/materialize.min.js"></script>
<script src="//goswagger.io//js/init.js"></script>
<script src="//cdnjs.cloudflare.com/ajax/libs/prism/0.0.1/prism.min.js"></script>
</body>
</html>
| Java |
/*
* Copyright (C) 2015 Apptik Project
* Copyright (C) 2014 Kalin Maldzhanski
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apptik.comm.jus.error;
import io.apptik.comm.jus.NetworkResponse;
/**
* Error indicating that there was an authentication failure when performing a Request.
*/
@SuppressWarnings("serial")
public class AuthError extends RequestError {
public AuthError(NetworkResponse response) {
super(response);
}
public AuthError(NetworkResponse response, String exceptionMessage) {
super(response, exceptionMessage);
}
public AuthError(NetworkResponse response, String exceptionMessage, Throwable reason) {
super(response, exceptionMessage, reason);
}
public AuthError(NetworkResponse response, Throwable reason) {
super(response, reason);
}
}
| Java |
/*!
* UI development toolkit for HTML5 (OpenUI5)
* (c) Copyright 2009-2016 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides control sap.m.PageAccessibleLandmarkInfo.
sap.ui.define(['sap/ui/core/Element', './library'],
function(Element, library) {
"use strict";
/**
* Constructor for a new <code>sap.m.PageAccessibleLandmarkInfo</code> element.
*
* @param {string} [sId] Id for the new element, generated automatically if no id is given
* @param {object} [mSettings] Initial settings for the new element
*
* @class
* Settings for accessible landmarks which can be applied to the container elements of a <code>sap.m.Page</code> control.
* These landmarks are e.g. used by assistive technologies (like screenreaders) to provide a meaningful page overview.
* @extends sap.ui.core.Element
*
* @author SAP SE
* @version 1.42.8
*
* @constructor
* @public
* @alias sap.m.PageAccessibleLandmarkInfo
* @ui5-metamodel This control/element also will be described in the UI5 (legacy) designtime metamodel
*/
var PageAccessibleLandmarkInfo = Element.extend("sap.m.PageAccessibleLandmarkInfo", /** @lends sap.m.PageAccessibleLandmarkInfo.prototype */ { metadata : {
library : "sap.m",
properties : {
/**
* Landmark role of the root container of the corresponding <code>sap.m.Page</code> control.
*
* If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container.
*/
rootRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Region"},
/**
 * Text which describes the landmark of the root container of the corresponding <code>sap.m.Page</code> control.
*
* If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text
* is used.
*/
rootLabel : {type : "string", defaultValue : null},
/**
* Landmark role of the content container of the corresponding <code>sap.m.Page</code> control.
*
* If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container.
*/
contentRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Main"},
/**
 * Text which describes the landmark of the content container of the corresponding <code>sap.m.Page</code> control.
*
* If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text
* is used.
*/
contentLabel : {type : "string", defaultValue : null},
/**
* Landmark role of the header container of the corresponding <code>sap.m.Page</code> control.
*
* If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container.
*/
headerRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Region"},
/**
 * Text which describes the landmark of the header container of the corresponding <code>sap.m.Page</code> control.
*
* If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text
* is used.
*/
headerLabel : {type : "string", defaultValue : null},
/**
* Landmark role of the subheader container of the corresponding <code>sap.m.Page</code> control.
*
* If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container.
*/
subHeaderRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : null},
/**
 * Text which describes the landmark of the subheader container of the corresponding <code>sap.m.Page</code> control.
*
* If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text
* is used.
*/
subHeaderLabel : {type : "string", defaultValue : null},
/**
* Landmark role of the footer container of the corresponding <code>sap.m.Page</code> control.
*
* If set to <code>sap.ui.core.AccessibleLandmarkRole.None</code>, no landmark will be added to the container.
*/
footerRole : {type : "sap.ui.core.AccessibleLandmarkRole", defaultValue : "Region"},
/**
 * Text which describes the landmark of the footer container of the corresponding <code>sap.m.Page</code> control.
*
* If not set (and a landmark different than <code>sap.ui.core.AccessibleLandmarkRole.None</code> is defined), a predefined text
* is used.
*/
footerLabel : {type : "string", defaultValue : null}
}
}});
/**
* Returns the landmark information of the given <code>sap.m.PageAccessibleLandmarkInfo</code> instance
* of the given area (e.g. <code>"root"</code>).
*
* Must only be used with the <code>sap.m.Page</code> control!
*
* @private
*/
PageAccessibleLandmarkInfo._getLandmarkInfo = function(oInstance, sArea) {
if (!oInstance) {
return null;
}
var sRole = null;
var sText = null;
var oPropertyInfo = oInstance.getMetadata().getProperty(sArea + "Role");
if (oPropertyInfo) {
sRole = oInstance[oPropertyInfo._sGetter]();
}
if (!sRole) {
return null;
}
oPropertyInfo = oInstance.getMetadata().getProperty(sArea + "Label");
if (oPropertyInfo) {
sText = oInstance[oPropertyInfo._sGetter]();
}
return [sRole.toLowerCase(), sText];
};
/**
* Writes the landmark information of the given page and area (e.g. <code>"root"</code>).
*
* Must only be used with the <code>sap.m.Page</code> control!
*
* @private
*/
PageAccessibleLandmarkInfo._writeLandmarkInfo = function(oRm, oPage, sArea) {
if (!sap.ui.getCore().getConfiguration().getAccessibility()) {
return;
}
var oInfo = PageAccessibleLandmarkInfo._getLandmarkInfo(oPage.getLandmarkInfo(), sArea);
if (!oInfo) {
return;
}
var oLandMarks = {
role: oInfo[0]
};
if (oInfo[1]) {
oLandMarks["label"] = oInfo[1];
}
oRm.writeAccessibilityState(oPage, oLandMarks);
};
return PageAccessibleLandmarkInfo;
});
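/* Usage sketch (illustrative, not part of the library): a sap.m.Page can be given landmark
 * information through its "landmarkInfo" aggregation. The roles and labels below are assumptions
 * chosen for the example.
 *
 *   var oPage = new sap.m.Page({
 *       title: "Products",
 *       landmarkInfo: new sap.m.PageAccessibleLandmarkInfo({
 *           rootRole: "Region",
 *           rootLabel: "Product catalog",
 *           contentRole: "Main",
 *           contentLabel: "Product list"
 *       })
 *   });
 */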
| Java |
#
# Author:: Sean OMeara (<[email protected]>)
# Cookbook Name:: selinux
# Recipe:: permissive
#
# Copyright 2011, Opscode, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
execute "enable selinux as permissive" do
not_if "getenforce | egrep -qx 'Permissive|Disabled'"
command "setenforce 0"
ignore_failure true
action :run
end
template "/etc/selinux/config" do
source "sysconfig/selinux.erb"
not_if "getenforce | grep -qx 'Disabled'"
variables(
:selinux => "permissive",
:selinuxtype => "targeted",
)
end
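# Usage sketch (illustrative): the recipe above is applied by adding it to a node's run list,
# for example with knife or inside a role definition. The node name "web01" is a placeholder.
#
#   knife node run_list add web01 'recipe[selinux::permissive]'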
| Java |
/**
* File: app/project/ProjOpen.js
* Author: liusha
*/
Ext.define('xdfn.project.ProjOpen', {
extend: 'xdfn.project.ui.ProjOpen',
grid: null,
initComponent: function() {
var me = this;
me.openStore = Ext.create('xdfn.project.store.ProjOpenJsonStore');
me.rowEditing = Ext.create('Ext.grid.plugin.RowEditing', {
errorSummary: false
});
me.callParent(arguments);
me.down('button[text="增加记录"]').on('click', me.OnAddProjOpenBtnClick, me);
me.down('button[text="删除记录"]').on('click', me.OnDeleteProjOpenBtnClick, me);
me.down('button[text="导出"]').on('click', me.OnExportProjOpenBtnClick, me);
me.rowEditing.on('edit', me.OnGridEdit, me);
me.rowEditing.on('beforeedit', me.OnGridBeforeEdit, me);
},
OnGridBeforeEdit: function(editor, e, epts) {
xdfn.user.Rights.noRights('XMGL-XMZL-31', function() {
editor.cancelEdit();
});
},
OnGridEdit: function(editor, e) {
var me = this;
if (!e.record.dirty) return;
var url = './proExec.do?method=modifyKbjl';
if (Ext.isEmpty(e.record.get('ID_VIEW'))) {
var rows = me.grid.getSelectionModel().getSelection();
e.record.set('ID_VIEW', rows[0].get('ID_VIEW'));
url = './proExec.do?method=addProKbjl';
}
e.record.commit();
Ext.Ajax.request({
url: url,
method: 'post',
params: {
ID: e.record.get('ID_VIEW'),
V_MANU: e.record.get('V_MANU_VIEW'),
V_MACHINE: e.record.get('V_MACHINE_VIEW'),
N_CAP: e.record.get('N_CAP_VIEW'),
N_SUM_NUM: e.record.get('N_SUM_NUM_VIEW'),
N_SUM_MONEY: e.record.get('N_SUM_MONEY_VIEW'),
V_MEMO: e.record.get('V_MEMO_VIEW')
},
success: function(response, opts) {
var result = Ext.JSON.decode(response.responseText); // the server returns the newly created ID
e.record.set(result.data);
e.record.commit();
},
failure: function(response, opts) {
Ext.Msg.alert('提示','提交失败!');
}
});
},
OnAddProjOpenBtnClick: function(self, e, options) {
var me = this,
sm = me.grid.getSelectionModel(),
rows = sm.getSelection();
xdfn.user.Rights.hasRights('XMGL-XMZL-30', function() {
if (rows.length > 0) {
me.rowEditing.cancelEdit();
me.openStore.insert(0, {});
me.rowEditing.startEdit(0, 0);
} else {
Ext.Msg.alert('提示','请先选择相应的项目!');
}
});
},
OnDeleteProjOpenBtnClick: function(self, e, options) {
var me = this,
grid = self.up('gridpanel'),
store = grid.getStore(),
sm = grid.getSelectionModel(),
rows = sm.getSelection();
xdfn.user.Rights.hasRights('XMGL-XMZL-32', function() {
if (rows.length > 0) {
if (Ext.isEmpty(rows[0].get('ID_VIEW'))) {
me.rowEditing.cancelEdit();
var i = store.indexOf(rows[0]);
store.remove(rows);
var count = store.getCount();
if (count > 0) {
sm.select((i == count)? --i : i);
}
return;
}
Ext.MessageBox.confirm('提示', '确定删除该记录吗?', function(id) {
if (id == 'yes') {
// TODO: delete the record
Ext.Ajax.request({
url: './proExec.do?method=deleteKbjl', // replace with the actual delete request url
method: 'get',
params: {
ID: rows[0].get('ID_VIEW')
},
success: function(response, opts) {
me.rowEditing.cancelEdit();
var i = store.indexOf(rows[0]);
store.remove(rows);
var count = store.getCount();
if (count > 0) {
sm.select((i == count)? --i : i);
}
},
failure: function(response, opts) {
Ext.Msg.alert('提示','删除失败!');
}
});
}
});
} else {
Ext.Msg.alert('提示','请选择要删除的记录!');
}
});
},
OnExportProjOpenBtnClick: function(self, e, options) {
var me = this;
// export as an excel file
xdfn.user.Rights.hasRights('XMGL-XMZL-33', function() {
me.openStore.load({
limit: me.openStore.getTotalCount(),
scope: this,
callback: function(records, operation, success) {
var excelXml = Ext.ux.exporter.Exporter.exportGrid(self.up('gridpanel'), 'excel', {title: '项目开标记录'});
document.location = 'data:application/vnd.ms-excel;base64,' + Ext.ux.exporter.Base64.encode(excelXml);
}
});
});
}
}); | Java |
/*
* Copyright 2006-2008 Kazuyuki Shudo.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package dhtaccess.tools;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.util.Properties;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import dhtaccess.core.DHTAccessor;
public class Remove {
private static final String COMMAND = "rm";
private static final String ENCODE = "UTF-8";
private static final String DEFAULT_GATEWAY = "http://opendht.nyuld.net:5851/";
private static void usage(String command) {
System.out.println("usage: " + command
+ " [-h] [-g <gateway>] [-t <ttl (sec)>] <key> <value> <secret>");
}
public static void main(String[] args) {
int ttl = 3600;
// parse properties
Properties prop = System.getProperties();
String gateway = prop.getProperty("dhtaccess.gateway");
if (gateway == null || gateway.length() <= 0) {
gateway = DEFAULT_GATEWAY;
}
// parse options
Options options = new Options();
options.addOption("h", "help", false, "print help");
options.addOption("g", "gateway", true, "gateway URI, list at http://opendht.org/servers.txt");
options.addOption("t", "ttl", true, "how long (in seconds) to store the value");
CommandLineParser parser = new PosixParser();
CommandLine cmd = null;
try {
cmd = parser.parse(options, args);
} catch (ParseException e) {
System.out.println("There is an invalid option.");
e.printStackTrace();
System.exit(1);
}
String optVal;
if (cmd.hasOption('h')) {
usage(COMMAND);
System.exit(1);
}
optVal = cmd.getOptionValue('g');
if (optVal != null) {
gateway = optVal;
}
optVal = cmd.getOptionValue('t');
if (optVal != null) {
ttl = Integer.parseInt(optVal);
}
args = cmd.getArgs();
// parse arguments
if (args.length < 3) {
usage(COMMAND);
System.exit(1);
}
byte[] key = null, value = null, secret = null;
try {
key = args[0].getBytes(ENCODE);
value = args[1].getBytes(ENCODE);
secret = args[2].getBytes(ENCODE);
} catch (UnsupportedEncodingException e1) {
// NOTREACHED
}
// prepare for RPC
DHTAccessor accessor = null;
try {
accessor = new DHTAccessor(gateway);
} catch (MalformedURLException e) {
e.printStackTrace();
System.exit(1);
}
// RPC
int res = accessor.remove(key, value, ttl, secret);
String resultString;
switch (res) {
case 0:
resultString = "Success";
break;
case 1:
resultString = "Capacity";
break;
case 2:
resultString = "Again";
break;
default:
resultString = "???";
}
System.out.println(resultString);
}
}
| Java |
{% extends "partials/layout.html" %}
{% block body %}
<div class="ui container">
<div class="ui padded segment raised">
<h1>About Me</h1>
<p>I am a software developer with an interest in artificial intelligence and machine
learning.</p>
<p>I try to write some interesting things, be it software or articles. One day I hope to manage it.</p>
</div>
</div>
{% endblock %}
| Java |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.flex.compiler.problems;
import org.apache.flex.compiler.tree.as.IASNode;
/**
* Diagnostic emitted when the code generator detects
* a definition that conflicts with an inherited definition
* from a superclass.
*/
public final class ConflictingInheritedNameInNamespaceProblem extends SemanticProblem
{
public static final String DESCRIPTION =
"A conflict exists with inherited definition ${declName} in namespace ${nsName}.";
public static final int errorCode = 1152;
public ConflictingInheritedNameInNamespaceProblem(IASNode site, String declName, String nsName)
{
super(site);
this.declName = declName;
this.nsName = nsName;
}
public final String declName;
public final String nsName;
}
| Java |
// jQueryTemplate.cs
// Script#/Libraries/jQuery/Templating
// This source code is subject to terms and conditions of the Apache License, Version 2.0.
//
using System;
using System.Collections;
using System.Html;
using System.Net;
using System.Runtime.CompilerServices;
using jQueryApi;
namespace jQueryApi.Templating {
/// <summary>
/// Represents a jQuery template that has been parsed and can
/// be used to generate HTML.
/// </summary>
[ScriptIgnoreNamespace]
[ScriptImport]
public sealed class jQueryTemplate {
private jQueryTemplate() {
}
}
}
| Java |
/*
* Copyright 2018 The Kubeflow Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import * as React from 'react';
import Hr from './Hr';
import { create } from 'react-test-renderer';
describe('Hr', () => {
it('renders with the right styles', () => {
const tree = create(<Hr fields={[]} />);
expect(tree).toMatchSnapshot();
});
});
| Java |
package org.elasticsearch.painless;
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/** Currently just a dummy class for testing a few features not yet exposed by whitelist! */
public class FeatureTestObject2 {
public FeatureTestObject2() {super();}
public static int staticNumberArgument(int injected, int userArgument) {
return injected * userArgument;
}
public static int staticNumberArgument2(int userArgument1, int userArgument2) {
return userArgument1 * userArgument2;
}
}
| Java |
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/*
* Content policy implementation that prevents all loads of images,
* subframes, etc from protocols that don't return data but rather open
* applications (such as mailto).
*/
#include "nsNoDataProtocolContentPolicy.h"
#include "nsIDOMWindow.h"
#include "nsString.h"
#include "nsIProtocolHandler.h"
#include "nsIIOService.h"
#include "nsIExternalProtocolHandler.h"
#include "nsNetUtil.h"
NS_IMPL_ISUPPORTS1(nsNoDataProtocolContentPolicy, nsIContentPolicy)
NS_IMETHODIMP
nsNoDataProtocolContentPolicy::ShouldLoad(uint32_t aContentType,
nsIURI *aContentLocation,
nsIURI *aRequestingLocation,
nsISupports *aRequestingContext,
const nsACString &aMimeGuess,
nsISupports *aExtra,
nsIPrincipal *aRequestPrincipal,
int16_t *aDecision)
{
*aDecision = nsIContentPolicy::ACCEPT;
// Don't block for TYPE_OBJECT since such URIs are sometimes loaded by the
// plugin, so they don't necessarily open external apps
// TYPE_WEBSOCKET loads can only go to ws:// or wss://, so we don't need to
// concern ourselves with them.
if (aContentType != TYPE_DOCUMENT &&
aContentType != TYPE_SUBDOCUMENT &&
aContentType != TYPE_OBJECT &&
aContentType != TYPE_WEBSOCKET) {
// The following are just quick-escapes for the most common cases
// where we would allow the content to be loaded anyway.
nsAutoCString scheme;
aContentLocation->GetScheme(scheme);
if (scheme.EqualsLiteral("http") ||
scheme.EqualsLiteral("https") ||
scheme.EqualsLiteral("ftp") ||
scheme.EqualsLiteral("file") ||
scheme.EqualsLiteral("chrome")) {
return NS_OK;
}
bool shouldBlock;
nsresult rv = NS_URIChainHasFlags(aContentLocation,
nsIProtocolHandler::URI_DOES_NOT_RETURN_DATA,
&shouldBlock);
if (NS_SUCCEEDED(rv) && shouldBlock) {
*aDecision = nsIContentPolicy::REJECT_REQUEST;
}
}
return NS_OK;
}
NS_IMETHODIMP
nsNoDataProtocolContentPolicy::ShouldProcess(uint32_t aContentType,
nsIURI *aContentLocation,
nsIURI *aRequestingLocation,
nsISupports *aRequestingContext,
const nsACString &aMimeGuess,
nsISupports *aExtra,
nsIPrincipal *aRequestPrincipal,
int16_t *aDecision)
{
return ShouldLoad(aContentType, aContentLocation, aRequestingLocation,
aRequestingContext, aMimeGuess, aExtra, aRequestPrincipal,
aDecision);
}
| Java |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.rest.provider;
import org.jboss.pnc.model.ProductMilestone;
import org.jboss.pnc.model.ProductMilestoneRelease;
import org.jboss.pnc.rest.restmodel.ProductMilestoneReleaseRest;
import org.jboss.pnc.spi.datastore.repositories.PageInfoProducer;
import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneReleaseRepository;
import org.jboss.pnc.spi.datastore.repositories.ProductMilestoneRepository;
import org.jboss.pnc.spi.datastore.repositories.SortInfoProducer;
import org.jboss.pnc.spi.datastore.repositories.api.RSQLPredicateProducer;
import javax.ejb.Stateless;
import javax.inject.Inject;
import java.util.function.Function;
@Stateless
public class ProductMilestoneReleaseProvider extends AbstractProvider<ProductMilestoneRelease, ProductMilestoneReleaseRest> {
private ProductMilestoneRepository milestoneRepository;
private ProductMilestoneReleaseRepository releaseRepository;
@Inject
public ProductMilestoneReleaseProvider(ProductMilestoneReleaseRepository releaseRepository,
ProductMilestoneRepository milestoneRepository,
RSQLPredicateProducer rsqlPredicateProducer,
SortInfoProducer sortInfoProducer, PageInfoProducer pageInfoProducer) {
super(releaseRepository, rsqlPredicateProducer, sortInfoProducer, pageInfoProducer);
this.releaseRepository = releaseRepository;
this.milestoneRepository = milestoneRepository;
}
// needed for EJB/CDI
@Deprecated
public ProductMilestoneReleaseProvider() {
}
@Override
protected Function<? super ProductMilestoneRelease, ? extends ProductMilestoneReleaseRest> toRESTModel() {
return ProductMilestoneReleaseRest::new;
}
@Override
protected Function<? super ProductMilestoneReleaseRest, ? extends ProductMilestoneRelease> toDBModel() {
throw new IllegalStateException("ProductMilestoneRelease entity is not to be created via REST");
}
public ProductMilestoneReleaseRest latestForMilestone(Integer milestoneId) {
ProductMilestone milestone = milestoneRepository.queryById(milestoneId);
ProductMilestoneRelease release = milestone == null ? null : releaseRepository.findLatestByMilestone(milestone);
return release == null ? null : toRESTModel().apply(release);
}
}
| Java |
# Release History
## 3.0.0 (2016-08-19)
- update `fkooman/rest` and `fkooman/http` dependencies
## 2.0.0 (2015-11-19)
- major API update for new `fkooman/rest-plugin-authentication`
## 1.0.1 (2015-09-07)
- remove `fkooman/cert-parser` dependency
## 1.0.0
- update `fkooman/rest` and use `fkooman/rest-plugin-authentication`
## 0.1.2
- update `fkooman/cert-parser`
## 0.1.1
- also support `REDIRECT_SSL_CLIENT_CERT` header
## 0.1.0
- initial release
| Java |
/**
* Copyright (c) 2017, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
define(['lodash', 'log', 'event_channel', './abstract-source-gen-visitor', './connector-action-visitor',
'./variable-declaration-visitor', './connector-declaration-visitor', './statement-visitor-factory'],
function(_, log, EventChannel, AbstractSourceGenVisitor, ConnectorActionVisitor,
VariableDeclarationVisitor, ConnectorDeclarationVisitor, StatementVisitorFactory) {
/**
* @param {ASTVisitor} parent - parent visitor
* @constructor
*/
var ConnectorDefinitionVisitor = function (parent) {
AbstractSourceGenVisitor.call(this, parent);
};
ConnectorDefinitionVisitor.prototype = Object.create(AbstractSourceGenVisitor.prototype);
ConnectorDefinitionVisitor.prototype.constructor = ConnectorDefinitionVisitor;
ConnectorDefinitionVisitor.prototype.canVisitConnectorDefinition = function(connectorDefinition){
return true;
};
/**
* Begin the visit and generate the source
* @param {ConnectorDefinition} connectorDefinition - Connector Definition
*/
ConnectorDefinitionVisitor.prototype.beginVisitConnectorDefinition = function(connectorDefinition){
/**
* set the configuration start for the connector definition language construct
* If we need to add additional parameters which are dynamically added to the configuration start
* that particular source generation has to be constructed here
*/
var self = this;
var argumentsSrc = "";
_.forEach(connectorDefinition.getAnnotations(), function(annotation) {
if (!_.isEmpty(annotation.value)) {
var constructedPathAnnotation;
if (annotation.key.indexOf(":") === -1) {
constructedPathAnnotation = '@' + annotation.key + '("' + annotation.value + '")\n';
} else {
constructedPathAnnotation = '@' + annotation.key.split(":")[0] + '(' + annotation.key.split(":")[1] +
' = "' + annotation.value + '")\n';
}
self.appendSource(constructedPathAnnotation);
}
});
_.forEach(connectorDefinition.getArguments(), function(argument, index){
argumentsSrc += argument.type + " ";
argumentsSrc += argument.identifier;
if (connectorDefinition.getArguments().length - 1 != index) {
argumentsSrc += ", ";
}
});
var constructedSourceSegment = 'connector ' + connectorDefinition.getConnectorName() +
' (' + argumentsSrc + ')' + ' {\n';
this.appendSource(constructedSourceSegment);
log.debug('Begin Visit Connector Definition');
};
ConnectorDefinitionVisitor.prototype.visitConnectorDefinition = function(connectorDefinition){
log.debug('Visit Connector Definition');
};
/**
* End visiting the connector definition
* @param {ConnectorDefinition} connectorDefinition - Connector Definition
*/
ConnectorDefinitionVisitor.prototype.endVisitConnectorDefinition = function(connectorDefinition){
this.appendSource("}\n");
this.getParent().appendSource(this.getGeneratedSource());
log.debug('End Visit Connector Definition');
};
/**
* Visit Connector Action
* @param {ConnectorAction} connectorAction
*/
ConnectorDefinitionVisitor.prototype.visitConnectorAction = function(connectorAction){
var connectorActionVisitor = new ConnectorActionVisitor(this);
connectorAction.accept(connectorActionVisitor);
};
/**
* Visit Connector Declaration
* @param {ConnectorDeclaration} connectorDeclaration
*/
ConnectorDefinitionVisitor.prototype.visitConnectorDeclaration = function(connectorDeclaration){
var connectorDeclarationVisitor = new ConnectorDeclarationVisitor(this);
connectorDeclaration.accept(connectorDeclarationVisitor);
};
/**
* Visit Variable Declaration
* @param {VariableDeclaration} variableDeclaration
*/
ConnectorDefinitionVisitor.prototype.visitVariableDeclaration = function(variableDeclaration){
var variableDeclarationVisitor = new VariableDeclarationVisitor(this);
variableDeclaration.accept(variableDeclarationVisitor);
};
/**
* Visit Statements
* @param {Statement} statement
*/
ConnectorDefinitionVisitor.prototype.visitStatement = function (statement) {
var statementVisitorFactory = new StatementVisitorFactory();
var statementVisitor = statementVisitorFactory.getStatementVisitor(statement, this);
statement.accept(statementVisitor);
};
return ConnectorDefinitionVisitor;
}); | Java |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.processor;
import java.io.InputStream;
import java.io.OutputStream;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.Processor;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.TestSupport;
import org.apache.camel.impl.DefaultCamelContext;
import org.apache.camel.impl.DefaultMessage;
import org.apache.camel.spi.DataFormat;
public class UnmarshalProcessorTest extends TestSupport {
public void testDataFormatReturnsSameExchange() throws Exception {
Exchange exchange = createExchangeWithBody(new DefaultCamelContext(), "body");
Processor processor = new UnmarshalProcessor(new MyDataFormat(exchange));
processor.process(exchange);
assertEquals("UnmarshalProcessor did not copy OUT from IN message", "body", exchange.getOut().getBody());
}
public void testDataFormatReturnsAnotherExchange() throws Exception {
CamelContext context = new DefaultCamelContext();
Exchange exchange = createExchangeWithBody(context, "body");
Exchange exchange2 = createExchangeWithBody(context, "body2");
Processor processor = new UnmarshalProcessor(new MyDataFormat(exchange2));
try {
processor.process(exchange);
fail("Should have thrown exception");
} catch (RuntimeCamelException e) {
assertEquals("The returned exchange " + exchange2 + " is not the same as " + exchange + " provided to the DataFormat", e.getMessage());
}
}
public void testDataFormatReturnsMessage() throws Exception {
Exchange exchange = createExchangeWithBody(new DefaultCamelContext(), "body");
Message out = new DefaultMessage();
out.setBody(new Object());
Processor processor = new UnmarshalProcessor(new MyDataFormat(out));
processor.process(exchange);
assertSame("UnmarshalProcessor did not make use of the returned OUT message", out, exchange.getOut());
assertSame("UnmarshalProcessor did change the body bound to the OUT message", out.getBody(), exchange.getOut().getBody());
}
public void testDataFormatReturnsBody() throws Exception {
Exchange exchange = createExchangeWithBody(new DefaultCamelContext(), "body");
Object unmarshalled = new Object();
Processor processor = new UnmarshalProcessor(new MyDataFormat(unmarshalled));
processor.process(exchange);
assertSame("UnmarshalProcessor did not make use of the returned object being returned while unmarshalling", unmarshalled, exchange.getOut().getBody());
}
private static class MyDataFormat implements DataFormat {
private final Object object;
MyDataFormat(Exchange exchange) {
object = exchange;
}
MyDataFormat(Message message) {
object = message;
}
MyDataFormat(Object unmarshalled) {
object = unmarshalled;
}
@Override
public void marshal(Exchange exchange, Object graph, OutputStream stream) throws Exception {
throw new IllegalAccessException("This method is not expected to be used by UnmarshalProcessor");
}
@Override
public Object unmarshal(Exchange exchange, InputStream stream) throws Exception {
return object;
}
}
}
| Java |
/* -*- Mode: C++; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/* This program reads an ELF file and computes information about
* redundancies.
*/
#include <algorithm>
#include <fstream>
#include <string>
#include <vector>
#include <map>
#include <elf.h>
#include <sys/mman.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <getopt.h>
#include <iostream>
#include <iomanip>
#include <cctype>
#include <cstring>

using namespace std;
//----------------------------------------------------------------------
const char* opt_type = "func";
const char* opt_section = ".text";
//----------------------------------------------------------------------
static void
hexdump(ostream& out, const char* bytes, size_t count)
{
hex(out);
size_t off = 0;
while (off < count) {
out.form("%08lx: ", off);
const char* p = bytes + off;
int j = 0;
while (j < 16) {
out.form("%02x", p[j++] & 0xff);
if (j + off >= count)
break;
out.form("%02x ", p[j++] & 0xff);
if (j + off >= count)
break;
}
// Pad
for (; j < 16; ++j)
            out << ((j%2) ? "   " : "  ");
for (j = 0; j < 16; ++j) {
if (j + off < count)
out.put(isprint(p[j]) ? p[j] : '.');
}
out << endl;
off += 16;
}
}
//----------------------------------------------------------------------
int
verify_elf_header(const Elf32_Ehdr* hdr)
{
if (hdr->e_ident[EI_MAG0] != ELFMAG0
|| hdr->e_ident[EI_MAG1] != ELFMAG1
|| hdr->e_ident[EI_MAG2] != ELFMAG2
|| hdr->e_ident[EI_MAG3] != ELFMAG3) {
cerr << "not an elf file" << endl;
return -1;
}
if (hdr->e_ident[EI_CLASS] != ELFCLASS32) {
cerr << "not a 32-bit elf file" << endl;
return -1;
}
if (hdr->e_ident[EI_DATA] != ELFDATA2LSB) {
cerr << "not a little endian elf file" << endl;
return -1;
}
if (hdr->e_ident[EI_VERSION] != EV_CURRENT) {
cerr << "incompatible version" << endl;
return -1;
}
return 0;
}
//----------------------------------------------------------------------
class elf_symbol : public Elf32_Sym
{
public:
elf_symbol(const Elf32_Sym& sym)
{ ::memcpy(static_cast<Elf32_Sym*>(this), &sym, sizeof(Elf32_Sym)); }
friend bool operator==(const elf_symbol& lhs, const elf_symbol& rhs) {
return 0 == ::memcmp(static_cast<const Elf32_Sym*>(&lhs),
static_cast<const Elf32_Sym*>(&rhs),
sizeof(Elf32_Sym)); }
};
//----------------------------------------------------------------------
static const char*
st_bind(unsigned char info)
{
switch (ELF32_ST_BIND(info)) {
case STB_LOCAL: return "local";
case STB_GLOBAL: return "global";
case STB_WEAK: return "weak";
default: return "unknown";
}
}
static const char*
st_type(unsigned char info)
{
switch (ELF32_ST_TYPE(info)) {
case STT_NOTYPE: return "none";
case STT_OBJECT: return "object";
case STT_FUNC: return "func";
case STT_SECTION: return "section";
case STT_FILE: return "file";
default: return "unknown";
}
}
static unsigned char
st_type(const char* type)
{
if (strcmp(type, "none") == 0) {
return STT_NOTYPE;
}
else if (strcmp(type, "object") == 0) {
return STT_OBJECT;
}
else if (strcmp(type, "func") == 0) {
return STT_FUNC;
}
else {
return 0;
}
}
//----------------------------------------------------------------------
typedef vector<elf_symbol> elf_symbol_table;
typedef map< basic_string<char>, elf_symbol_table > elf_text_map;
void
process_mapping(char* mapping, size_t size)
{
const Elf32_Ehdr* ehdr = reinterpret_cast<Elf32_Ehdr*>(mapping);
if (verify_elf_header(ehdr) < 0)
return;
// find the section headers
const Elf32_Shdr* shdrs = reinterpret_cast<Elf32_Shdr*>(mapping + ehdr->e_shoff);
// find the section header string table, .shstrtab
const Elf32_Shdr* shstrtabsh = shdrs + ehdr->e_shstrndx;
const char* shstrtab = mapping + shstrtabsh->sh_offset;
// find the sections we care about
    const Elf32_Shdr *symtabsh = 0, *strtabsh = 0, *textsh = 0;
    int textndx = -1;
for (int i = 0; i < ehdr->e_shnum; ++i) {
basic_string<char> name(shstrtab + shdrs[i].sh_name);
if (name == opt_section) {
textsh = shdrs + i;
textndx = i;
}
else if (name == ".symtab") {
symtabsh = shdrs + i;
}
else if (name == ".strtab") {
strtabsh = shdrs + i;
}
}
// find the .strtab
char* strtab = mapping + strtabsh->sh_offset;
// find the .text
char* text = mapping + textsh->sh_offset;
int textaddr = textsh->sh_addr;
// find the symbol table
int nentries = symtabsh->sh_size / sizeof(Elf32_Sym);
Elf32_Sym* symtab = reinterpret_cast<Elf32_Sym*>(mapping + symtabsh->sh_offset);
// look for symbols in the .text section
elf_text_map textmap;
for (int i = 0; i < nentries; ++i) {
const Elf32_Sym* sym = symtab + i;
if (sym->st_shndx == textndx &&
ELF32_ST_TYPE(sym->st_info) == st_type(opt_type) &&
sym->st_size) {
basic_string<char> functext(text + sym->st_value - textaddr, sym->st_size);
elf_symbol_table& syms = textmap[functext];
if (syms.end() == find(syms.begin(), syms.end(), elf_symbol(*sym)))
syms.insert(syms.end(), *sym);
}
}
int uniquebytes = 0, totalbytes = 0;
int uniquecount = 0, totalcount = 0;
for (elf_text_map::const_iterator entry = textmap.begin();
entry != textmap.end();
++entry) {
const elf_symbol_table& syms = entry->second;
if (syms.size() <= 1)
continue;
int sz = syms.begin()->st_size;
uniquebytes += sz;
totalbytes += sz * syms.size();
uniquecount += 1;
totalcount += syms.size();
for (elf_symbol_table::const_iterator sym = syms.begin(); sym != syms.end(); ++sym)
cout << strtab + sym->st_name << endl;
dec(cout);
cout << syms.size() << " copies of " << sz << " bytes";
cout << " (" << ((syms.size() - 1) * sz) << " redundant bytes)" << endl;
hexdump(cout, entry->first.data(), entry->first.size());
cout << endl;
}
dec(cout);
cout << "bytes unique=" << uniquebytes << ", total=" << totalbytes << endl;
cout << "entries unique=" << uniquecount << ", total=" << totalcount << endl;
}
void
process_file(const char* name)
{
int fd = open(name, O_RDWR);
if (fd >= 0) {
struct stat statbuf;
if (fstat(fd, &statbuf) >= 0) {
size_t size = statbuf.st_size;
void* mapping = mmap(0, size, PROT_READ, MAP_SHARED, fd, 0);
if (mapping != MAP_FAILED) {
process_mapping(static_cast<char*>(mapping), size);
munmap(mapping, size);
}
}
close(fd);
}
}
static void
usage()
{
cerr << "foldelf [--section=<section>] [--type=<type>] [file ...]\n\
--section, -s the section of the ELF file to scan; defaults\n\
to ``.text''. Valid values include any section\n\
of the ELF file.\n\
--type, -t the type of object to examine in the section;\n\
defaults to ``func''. Valid values include\n\
``none'', ``func'', or ``object''.\n";
}
static struct option opts[] = {
{ "type", required_argument, 0, 't' },
{ "section", required_argument, 0, 's' },
{ "help", no_argument, 0, '?' },
{ 0, 0, 0, 0 }
};
int
main(int argc, char* argv[])
{
while (1) {
int option_index = 0;
int c = getopt_long(argc, argv, "t:s:", opts, &option_index);
if (c < 0) break;
switch (c) {
case 't':
opt_type = optarg;
break;
case 's':
opt_section = optarg;
break;
case '?':
usage();
break;
}
}
for (int i = optind; i < argc; ++i)
process_file(argv[i]);
return 0;
}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.java.util.common;
import com.google.common.base.Preconditions;
import com.google.common.collect.Iterators;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.java.util.common.parsers.CloseableIterator;
import javax.annotation.Nullable;
import java.io.Closeable;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
public class CloseableIterators
{
public static <T> CloseableIterator<T> concat(List<? extends CloseableIterator<? extends T>> iterators)
{
final Closer closer = Closer.create();
iterators.forEach(closer::register);
final Iterator<T> innerIterator = Iterators.concat(iterators.iterator());
return wrap(innerIterator, closer);
}
public static <T> CloseableIterator<T> mergeSorted(
List<? extends CloseableIterator<? extends T>> iterators,
Comparator<T> comparator
)
{
Preconditions.checkNotNull(comparator);
final Closer closer = Closer.create();
iterators.forEach(closer::register);
final Iterator<T> innerIterator = Iterators.mergeSorted(iterators, comparator);
return wrap(innerIterator, closer);
}
public static <T> CloseableIterator<T> wrap(Iterator<T> innerIterator, @Nullable Closeable closeable)
{
return new CloseableIterator<T>()
{
private boolean closed;
@Override
public boolean hasNext()
{
return innerIterator.hasNext();
}
@Override
public T next()
{
return innerIterator.next();
}
@Override
public void close() throws IOException
{
if (!closed) {
if (closeable != null) {
closeable.close();
}
closed = true;
}
}
};
}
public static <T> CloseableIterator<T> withEmptyBaggage(Iterator<T> innerIterator)
{
return wrap(innerIterator, null);
}
private CloseableIterators() {}
}
| Java |
<html>
<head>
</head>
<body>
<div class="vevent">
<h5 class="summary">Annual Employee Review</h5>
<div>posted on <abbr class="dtstamp" title="19970901T1300Z">September 1, 1997</abbr></div>
<div>UID: <span class="uid">[email protected]</span></div>
        <div>Dates: <abbr class="dtstart" title="19970903T163000Z">September 3, 1997, 16:30</abbr> -
<abbr class="dtend" title="19970903T190000Z">19:00 UTC</abbr></div>
<div>This meeting is <strong class="class">private</strong>.</div>
<div>Filed under:</div>
<ul>
<li class="category">Business</li>
<li class="category">Human Resources</li>
</ul>
</div>
</body>
</html> | Java |
//======= Copyright (c) Valve Corporation, All rights reserved. ===============
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEngine.Serialization;
namespace Valve.VR
{
public class SteamVR_Settings : ScriptableObject
{
private static SteamVR_Settings _instance;
public static SteamVR_Settings instance
{
get
{
LoadInstance();
return _instance;
}
}
public bool pauseGameWhenDashboardVisible = true;
public bool lockPhysicsUpdateRateToRenderFrequency = true;
public ETrackingUniverseOrigin trackingSpace
{
get
{
return trackingSpaceOrigin;
}
set
{
trackingSpaceOrigin = value;
if (SteamVR_Behaviour.isPlaying)
SteamVR_Action_Pose.SetTrackingUniverseOrigin(trackingSpaceOrigin);
}
}
[SerializeField]
[FormerlySerializedAsAttribute("trackingSpace")]
private ETrackingUniverseOrigin trackingSpaceOrigin = ETrackingUniverseOrigin.TrackingUniverseStanding;
[Tooltip("Filename local to the project root (or executable, in a build)")]
public string actionsFilePath = "actions.json";
[Tooltip("Path local to the Assets folder")]
public string steamVRInputPath = "SteamVR_Input";
public SteamVR_UpdateModes inputUpdateMode = SteamVR_UpdateModes.OnUpdate;
public SteamVR_UpdateModes poseUpdateMode = SteamVR_UpdateModes.OnPreCull;
public bool activateFirstActionSetOnStart = true;
[Tooltip("This is the app key the unity editor will use to identify your application. (can be \"steam.app.[appid]\" to persist bindings between editor steam)")]
public string editorAppKey;
[Tooltip("The SteamVR Plugin can automatically make sure VR is enabled in your player settings and if not, enable it.")]
public bool autoEnableVR = true;
[Space()]
[Tooltip("This determines if we use legacy mixed reality mode (3rd controller/tracker device connected) or the new input system mode (pose / input source)")]
public bool legacyMixedRealityCamera = true;
[Tooltip("[NON-LEGACY] This is the pose action that will be used for positioning a mixed reality camera if connected")]
public SteamVR_Action_Pose mixedRealityCameraPose = SteamVR_Input.GetPoseAction("ExternalCamera");
[Tooltip("[NON-LEGACY] This is the input source to check on the pose for the mixed reality camera")]
public SteamVR_Input_Sources mixedRealityCameraInputSource = SteamVR_Input_Sources.Camera;
[Tooltip("[NON-LEGACY] Auto enable mixed reality action set if file exists")]
public bool mixedRealityActionSetAutoEnable = true;
public bool IsInputUpdateMode(SteamVR_UpdateModes tocheck)
{
return (inputUpdateMode & tocheck) == tocheck;
}
public bool IsPoseUpdateMode(SteamVR_UpdateModes tocheck)
{
return (poseUpdateMode & tocheck) == tocheck;
}
public static void VerifyScriptableObject()
{
LoadInstance();
}
private static void LoadInstance()
{
if (_instance == null)
{
_instance = Resources.Load<SteamVR_Settings>("SteamVR_Settings");
if (_instance == null)
{
_instance = SteamVR_Settings.CreateInstance<SteamVR_Settings>();
#if UNITY_EDITOR
string folderPath = SteamVR.GetResourcesFolderPath(true);
string assetPath = System.IO.Path.Combine(folderPath, "SteamVR_Settings.asset");
UnityEditor.AssetDatabase.CreateAsset(_instance, assetPath);
UnityEditor.AssetDatabase.SaveAssets();
#endif
}
if (string.IsNullOrEmpty(_instance.editorAppKey))
{
_instance.editorAppKey = SteamVR.GenerateAppKey();
Debug.Log("<b>[SteamVR Setup]</b> Generated you an editor app key of: " + _instance.editorAppKey + ". This lets the editor tell SteamVR what project this is. Has no effect on builds. This can be changed in Assets/SteamVR/Resources/SteamVR_Settings");
#if UNITY_EDITOR
UnityEditor.EditorUtility.SetDirty(_instance);
UnityEditor.AssetDatabase.SaveAssets();
#endif
}
}
}
}
} | Java |
/*
*
* Copyright 2015, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
*/
#ifndef GRPC_IMPL_CODEGEN_GRPC_TYPES_H
#define GRPC_IMPL_CODEGEN_GRPC_TYPES_H
#include <grpc/impl/codegen/byte_buffer.h>
#include <grpc/impl/codegen/status.h>
#include <stddef.h>
#ifdef __cplusplus
extern "C" {
#endif
/** Completion Queues enable notification of the completion of asynchronous
actions. */
typedef struct grpc_completion_queue grpc_completion_queue;
/** An alarm associated with a completion queue. */
typedef struct grpc_alarm grpc_alarm;
/** The Channel interface allows creation of Call objects. */
typedef struct grpc_channel grpc_channel;
/** A server listens to some port and responds to request calls */
typedef struct grpc_server grpc_server;
/** A Call represents an RPC. When created, it is in a configuration state
allowing properties to be set until it is invoked. After invoke, the Call
can have messages written to it and read from it. */
typedef struct grpc_call grpc_call;
/** Type specifier for grpc_arg */
typedef enum {
GRPC_ARG_STRING,
GRPC_ARG_INTEGER,
GRPC_ARG_POINTER
} grpc_arg_type;
typedef struct grpc_arg_pointer_vtable {
void *(*copy)(void *p);
void (*destroy)(void *p);
int (*cmp)(void *p, void *q);
} grpc_arg_pointer_vtable;
/** A single argument... each argument has a key and a value
A note on naming keys:
    Keys are namespaced into groups, usually grouped by library, and
keys for module XYZ are named XYZ.key1, XYZ.key2, etc. Module names must
be restricted to the regex [A-Za-z][_A-Za-z0-9]{,15}.
Key names must be restricted to the regex [A-Za-z][_A-Za-z0-9]{,47}.
GRPC core library keys are prefixed by grpc.
Library authors are strongly encouraged to \#define symbolic constants for
their keys so that it's possible to change them in the future. */
typedef struct {
grpc_arg_type type;
char *key;
union {
char *string;
int integer;
struct {
void *p;
const grpc_arg_pointer_vtable *vtable;
} pointer;
} value;
} grpc_arg;
/** An array of arguments that can be passed around.
Used to set optional channel-level configuration.
These configuration options are modelled as key-value pairs as defined
by grpc_arg; keys are strings to allow easy backwards-compatible extension
by arbitrary parties.
All evaluation is performed at channel creation time (i.e. the values in
this structure need only live through the creation invocation). */
typedef struct {
size_t num_args;
grpc_arg *args;
} grpc_channel_args;
/* Channel argument keys: */
/** Enable census for tracing and stats collection */
#define GRPC_ARG_ENABLE_CENSUS "grpc.census"
/** Maximum number of concurrent incoming streams to allow on a http2
connection */
#define GRPC_ARG_MAX_CONCURRENT_STREAMS "grpc.max_concurrent_streams"
/** Maximum message length that the channel can receive */
#define GRPC_ARG_MAX_MESSAGE_LENGTH "grpc.max_message_length"
/** Initial sequence number for http2 transports */
#define GRPC_ARG_HTTP2_INITIAL_SEQUENCE_NUMBER \
"grpc.http2.initial_sequence_number"
/** Amount to read ahead on individual streams. Defaults to 64kb, larger
values can help throughput on high-latency connections.
NOTE: at some point we'd like to auto-tune this, and this parameter
will become a no-op. */
#define GRPC_ARG_HTTP2_STREAM_LOOKAHEAD_BYTES "grpc.http2.lookahead_bytes"
/** How much memory to use for hpack decoding */
#define GRPC_ARG_HTTP2_HPACK_TABLE_SIZE_DECODER \
"grpc.http2.hpack_table_size.decoder"
/** How much memory to use for hpack encoding */
#define GRPC_ARG_HTTP2_HPACK_TABLE_SIZE_ENCODER \
"grpc.http2.hpack_table_size.encoder"
/** Default authority to pass if none specified on call construction */
#define GRPC_ARG_DEFAULT_AUTHORITY "grpc.default_authority"
/** Primary user agent: goes at the start of the user-agent metadata
sent on each request */
#define GRPC_ARG_PRIMARY_USER_AGENT_STRING "grpc.primary_user_agent"
/** Secondary user agent: goes at the end of the user-agent metadata
sent on each request */
#define GRPC_ARG_SECONDARY_USER_AGENT_STRING "grpc.secondary_user_agent"
/** The maximum time between subsequent connection attempts, in ms */
#define GRPC_ARG_MAX_RECONNECT_BACKOFF_MS "grpc.max_reconnect_backoff_ms"
/* The caller of the secure_channel_create functions may override the target
name used for SSL host name checking using this channel argument which is of
type GRPC_ARG_STRING. This *should* be used for testing only.
If this argument is not specified, the name used for SSL host name checking
will be the target parameter (assuming that the secure channel is an SSL
channel). If this parameter is specified and the underlying is not an SSL
channel, it will just be ignored. */
#define GRPC_SSL_TARGET_NAME_OVERRIDE_ARG "grpc.ssl_target_name_override"
/* Maximum metadata size */
#define GRPC_ARG_MAX_METADATA_SIZE "grpc.max_metadata_size"
/** Result of a grpc call. If the caller satisfies the prerequisites of a
particular operation, the grpc_call_error returned will be GRPC_CALL_OK.
Receiving any other value listed here is an indication of a bug in the
caller. */
typedef enum grpc_call_error {
/** everything went ok */
GRPC_CALL_OK = 0,
/** something failed, we don't know what */
GRPC_CALL_ERROR,
/** this method is not available on the server */
GRPC_CALL_ERROR_NOT_ON_SERVER,
/** this method is not available on the client */
GRPC_CALL_ERROR_NOT_ON_CLIENT,
/** this method must be called before server_accept */
GRPC_CALL_ERROR_ALREADY_ACCEPTED,
/** this method must be called before invoke */
GRPC_CALL_ERROR_ALREADY_INVOKED,
/** this method must be called after invoke */
GRPC_CALL_ERROR_NOT_INVOKED,
/** this call is already finished
(writes_done or write_status has already been called) */
GRPC_CALL_ERROR_ALREADY_FINISHED,
/** there is already an outstanding read/write operation on the call */
GRPC_CALL_ERROR_TOO_MANY_OPERATIONS,
/** the flags value was illegal for this call */
GRPC_CALL_ERROR_INVALID_FLAGS,
/** invalid metadata was passed to this call */
GRPC_CALL_ERROR_INVALID_METADATA,
/** invalid message was passed to this call */
GRPC_CALL_ERROR_INVALID_MESSAGE,
/** completion queue for notification has not been registered with the
server */
GRPC_CALL_ERROR_NOT_SERVER_COMPLETION_QUEUE,
/** this batch of operations leads to more operations than allowed */
GRPC_CALL_ERROR_BATCH_TOO_BIG,
/** payload type requested is not the type registered */
GRPC_CALL_ERROR_PAYLOAD_TYPE_MISMATCH
} grpc_call_error;
/* Write Flags: */
/** Hint that the write may be buffered and need not go out on the wire
immediately. GRPC is free to buffer the message until the next non-buffered
write, or until writes_done, but it need not buffer completely or at all. */
#define GRPC_WRITE_BUFFER_HINT (0x00000001u)
/** Force compression to be disabled for a particular write
(start_write/add_metadata). Illegal on invoke/accept. */
#define GRPC_WRITE_NO_COMPRESS (0x00000002u)
/** Mask of all valid flags. */
#define GRPC_WRITE_USED_MASK (GRPC_WRITE_BUFFER_HINT | GRPC_WRITE_NO_COMPRESS)
/* Initial metadata flags */
/** Signal that the call is idempotent */
#define GRPC_INITIAL_METADATA_IDEMPOTENT_REQUEST (0x00000010u)
/** Signal that the call should not return UNAVAILABLE before it has started */
#define GRPC_INITIAL_METADATA_IGNORE_CONNECTIVITY (0x00000020u)
/** Mask of all valid flags */
#define GRPC_INITIAL_METADATA_USED_MASK \
(GRPC_INITIAL_METADATA_IDEMPOTENT_REQUEST | \
GRPC_INITIAL_METADATA_IGNORE_CONNECTIVITY)
/** A single metadata element */
typedef struct grpc_metadata {
const char *key;
const char *value;
size_t value_length;
uint32_t flags;
/** The following fields are reserved for grpc internal use.
There is no need to initialize them, and they will be set to garbage
during calls to grpc. */
struct {
void *obfuscated[4];
} internal_data;
} grpc_metadata;
/** The type of completion (for grpc_event) */
typedef enum grpc_completion_type {
/** Shutting down */
GRPC_QUEUE_SHUTDOWN,
/** No event before timeout */
GRPC_QUEUE_TIMEOUT,
/** Operation completion */
GRPC_OP_COMPLETE
} grpc_completion_type;
/** The result of an operation.
Returned by a completion queue when the operation started with tag. */
typedef struct grpc_event {
/** The type of the completion. */
grpc_completion_type type;
/** non-zero if the operation was successful, 0 upon failure.
Only GRPC_OP_COMPLETE can succeed or fail. */
int success;
/** The tag passed to grpc_call_start_batch etc to start this operation.
Only GRPC_OP_COMPLETE has a tag. */
void *tag;
} grpc_event;
typedef struct {
size_t count;
size_t capacity;
grpc_metadata *metadata;
} grpc_metadata_array;
typedef struct {
char *method;
size_t method_capacity;
char *host;
size_t host_capacity;
gpr_timespec deadline;
uint32_t flags;
void *reserved;
} grpc_call_details;
typedef enum {
/** Send initial metadata: one and only one instance MUST be sent for each
call, unless the call was cancelled - in which case this can be skipped.
This op completes after all bytes of metadata have been accepted by
outgoing flow control. */
GRPC_OP_SEND_INITIAL_METADATA = 0,
/** Send a message: 0 or more of these operations can occur for each call.
This op completes after all bytes for the message have been accepted by
outgoing flow control. */
GRPC_OP_SEND_MESSAGE,
/** Send a close from the client: one and only one instance MUST be sent from
the client, unless the call was cancelled - in which case this can be
skipped.
This op completes after all bytes for the call (including the close)
have passed outgoing flow control. */
GRPC_OP_SEND_CLOSE_FROM_CLIENT,
/** Send status from the server: one and only one instance MUST be sent from
the server unless the call was cancelled - in which case this can be
skipped.
This op completes after all bytes for the call (including the status)
have passed outgoing flow control. */
GRPC_OP_SEND_STATUS_FROM_SERVER,
/** Receive initial metadata: one and only one MUST be made on the client,
must not be made on the server.
This op completes after all initial metadata has been read from the
peer. */
GRPC_OP_RECV_INITIAL_METADATA,
/** Receive a message: 0 or more of these operations can occur for each call.
This op completes after all bytes of the received message have been
read, or after a half-close has been received on this call. */
GRPC_OP_RECV_MESSAGE,
/** Receive status on the client: one and only one must be made on the client.
This operation always succeeds, meaning ops paired with this operation
will also appear to succeed, even though they may not have. In that case
the status will indicate some failure.
This op completes after all activity on the call has completed. */
GRPC_OP_RECV_STATUS_ON_CLIENT,
/** Receive close on the server: one and only one must be made on the
server.
This op completes after the close has been received by the server.
This operation always succeeds, meaning ops paired with this operation
will also appear to succeed, even though they may not have. */
GRPC_OP_RECV_CLOSE_ON_SERVER
} grpc_op_type;
/** Operation data: one field for each op type (except SEND_CLOSE_FROM_CLIENT
which has no arguments) */
typedef struct grpc_op {
/** Operation type, as defined by grpc_op_type */
grpc_op_type op;
/** Write flags bitset for grpc_begin_messages */
uint32_t flags;
/** Reserved for future usage */
void *reserved;
union {
/** Reserved for future usage */
struct {
void *reserved[8];
} reserved;
struct {
size_t count;
grpc_metadata *metadata;
} send_initial_metadata;
grpc_byte_buffer *send_message;
struct {
size_t trailing_metadata_count;
grpc_metadata *trailing_metadata;
grpc_status_code status;
const char *status_details;
} send_status_from_server;
/** ownership of the array is with the caller, but ownership of the elements
stays with the call object (ie key, value members are owned by the call
object, recv_initial_metadata->array is owned by the caller).
After the operation completes, call grpc_metadata_array_destroy on this
value, or reuse it in a future op. */
grpc_metadata_array *recv_initial_metadata;
/** ownership of the byte buffer is moved to the caller; the caller must
call grpc_byte_buffer_destroy on this value, or reuse it in a future op.
*/
grpc_byte_buffer **recv_message;
struct {
/** ownership of the array is with the caller, but ownership of the
elements stays with the call object (ie key, value members are owned
by the call object, trailing_metadata->array is owned by the caller).
After the operation completes, call grpc_metadata_array_destroy on
this
value, or reuse it in a future op. */
grpc_metadata_array *trailing_metadata;
grpc_status_code *status;
/** status_details is a buffer owned by the application before the op
completes and after the op has completed. During the operation
status_details may be reallocated to a size larger than
*status_details_capacity, in which case *status_details_capacity will
be updated with the new array capacity.
Pre-allocating space:
size_t my_capacity = 8;
char *my_details = gpr_malloc(my_capacity);
x.status_details = &my_details;
x.status_details_capacity = &my_capacity;
Not pre-allocating space:
size_t my_capacity = 0;
char *my_details = NULL;
x.status_details = &my_details;
x.status_details_capacity = &my_capacity;
After the call:
gpr_free(my_details); */
char **status_details;
size_t *status_details_capacity;
} recv_status_on_client;
struct {
/** out argument, set to 1 if the call failed in any way (seen as a
cancellation on the server), or 0 if the call succeeded */
int *cancelled;
} recv_close_on_server;
} data;
} grpc_op;
#ifdef __cplusplus
}
#endif
#endif /* GRPC_IMPL_CODEGEN_GRPC_TYPES_H */
| Java |
using System.Collections.Generic;
namespace DocGenerator.Documentation.Blocks
{
/// <summary>
/// Used to keep a line of code (could be multiple e.g fluent syntax) and its annotations in one logical unit.
/// So they do not suffer from reordering based on line number when writing out the documentation
/// </summary>
public class CombinedBlock : IDocumentationBlock
{
public string Value { get; }
public IEnumerable<IDocumentationBlock> Blocks { get; }
public int LineNumber { get; }
public CombinedBlock(IEnumerable<IDocumentationBlock> blocks, int lineNumber)
{
Blocks = blocks;
LineNumber = lineNumber;
Value = null;
}
}
}
| Java |
# Code Size Rules
## Too Many Parameters
<dl>
<dt>Identifier</dt>
<dd>too_many_parameters</dd>
<dt>File name</dt>
<dd>TooManyParametersRule.swift</dd>
<dt>Severity</dt>
<dd>Minor</dd>
<dt>Category</dt>
<dd>Code Size</dd>
</dl>
Methods with too many parameters are hard to understand and maintain,
and are thirsty for refactorings, like
[Replace Parameter With Method](http://www.refactoring.com/catalog/replaceParameterWithMethod.html),
[Introduce Parameter Object](http://www.refactoring.com/catalog/introduceParameterObject.html),
or
[Preserve Whole Object](http://www.refactoring.com/catalog/preserveWholeObject.html).
##### Thresholds:
<dl>
<dt>MAX_PARAMETERS_COUNT</dt>
<dd>The reporting threshold for too many parameters, default value is 10.</dd>
</dl>
##### Examples:
###### Example 1
```
func example(
a: Int,
b: Int,
c: Int,
...
z: Int
) {}
```
##### References:
Fowler, Martin (1999). *Refactoring: Improving the design of existing code.* Addison Wesley.
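
To make the refactorings referenced above a little more concrete, here is a minimal, hypothetical Swift sketch of *Introduce Parameter Object*. The `SmtpServer` type and every parameter name in it are invented for this illustration; they are not part of the rule or of TooManyParametersRule.swift.

```
// Before: six loosely related parameters creep toward the MAX_PARAMETERS_COUNT threshold.
func send(host: String, port: Int, user: String, password: String,
          subject: String, body: String) {
    // ...
}

// After: the connection details travel together as a single parameter object.
struct SmtpServer {
    let host: String
    let port: Int
    let user: String
    let password: String
}

func send(server: SmtpServer, subject: String, body: String) {
    // ...
}
```

The call site shrinks in the same way, e.g. `send(server: server, subject: "Hello", body: "...")`.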
## Long Line
<dl>
<dt>Identifier</dt>
<dd>long_line</dd>
<dt>File name</dt>
<dd>LongLineRule.swift</dd>
<dt>Severity</dt>
<dd>Minor</dd>
<dt>Category</dt>
<dd>Code Size</dd>
</dl>
When a line of code is very long, it greatly harms readability.
Break long lines of code into multiple lines.
##### Thresholds:
<dl>
<dt>LONG_LINE</dt>
<dd>The long line reporting threshold, default value is 100.</dd>
</dl>
##### Examples:
###### Example 1
```
let a012345678901234567890123456789...1234567890123456789012345678901234567890123456789
```
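
A small sketch of the usual fix follows: build the same kind of value with the expression split at argument boundaries, so each physical line stays below the LONG_LINE threshold (this snippet is illustrative and not taken from the rule's own examples).

```
// The same repeated-digit value as above, without any over-long source line.
let a = String(
    repeating: "0123456789",
    count: 9
)
```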
| Java |
package com.code.constant;
/**
* Created by niu on 2017/8/17.
*/
public class StringEvent {
    // network state changed
public static String NET_STATE_CHANGE = "net_state_change";
}
| Java |
using Akka.Actor;
using Akka.Actor.Internals;
using Akka.TestKit;
using Xunit;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Akka.Tests.Actor
{
public class ActorRefProviderSpec : AkkaSpec
{
[Fact]
public void CanResolveActorRef()
{
var path = TestActor.Path.ToString();
var resolved = ((ActorSystemImpl)Sys).Provider.ResolveActorRef(path);
Assert.Same(TestActor, resolved);
}
}
}
| Java |
import numpy as np
from math import sin, pi, cos
from banti.glyph import Glyph
halfsize = 40
size = 2*halfsize + 1
picture = np.zeros((size, size))
for t in range(-135, 135):
x = round(halfsize + halfsize * cos(pi * t / 180))
y = round(halfsize + halfsize * sin(pi * t / 180))
picture[x][y] = 1
zoomsz = 1 * halfsize
b = Glyph(['O', 0, 0, size, size, 0, 0, 0, 0, None])
b.set_pix(picture)
c = Glyph()
for t in range(0, 360, 15):
x = round(zoomsz + zoomsz * cos(pi * t / 180))
y = round(zoomsz + zoomsz * sin(pi * t / 180))
b.set_xy_wh((x, y, size, size))
c = c + b
print(b)
print(c) | Java |
#
# Author:: Joshua Timberman <[email protected]>
# Author:: Joshua Sierles <[email protected]>
# Cookbook Name:: chef
# Recipe:: client
#
# Copyright 2008-2010, Opscode, Inc
# Copyright 2009, 37signals
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
root_group = value_for_platform(
["openbsd", "freebsd", "mac_os_x"] => { "default" => "wheel" },
"default" => "root"
)
chef_node_name = Chef::Config[:node_name] == node["fqdn"] ? false : Chef::Config[:node_name]
template "/etc/chef/client.rb" do
source "client.rb.erb"
owner "root"
group root_group
mode 0644
variables :chef_node_name => chef_node_name
notifies :create, "ruby_block[reload_client_config]"
end
ruby_block "reload_client_config" do
block do
Chef::Config.from_file("/etc/chef/client.rb")
end
action :nothing
end
| Java |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="content" content ="text/html; charset=utf-8" />
<title> 微通汇系统</title>
<meta http-equiv="MSThemeCompatible" content = "Yes" />
<script src="__JS__/common.js" type="text/javascript"></script>
<script src="__STATIC__/jquery-1.11.1.min.js" type="text/javascript"></script>
<script type="text/javascript" src="http://api.map.baidu.com/api?v=2.0&ak={gr-:$apikey}"></script>
<script src="__STATIC__/artDialog/jquery.artDialog.js?skin=default"></script>
<script src="__STATIC__/artDialog/plugins/iframeTools.js"></script>
<style type="text/css">
body, html,#allmap {width: 100%;height: 100%;overflow: hidden;margin:0;}
#allmap {width: 100%;height: 100%;overflow: hidden;margin:0;}
#l-map{height:100%;width:78%;float: left;border-right:2px solid #bcbcbc;}
#r-result{height:100%;width:20%;float:left;}
.search{
padding: 6px;
font-size: 14px;
margin: 5px;
color: #993300;
}
.search a{
background-color: #5d5d5d;
color: #fff;
margin-left: 5px;
border-radius: 3px;
padding: 5px;
}
.searchkeyword{
border: #606060 2px solid;
padding: 5px;
}
</style>
</head>
<body id="nv_member">
<input type="hidden" id="longitude" value="0" />
<input type="hidden" id="latitude" value="0" />
<div class="search" style="margin:0">全国范围内搜索:<input type="text" id="keyword" class="searchkeyword" /><a href="#" onclick="searchLoc();return false;">搜索</a> <a id="ok" style="float:none;background-color:#f40" href="###" >设定好就点我吧</a> 把跳动的点[拖动到或点击]你的公司地址或其它地址。</div>
<div id="l-map"></div>
<div id="r-result">搜索结果展示</div>
<script type="text/javascript">
function G(id) {
return document.getElementById(id);
}
if (art.dialog.data('longitude')) {
    G('longitude').value = art.dialog.data('longitude');// get the data passed in from the parent page
G('latitude').value = art.dialog.data('latitude');
};
// close the dialog and return the data to the parent page
document.getElementById('ok').onclick = function () {
var origin = artDialog.open.origin;
var longitudeinput = origin.document.getElementById('longitude');
var latitudeinput = origin.document.getElementById('latitude');
longitudeinput.value = $('#longitude').attr('value');
latitudeinput.value = $('#latitude').attr('value');
art.dialog.close();
};
var map = new BMap.Map("l-map");
var myCity = new BMap.LocalCity();
myCity.get(createAniMarker);
var point = new BMap.Point($('#longitude').val(),$('#latitude').val());
map.centerAndZoom(point,12);
map.enableScrollWheelZoom(); //enable zooming with the scroll wheel
//right-click context menu
var menu = new BMap.ContextMenu();
var txtMenuItem = [
{
        text:'Satellite view',
callback:function(){ map.setMapType(BMAP_HYBRID_MAP);}
}
];
for(var i=0; i < txtMenuItem.length; i++){
menu.addItem(new BMap.MenuItem(txtMenuItem[i].text,txtMenuItem[i].callback,100));
}
map.addContextMenu(menu);
//search
var local = new BMap.LocalSearch("全国", {
renderOptions: {
map: map,
panel : "r-result",
autoViewport: true,
selectFirstResult: false
},
onSearchComplete:searchComplete
});
function searchLoc(keyword){
$key = (arguments[0] ) || G("keyword").value;
local.search($key);
}
function searchComplete(){
var result = local.getResults();
if(result && result.getPoi(0) ){
        var pp = local.getResults().getPoi(0).point; //get the first suggested search result
// console.log(pp);
map.centerAndZoom(pp, 18);
map.clearOverlays();
marker = new BMap.Marker(pp);
        map.addOverlay(marker); //add the marker
marker.setAnimation(BMAP_ANIMATION_BOUNCE);
$('#longitude').attr('value',pp.lng);
$('#latitude').attr('value',pp.lat);
}
}
//create the marker
var marker = {};
function createAniMarker(result){
// map.clearOverlays();
if(result){
var cityName = result.name;
}
if($('#longitude').val()==0||$('#longitude').val()==''){
if(cityName){
map.setCenter(cityName);
}
p = new BMap.Point(result.center.lng,result.center.lat);
}else{
p = new BMap.Point($('#longitude').val(),$('#latitude').val());
}
marker = new BMap.Marker(p);
marker.enableDragging();
map.addOverlay(marker);
marker.setAnimation(BMAP_ANIMATION_BOUNCE);
marker.addEventListener("dragend", function(e){
$('#longitude').attr('value',e.point.lng)
$('#latitude').attr('value',e.point.lat)
})
}
//click event
map.addEventListener("click",function(e){
map.removeOverlay(marker);
$("#longitude").attr("value",e.point.lng);
$("#latitude").attr("value",e.point.lat);
createAniMarker();
// alert(e.point.lng + "," + e.point.lat);
});
//autocomplete suggestions
var ac = new BMap.Autocomplete( //create an autocomplete object
{"input" : "keyword"
,"location" : map
});
ac.addEventListener("onhighlight", function(e) { //鼠标放在下拉列表上的事件
var str = "";
var _value = e.fromitem.value;
var value = "";
if (e.fromitem.index > -1) {
value = _value.province + _value.city + _value.district + _value.street + _value.business;
}
str = "FromItem<br />index = " + e.fromitem.index + "<br />value = " + value;
value = "";
if (e.toitem.index > -1) {
_value = e.toitem.value;
value = _value.province + _value.city + _value.district + _value.street + _value.business;
}
str += "<br />ToItem<br />index = " + e.toitem.index + "<br />value = " + value;
// G("searchResultPanel").innerHTML = str;
});
var myValue;
ac.addEventListener("onconfirm", function(e) { //鼠标点击下拉列表后的事件
var _value = e.item.value;
myValue = _value.province + _value.city + _value.district + _value.street + _value.business;
// G("searchResultPanel").innerHTML ="onconfirm<br />index = " + e.item.index + "<br />myValue = " + myValue;
setPlace();
});
//set the location
function setPlace(){
    map.clearOverlays();    //clear all overlays from the map
// function myFun(){
    // var pp = local.getResults().getPoi(0).point; //get the first suggested search result
// map.centerAndZoom(pp, 18);
// marker = new BMap.Marker(pp);
    // map.addOverlay(marker); //add the marker
// marker.setAnimation(BMAP_ANIMATION_BOUNCE);
// }
searchLoc(myValue);
}
//console.log(marker);
</script>
</body>
</html> | Java |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/contrib/lite/testing/tflite_driver.h"
#include <cmath>
#include <cstring>
#include <iostream>
#include "tensorflow/contrib/lite/builtin_op_data.h"
#include "tensorflow/contrib/lite/testing/split.h"
namespace tflite {
namespace testing {
namespace {
// Returns the value in the given position in a tensor.
template <typename T>
T Value(const TfLitePtrUnion& data, int index);
template <>
float Value(const TfLitePtrUnion& data, int index) {
return data.f[index];
}
template <>
int32_t Value(const TfLitePtrUnion& data, int index) {
return data.i32[index];
}
template <>
int64_t Value(const TfLitePtrUnion& data, int index) {
return data.i64[index];
}
template <>
uint8_t Value(const TfLitePtrUnion& data, int index) {
return data.uint8[index];
}
template <>
bool Value(const TfLitePtrUnion& data, int index) {
return data.b[index];
}
template <typename T>
void SetTensorData(const std::vector<T>& values, TfLitePtrUnion* data) {
T* input_ptr = reinterpret_cast<T*>(data->raw);
for (const T& v : values) {
*input_ptr = v;
++input_ptr;
}
}
} // namespace
class TfLiteDriver::Expectation {
public:
Expectation() {
data_.raw = nullptr;
num_elements_ = 0;
}
~Expectation() { delete[] data_.raw; }
template <typename T>
void SetData(const string& csv_values) {
const auto& values = testing::Split<T>(csv_values, ",");
num_elements_ = values.size();
data_.raw = new char[num_elements_ * sizeof(T)];
SetTensorData(values, &data_);
}
bool Check(bool verbose, const TfLiteTensor& tensor) {
switch (tensor.type) {
case kTfLiteFloat32:
return TypedCheck<float>(verbose, tensor);
case kTfLiteInt32:
return TypedCheck<int32_t>(verbose, tensor);
case kTfLiteInt64:
return TypedCheck<int64_t>(verbose, tensor);
case kTfLiteUInt8:
return TypedCheck<uint8_t>(verbose, tensor);
case kTfLiteBool:
return TypedCheck<bool>(verbose, tensor);
default:
fprintf(stderr, "Unsupported type %d in Check\n", tensor.type);
return false;
}
}
private:
template <typename T>
bool TypedCheck(bool verbose, const TfLiteTensor& tensor) {
// TODO(ahentz): must find a way to configure the tolerance.
constexpr double kRelativeThreshold = 1e-2f;
constexpr double kAbsoluteThreshold = 1e-4f;
size_t tensor_size = tensor.bytes / sizeof(T);
if (tensor_size != num_elements_) {
std::cerr << "Expected a tensor with " << num_elements_
<< " elements, got " << tensor_size << std::endl;
return false;
}
bool good_output = true;
for (int i = 0; i < tensor_size; ++i) {
float computed = Value<T>(tensor.data, i);
float reference = Value<T>(data_, i);
float diff = std::abs(computed - reference);
bool error_is_large = false;
// For very small numbers, try absolute error, otherwise go with
// relative.
if (std::abs(reference) < kRelativeThreshold) {
error_is_large = (diff > kAbsoluteThreshold);
} else {
error_is_large = (diff > kRelativeThreshold * std::abs(reference));
}
if (error_is_large) {
good_output = false;
if (verbose) {
std::cerr << " index " << i << ": got " << computed
<< ", but expected " << reference << std::endl;
}
}
}
return good_output;
}
TfLitePtrUnion data_;
size_t num_elements_;
};
TfLiteDriver::TfLiteDriver(bool use_nnapi) : use_nnapi_(use_nnapi) {}
TfLiteDriver::~TfLiteDriver() {}
void TfLiteDriver::AllocateTensors() {
if (must_allocate_tensors_) {
if (interpreter_->AllocateTensors() != kTfLiteOk) {
Invalidate("Failed to allocate tensors");
return;
}
ResetLSTMStateTensors();
must_allocate_tensors_ = false;
}
}
void TfLiteDriver::LoadModel(const string& bin_file_path) {
if (!IsValid()) return;
model_ = FlatBufferModel::BuildFromFile(GetFullPath(bin_file_path).c_str());
if (!model_) {
Invalidate("Failed to mmap model " + bin_file_path);
return;
}
ops::builtin::BuiltinOpResolver builtins;
InterpreterBuilder(*model_, builtins)(&interpreter_);
if (!interpreter_) {
Invalidate("Failed build interpreter");
return;
}
must_allocate_tensors_ = true;
}
void TfLiteDriver::ResetTensor(int id) {
if (!IsValid()) return;
auto* tensor = interpreter_->tensor(id);
memset(tensor->data.raw, 0, tensor->bytes);
}
void TfLiteDriver::ReshapeTensor(int id, const string& csv_values) {
if (!IsValid()) return;
if (interpreter_->ResizeInputTensor(
id, testing::Split<int>(csv_values, ",")) != kTfLiteOk) {
Invalidate("Failed to resize input tensor " + std::to_string(id));
return;
}
must_allocate_tensors_ = true;
}
void TfLiteDriver::SetInput(int id, const string& csv_values) {
if (!IsValid()) return;
auto* tensor = interpreter_->tensor(id);
switch (tensor->type) {
case kTfLiteFloat32: {
const auto& values = testing::Split<float>(csv_values, ",");
if (!CheckSizes<float>(tensor->bytes, values.size())) return;
SetTensorData(values, &tensor->data);
break;
}
case kTfLiteInt32: {
const auto& values = testing::Split<int32_t>(csv_values, ",");
if (!CheckSizes<int32_t>(tensor->bytes, values.size())) return;
SetTensorData(values, &tensor->data);
break;
}
case kTfLiteInt64: {
const auto& values = testing::Split<int64_t>(csv_values, ",");
if (!CheckSizes<int64_t>(tensor->bytes, values.size())) return;
SetTensorData(values, &tensor->data);
break;
}
case kTfLiteUInt8: {
const auto& values = testing::Split<uint8_t>(csv_values, ",");
if (!CheckSizes<uint8_t>(tensor->bytes, values.size())) return;
SetTensorData(values, &tensor->data);
break;
}
case kTfLiteBool: {
const auto& values = testing::Split<bool>(csv_values, ",");
if (!CheckSizes<bool>(tensor->bytes, values.size())) return;
SetTensorData(values, &tensor->data);
break;
}
default:
fprintf(stderr, "Unsupported type %d in SetInput\n", tensor->type);
Invalidate("Unsupported tensor data type");
return;
}
}
void TfLiteDriver::SetExpectation(int id, const string& csv_values) {
if (!IsValid()) return;
auto* tensor = interpreter_->tensor(id);
if (expected_output_.count(id) != 0) {
fprintf(stderr, "Overridden expectation for tensor %d\n", id);
Invalidate("Overridden expectation");
}
expected_output_[id].reset(new Expectation);
switch (tensor->type) {
case kTfLiteFloat32:
expected_output_[id]->SetData<float>(csv_values);
break;
case kTfLiteInt32:
expected_output_[id]->SetData<int32_t>(csv_values);
break;
case kTfLiteInt64:
expected_output_[id]->SetData<int64_t>(csv_values);
break;
case kTfLiteUInt8:
expected_output_[id]->SetData<uint8_t>(csv_values);
break;
case kTfLiteBool:
expected_output_[id]->SetData<bool>(csv_values);
break;
default:
fprintf(stderr, "Unsupported type %d in SetExpectation\n", tensor->type);
Invalidate("Unsupported tensor data type");
return;
}
}
void TfLiteDriver::Invoke() {
if (!IsValid()) return;
if (interpreter_->Invoke() != kTfLiteOk) {
Invalidate("Failed to invoke interpreter");
}
}
bool TfLiteDriver::CheckResults() {
if (!IsValid()) return false;
bool success = true;
for (const auto& p : expected_output_) {
int id = p.first;
auto* tensor = interpreter_->tensor(id);
if (!p.second->Check(/*verbose=*/false, *tensor)) {
// Do not invalidate anything here. Instead, simply output the
// differences and return false. Invalidating would prevent all
      // subsequent invocations from running.
std::cerr << "There were errors in invocation '" << GetInvocationId()
<< "', output tensor '" << id << "':" << std::endl;
p.second->Check(/*verbose=*/true, *tensor);
success = false;
SetOverallSuccess(false);
}
}
expected_output_.clear();
return success;
}
void TfLiteDriver::ResetLSTMStateTensors() {
// This is a workaround for initializing state tensors for LSTM.
// TODO(ycling): Refactoring and find a better way to initialize state
// tensors. Maybe write the reset instructions into the test data.
for (auto node_index : interpreter_->execution_plan()) {
const auto& node_and_reg = interpreter_->node_and_registration(node_index);
const auto& node = node_and_reg->first;
const auto& registration = node_and_reg->second;
if (registration.builtin_code == tflite::BuiltinOperator_LSTM) {
const auto* params =
reinterpret_cast<const TfLiteLSTMParams*>(node.builtin_data);
if (params->kernel_type == kTfLiteLSTMFullKernel &&
node.outputs->size >= 2) {
// The first 2 outputs of LSTM are state tensors.
for (int i = 0; i < 2; ++i) {
int node_index = node.outputs->data[i];
ResetTensor(node_index);
}
} else if (params->kernel_type == kTfLiteLSTMBasicKernel &&
node.inputs->size == 5) {
        // The 2nd and 5th inputs are state tensors.
for (int i : {1, 4}) {
int node_index = node.inputs->data[i];
ResetTensor(node_index);
}
}
}
}
}
} // namespace testing
} // namespace tflite
| Java |