code
stringlengths 4
1.01M
| language
stringclasses 2
values |
---|---|
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ThreadedComponent import threadedcomponent, threadedadaptivecommscomponent
import heapq
import time
class SchedulingComponentMixin(object):
    """
    SchedulingComponent() -> new SchedulingComponent

    Base class for a threadedcomponent with an inbuilt scheduler, allowing a
    component to block until a scheduled event is ready or a message is
    received on an inbox.
    """
    Inboxes = {"inbox" : "Standard inbox for receiving data from other components",
               "control" : "Standard inbox for receiving control messages from other components",
               "event" : "Scheduled events which are ready to be processed"}

    def __init__(self, **argd):
        super(SchedulingComponentMixin, self).__init__(**argd)
        # Min-heap of (eventTime, priority, sequence, message) tuples.
        self.eventQueue = []
        # Monotonically increasing tie-breaker: without it, two events with
        # equal time and priority would make heapq compare the message
        # payloads themselves, raising TypeError for unorderable messages
        # (e.g. dicts, or arbitrary objects on Python 3).
        self._eventSequence = 0

    def scheduleRel(self, message, delay, priority=1):
        """
        Schedule an event to wake the component and send a message to the
        "event" inbox after a delay.  Returns an opaque handle that can be
        passed to cancelEvent().
        """
        return self.scheduleAbs(message, time.time() + delay, priority)

    def scheduleAbs(self, message, eventTime, priority=1):
        """
        Schedule an event to wake the component and send a message to the
        "event" inbox at a specified absolute time.  Returns an opaque
        handle that can be passed to cancelEvent().
        """
        event = eventTime, priority, self._eventSequence, message
        self._eventSequence += 1
        heapq.heappush(self.eventQueue, event)
        return event

    def cancelEvent(self, event):
        """ Remove a scheduled event from the scheduler """
        # O(n) removal plus re-heapify; acceptable for the small queues this
        # mixin is designed for.
        self.eventQueue.remove(event)
        heapq.heapify(self.eventQueue)

    def eventReady(self):
        """ Returns true if there is an event ready to be processed """
        if self.eventQueue:
            eventTime = self.eventQueue[0][0]
            if time.time() >= eventTime:
                return True
        return False

    def pause(self):
        """
        Sleep until there is either an event ready or a message is received
        on an inbox
        """
        if self.eventReady():
            self.signalEvent()
        else:
            if self.eventQueue:
                eventTime = self.eventQueue[0][0]
                # Wake no later than the next scheduled event is due.
                super(SchedulingComponentMixin, self).pause(eventTime - time.time())
                if self.eventReady():
                    self.signalEvent()
            else:
                super(SchedulingComponentMixin, self).pause()

    def signalEvent(self):
        """
        Put the event message of the earliest scheduled event onto the
        component's "event" inbox and remove it from the scheduler.
        """
        eventTime, priority, sequence, message = heapq.heappop(self.eventQueue)
        #print "Signalling, late by:", (time.time() - eventTime)
        # NOTE: if the "event" inbox is full the message is silently dropped
        # (best-effort delivery, matching the original behaviour).
        if not self.inqueues["event"].full():
            self.inqueues["event"].put(message)
class SchedulingComponent(SchedulingComponentMixin, threadedcomponent):
    """A threadedcomponent extended with the inbuilt event scheduler
    provided by SchedulingComponentMixin."""
    def __init__(self, **argd):
        super(SchedulingComponent, self).__init__(**argd)
class SchedulingAdaptiveCommsComponent(SchedulingComponentMixin,
                                       threadedadaptivecommscomponent):
    """A threadedadaptivecommscomponent extended with the inbuilt event
    scheduler provided by SchedulingComponentMixin."""
    def __init__(self, **argd):
        super(SchedulingAdaptiveCommsComponent, self).__init__(**argd)
| Java |
// scalac: -Ystop-after:parser
//
object foo {
  // Parser-only fixture (see -Ystop-after:parser above): an XML literal
  // carrying an explicit empty xmlns attribute, followed by an expression
  // comparing its namespace against null.  Only parsing is exercised.
  val n =
    <a xmlns=""/>
  n.namespace == null
}
| Java |
<?php
namespace PHPExcel\Reader\Excel5;
/**
* PHPExcel_Reader_Excel5_RC4
*
* Copyright (c) 2006 - 2015 PHPExcel
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*
* @category PHPExcel
* @package PHPExcel_Reader_Excel5
* @copyright Copyright (c) 2006 - 2015 PHPExcel (http://www.codeplex.com/PHPExcel)
* @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL
* @version ##VERSION##, ##DATE##
*/
class RC4
{
    // Permutation table (S-box) driving the keystream generator.
    protected $s = array();
    // Current positions within the S-box.
    protected $i = 0;
    protected $j = 0;

    /**
     * Initialise the cipher state from a key (RC4 key-scheduling algorithm).
     *
     * @param string $key Encryption key/passphrase
     */
    public function __construct($key)
    {
        $keyLength = strlen($key);

        // Start from the identity permutation.
        for ($this->i = 0; $this->i < 256; $this->i++) {
            $this->s[$this->i] = $this->i;
        }

        // Mix the key bytes into the permutation (KSA).
        $this->j = 0;
        for ($this->i = 0; $this->i < 256; $this->i++) {
            $this->j = ($this->j + $this->s[$this->i] + ord($key[$this->i % $keyLength])) % 256;
            $swap = $this->s[$this->i];
            $this->s[$this->i] = $this->s[$this->j];
            $this->s[$this->j] = $swap;
        }

        $this->i = $this->j = 0;
    }

    /**
     * Encrypt or decrypt data with the RC4 keystream.  The cipher is
     * symmetric, so the same call performs both operations.
     *
     * @param string $data Data to encrypt/decrypt
     *
     * @return string
     */
    public function RC4($data)
    {
        $dataLength = strlen($data);
        for ($pos = 0; $pos < $dataLength; $pos++) {
            // Advance the generator state (PRGA) and swap the two entries.
            $this->i = ($this->i + 1) % 256;
            $this->j = ($this->j + $this->s[$this->i]) % 256;
            $swap = $this->s[$this->i];
            $this->s[$this->i] = $this->s[$this->j];
            $this->s[$this->j] = $swap;
            // XOR the next keystream byte into the data in place.
            $keyStreamIndex = ($this->s[$this->i] + $this->s[$this->j]) % 256;
            $data[$pos] = chr(ord($data[$pos]) ^ $this->s[$keyStreamIndex]);
        }
        return $data;
    }
}
| Java |
/*
* Copyright 2011 Christopher Pheby
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jadira.bindings.core.binder;
import java.lang.annotation.Annotation;
import java.net.URL;
import org.jadira.bindings.core.api.Binding;
import org.jadira.bindings.core.api.Converter;
import org.jadira.bindings.core.api.FromUnmarshaller;
import org.jadira.bindings.core.api.ToMarshaller;
/**
 * Registration surface of a binder.  Implementations accept bindings,
 * marshallers, unmarshallers and converters — keyed either by explicit
 * source/target classes (optionally with a qualifier annotation) or by a
 * {@code ConverterKey} — as well as whole configuration files and
 * annotation-scanned classes.
 */
public interface RegisterableBinder {

    /**
     * Register the configuration file (bindings.xml) at the given URL
     * @param nextLocation The URL to register
     */
    void registerConfiguration(URL nextLocation);

    /**
     * Register a Binding with the given source and target class.
     * A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
     *
     * The source class is considered the owning class of the binding. The source can be marshalled
     * into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
     * @param key The converter key
     * @param converter The binding to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerBinding(ConverterKey<S,T> key, Binding<S, T> converter);

    /**
     * Register an UnMarshaller with the given source and target class.
     * The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param key The converter key
     * @param converter The FromUnmarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerUnmarshaller(ConverterKey<S,T> key, FromUnmarshaller<S, T> converter);

    /**
     * Register a Marshaller with the given source and target class.
     * The marshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param key The converter key
     * @param converter The ToMarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerMarshaller(ConverterKey<S,T> key, ToMarshaller<S, T> converter);

    /**
     * Register a Converter with the given input and output classes. Instances of the input class can be converted into
     * instances of the output class
     * @param key The converter key
     * @param converter The Converter to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerConverter(ConverterKey<S,T> key, Converter<S, T> converter);

    /**
     * Register a Binding with the given source and target class.
     * A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
     *
     * The source class is considered the owning class of the binding. The source can be marshalled
     * into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
     * @param sourceClass The source (owning) class
     * @param targetClass The target (foreign) class
     * @param converter The binding to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerBinding(final Class<S> sourceClass, Class<T> targetClass, Binding<S, T> converter);

    /**
     * Register an UnMarshaller with the given source and target class.
     * The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The FromUnmarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerUnmarshaller(Class<S> sourceClass, Class<T> targetClass, FromUnmarshaller<S, T> converter);

    /**
     * Register a Marshaller with the given source and target class.
     * The marshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The ToMarshaller to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerMarshaller(Class<S> sourceClass, Class<T> targetClass, ToMarshaller<S, T> converter);

    /**
     * Register a Converter with the given input and output classes. Instances of the input class can be converted into
     * instances of the output class
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The Converter to be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerConverter(final Class<S> sourceClass, Class<T> targetClass, Converter<S, T> converter);

    /**
     * Register a Binding with the given source and target class.
     * A binding unifies a marshaller and an unmarshaller and both must be available to resolve a binding.
     *
     * The source class is considered the owning class of the binding. The source can be marshalled
     * into the target class. Similarly, the target can be unmarshalled to produce an instance of the source type.
     * @param sourceClass The source (owning) class
     * @param targetClass The target (foreign) class
     * @param converter The binding to be registered
     * @param qualifier The qualifier for which the binding must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerBinding(final Class<S> sourceClass, Class<T> targetClass, Binding<S, T> converter, Class<? extends Annotation> qualifier);

    /**
     * Register an UnMarshaller with the given source and target class.
     * The unmarshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The FromUnmarshaller to be registered
     * @param qualifier The qualifier for which the unmarshaller must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerUnmarshaller(Class<S> sourceClass, Class<T> targetClass, FromUnmarshaller<S, T> converter, Class<? extends Annotation> qualifier);

    /**
     * Register a Marshaller with the given source and target class.
     * The marshaller is used as follows: Instances of the source can be marshalled into the target class.
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The ToMarshaller to be registered
     * @param qualifier The qualifier for which the marshaller must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerMarshaller(Class<S> sourceClass, Class<T> targetClass, ToMarshaller<S, T> converter, Class<? extends Annotation> qualifier);

    /**
     * Register a Converter with the given input and output classes. Instances of the input class can be converted into
     * instances of the output class
     * @param sourceClass The source (input) class
     * @param targetClass The target (output) class
     * @param converter The Converter to be registered
     * @param qualifier The qualifier for which the converter must be registered
     * @param <S> Source type
     * @param <T> Target type
     */
    <S, T> void registerConverter(final Class<S> sourceClass, Class<T> targetClass, Converter<S, T> converter, Class<? extends Annotation> qualifier);

    /**
     * Inspect each of the supplied classes, processing any of the annotated methods found
     * @param classesToInspect The classes to inspect for annotated binding methods
     */
    void registerAnnotatedClasses(Class<?>... classesToInspect);

    /**
     * Return an iterable collection of ConverterKeys, one for each currently registered conversion
     * @return An Iterable of ConverterKey instances, one per registered conversion
     */
    Iterable<ConverterKey<?, ?>> getConverterEntries();
}
| Java |
/**
* @license
* Copyright 2013 Palantir Technologies, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { IOptions, RuleSeverity } from "./language/rule/rule";
export interface IConfigurationFile {
    /**
     * @deprecated property is never set
     *
     * The severity that is applied to rules in this config file as well as rules
     * in any inherited config files which have their severity set to "default".
     * Not inherited.
     */
    defaultSeverity?: RuleSeverity;
    /**
     * An array of config files whose rules are inherited by this config file.
     */
    extends: string[];
    /**
     * Rules that are used to lint JavaScript files.
     */
    jsRules: Map<string, Partial<IOptions>>;
    /**
     * A subset of the CLI options.
     */
    linterOptions?: Partial<{
        exclude: string[];
        format: string;
    }>;
    /**
     * Directories containing custom rules. Resolved using node module semantics.
     */
    rulesDirectory: string[];
    /**
     * Rules that are used to lint TypeScript files.
     */
    rules: Map<string, Partial<IOptions>>;
}
/** Result of searching for and loading a TSLint configuration. */
export interface IConfigurationLoadResult {
    /** Path the configuration was loaded from, if one was found. */
    path?: string;
    /** The parsed configuration, if one was found. */
    results?: IConfigurationFile;
}
export declare const JSON_CONFIG_FILENAME = "tslint.json";
/** @deprecated use `JSON_CONFIG_FILENAME` or `CONFIG_FILENAMES` instead. */
export declare const CONFIG_FILENAME = "tslint.json";
/** All recognised configuration file names (JSON and YAML variants). */
export declare const CONFIG_FILENAMES: string[];
export declare const DEFAULT_CONFIG: IConfigurationFile;
export declare const EMPTY_CONFIG: IConfigurationFile;
/**
 * Searches for a TSLint configuration and returns the data from the config.
 * @param configFile A path to a config file, this can be null if the location of a config is not known
 * @param inputFilePath A path containing the current file being linted. This is the starting location
 * of the search for a configuration.
 * @returns Load status for a TSLint configuration object
 */
export declare function findConfiguration(configFile: string | null, inputFilePath: string): IConfigurationLoadResult;
export declare function findConfiguration(configFile: string, inputFilePath?: string): IConfigurationLoadResult;
/**
 * Searches for a TSLint configuration and returns the path to it.
 * Could return undefined if no configuration is found.
 * @param suppliedConfigFilePath A path to an known config file supplied by a user. Pass null here if
 * the location of the config file is not known and you want to search for one.
 * @param inputFilePath A path to the current file being linted. This is the starting location
 * of the search for a configuration.
 * @returns An absolute path to a tslint.json or tslint.yml or tslint.yaml file
 * or undefined if neither can be found.
 */
export declare function findConfigurationPath(suppliedConfigFilePath: string | null, inputFilePath: string): string | undefined;
export declare function findConfigurationPath(suppliedConfigFilePath: string, inputFilePath?: string): string | undefined;
/**
 * Uses Node semantics to load a configuration file given configFilePath.
 * For example:
 * '/path/to/config' will be treated as an absolute path
 * './path/to/config' will be treated as a relative path
 * 'path/to/config' will attempt to load a to/config file inside a node module named path
 * @param configFilePath The configuration to load
 * @param originalFilePath (deprecated) The entry point configuration file
 * @returns a configuration object for TSLint loaded from the file at configFilePath
 */
export declare function loadConfigurationFromPath(configFilePath?: string, _originalFilePath?: string): IConfigurationFile;
/** Reads the configuration file from disk and parses it as raw JSON, YAML or JS depending on the extension. */
export declare function readConfigurationFile(filepath: string): RawConfigFile;
/** Merges `nextConfigSource` into `targetConfig`, producing the combined configuration. */
export declare function extendConfigurationFile(targetConfig: IConfigurationFile, nextConfigSource: IConfigurationFile): IConfigurationFile;
/**
 * returns the absolute path (contrary to what the name implies)
 *
 * @deprecated use `path.resolve` instead
 */
export declare function getRelativePath(directory?: string | null, relativeTo?: string): string | undefined;
/** Returns whether `directory` should be treated as a filesystem path (as opposed to a module name). */
export declare function useAsPath(directory: string): boolean;
/**
 * @param directories A path(s) to a directory of custom rules
 * @param relativeTo A path that directories provided are relative to.
 * For example, if the directories come from a tslint.json file, this path
 * should be the path to the tslint.json file.
 * @return An array of absolute paths to directories potentially containing rules
 */
export declare function getRulesDirectories(directories?: string | string[], relativeTo?: string): string[];
/** Raw shape of a configuration file as read from disk, before normalization. */
export interface RawConfigFile {
    extends?: string | string[];
    linterOptions?: IConfigurationFile["linterOptions"];
    rulesDirectory?: string | string[];
    defaultSeverity?: string;
    rules?: RawRulesConfig;
    jsRules?: RawRulesConfig | boolean;
}
/** Mapping from rule name to its raw (unnormalized) configuration. */
export interface RawRulesConfig {
    [key: string]: RawRuleConfig;
}
/** A single rule's raw configuration: shorthand values or an options object. */
export declare type RawRuleConfig = null | undefined | boolean | any[] | {
    severity?: RuleSeverity | "warn" | "none" | "default";
    options?: any;
};
/**
 * Parses a config file and normalizes legacy config settings.
 * If `configFileDir` and `readConfig` are provided, this function will load all base configs and reduce them to the final configuration.
 *
 * @param configFile The raw object read from the JSON of a config file
 * @param configFileDir The directory of the config file
 * @param readConfig Will be used to load all base configurations while parsing. The function is called with the resolved path.
 */
export declare function parseConfigFile(configFile: RawConfigFile, configFileDir?: string, readConfig?: (path: string) => RawConfigFile): IConfigurationFile;
/**
 * Fills in default values for `IOption` properties and outputs an array of `IOption`
 */
export declare function convertRuleOptions(ruleConfiguration: Map<string, Partial<IOptions>>): IOptions[];
/** Returns whether `filepath` matches the config's `linterOptions.exclude` patterns. */
export declare function isFileExcluded(filepath: string, configFile?: IConfigurationFile): boolean;
/** Serializes a configuration object to a human-readable string. */
export declare function stringifyConfiguration(configFile: IConfigurationFile): string;
| Java |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.plugin.ij.intentions;
import com.intellij.codeInsight.CodeInsightUtilBase;
import com.intellij.codeInspection.LocalQuickFixAndIntentionActionOnPsiElement;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.util.PsiMatcherImpl;
import gw.internal.gosu.parser.Expression;
import gw.internal.gosu.parser.expressions.NumericLiteral;
import gw.lang.parser.IStatement;
import gw.lang.parser.statements.IAssignmentStatement;
import gw.lang.parser.statements.IStatementList;
import gw.lang.parser.statements.IWhileStatement;
import gw.plugin.ij.lang.psi.api.statements.IGosuVariable;
import gw.plugin.ij.lang.psi.impl.statements.GosuForEachStatementImpl;
import gw.plugin.ij.lang.psi.impl.statements.GosuWhileStatementImpl;
import gw.plugin.ij.lang.psi.util.GosuPsiParseUtil;
import gw.plugin.ij.util.GosuBundle;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import static com.intellij.psi.util.PsiMatchers.hasClass;
/**
 * Quick fix / intention that rewrites a counting {@code while} loop into an
 * equivalent Gosu {@code for (x in 0..bound)} interval loop.  The loop body
 * is copied verbatim minus the increment statement, and the now-redundant
 * zero-initialised counter declaration is deleted.
 */
public class WhileToForFix extends LocalQuickFixAndIntentionActionOnPsiElement {
    // Name of the loop counter variable being replaced.
    String ident;
    // Upper-bound expression taken from the while condition.
    Expression rhs;
    // The "var i = 0" declaration that becomes redundant after the rewrite.
    private IGosuVariable declarationEqualToZero;
    // The "i++" / "i += 1" statement inside the loop body to drop.
    private IAssignmentStatement increment;

    public WhileToForFix(PsiElement whileStmt, String ident, Expression rhs, IGosuVariable declarationEqualToZero, IAssignmentStatement increment) {
        super(whileStmt);
        this.ident = ident;
        this.rhs = rhs;
        this.declarationEqualToZero = declarationEqualToZero;
        this.increment = increment;
    }

    @Override
    public void invoke(@NotNull Project project, @NotNull PsiFile file, @Nullable("is null when called from inspection") Editor editor, @NotNull PsiElement startElement, @NotNull PsiElement endElement) {
        if (!CodeInsightUtilBase.prepareFileForWrite(startElement.getContainingFile())) {
            return;
        }
        IWhileStatement parsedElement = ((GosuWhileStatementImpl) startElement).getParsedElement();
        if (parsedElement == null) {
            return;
        }
        IStatement statement = parsedElement.getStatement();
        IStatement[] statements = ((IStatementList) statement).getStatements();
        // Build the replacement "for (ident in 0..bound) { ... }" source text.
        StringBuilder forStmt = new StringBuilder();
        forStmt.append("for (");
        forStmt.append(ident);
        forStmt.append(" in 0..");
        if(rhs instanceof NumericLiteral) {
            Object res = rhs.evaluate();
            if(res instanceof Integer) {
                // Literal bound: inclusive interval 0..n-1 replaces "< n".
                forStmt.append(((Integer)res)-1);
            }
        } else {
            // Non-literal bound: "0..|expr" — Gosu's interval with an
            // exclusive upper end.
            forStmt.append("|" + rhs);
        }
        forStmt.append(") {\n");
        String indent = getIndet(parsedElement, statements);
        // Copy the body, skipping the increment statement.
        for (IStatement statement1 : statements) {
            if (statement1 != increment) {
                forStmt.append(indent);
                forStmt.append(statement1.getLocation().getTextFromTokens());
                forStmt.append("\n");
            }
        }
        forStmt.append("}");
        PsiElement stub = GosuPsiParseUtil.parseProgramm(forStmt.toString(), startElement, file.getManager(), null);
        PsiElement newForStmt = new PsiMatcherImpl(stub)
                .descendant(hasClass(GosuForEachStatementImpl.class))
                .getElement();
        if (newForStmt != null) {
            declarationEqualToZero.delete();
            startElement.replace(newForStmt);
        }
    }

    // Computes the body indentation string relative to the while keyword.
    // NOTE(review): reads statements[1], which assumes the loop body holds at
    // least two statements — confirm callers guarantee this.
    private String getIndet(IWhileStatement parsedElement, IStatement[] statements) {
        int whileColum = parsedElement.getLocation().getColumn();
        int column = statements[1].getLocation().getColumn() - whileColum;
        if(column < 0) {
            return "  ";
        }
        StringBuilder out = new StringBuilder();
        for(int i = 0; i <= column; i++) {
            out.append(" ");
        }
        return out.toString();
    }

    // NOTE(review): appears unused within this class (invoke() deletes the
    // declaration via declarationEqualToZero.delete() instead) — candidate
    // for removal once external callers are ruled out.
    private void removeVarDecl(PsiElement whileStmt, String ident) {
        PsiElement prev = whileStmt.getPrevSibling();
        while (prev instanceof PsiWhiteSpace) {
            prev = prev.getPrevSibling();
        }
        if (prev instanceof IGosuVariable && ((IGosuVariable) prev).getName().equals(ident)) {
            prev.delete();
        }
    }

    @Override
    public boolean isAvailable(@NotNull Project project,
                               @NotNull PsiFile file,
                               @NotNull PsiElement startElement,
                               @NotNull PsiElement endElement) {
        return startElement instanceof GosuWhileStatementImpl;
    }

    @NotNull
    @Override
    public String getText() {
        return GosuBundle.message("inspection.while.to.for");
    }

    @NotNull
    @Override
    public String getFamilyName() {
        return GosuBundle.message("inspection.group.name.statement.issues");
    }
}
| Java |
/*
* Author: Markus Stenberg <markus [email protected]>
* Author: Steven Barth <[email protected]>
* Author: Pierre Pfister
*
* Copyright (c) 2014-2015 cisco Systems, Inc.
*/
#pragma once

/* Anything up to INFO is compiled in by default; syslog can be used
 * to filter them out. DEBUG can be quite spammy and isn't enabled by
 * default. */
#define HNETD_DEFAULT_L_LEVEL 6

#ifndef L_LEVEL
#define L_LEVEL HNETD_DEFAULT_L_LEVEL
#endif /* !L_LEVEL */

/* Per-translation-unit prefix prepended to every log format string. */
#ifndef L_PREFIX
#define L_PREFIX ""
#endif /* !L_PREFIX */

#ifdef __APPLE__
/* Haha. Got to love advanced IPv6 socket API being disabled by
 * default. */
#define __APPLE_USE_RFC_3542
#define IPV6_ADD_MEMBERSHIP IPV6_JOIN_GROUP
#define IPV6_DROP_MEMBERSHIP IPV6_LEAVE_GROUP
/* LIST_HEAD macro in sys/queue.h, argh.. */
#include <sys/queue.h>
#ifdef LIST_HEAD
#undef LIST_HEAD
#endif /* LIST_HEAD */
#endif /* __APPLE__ */

#include <stddef.h>
#include <stdint.h>
#include <time.h>
#include <syslog.h>
#include <sys/types.h>
#include <libubox/utils.h>
#include <inttypes.h>

/* Two-step expansion so macro arguments are expanded before being
 * stringified. */
#define STR_EXPAND(tok) #tok
#define STR(tok) STR_EXPAND(tok)
#define PRItime PRId64

#include "hnetd_time.h"

/* Runtime log threshold; messages with a higher level are suppressed. */
extern int log_level;

// Logging macros
/* Pluggable log sink (syslog-compatible signature); may be NULL to
 * disable logging entirely. */
extern void (*hnetd_log)(int priority, const char *format, ...);

/* Compile-time gated dispatch: each L_* macro below compiles to a no-op
 * when L_LEVEL excludes it, and otherwise checks the runtime log_level. */
#define L_INTERNAL(level, ...) \
do { \
  if (hnetd_log && log_level >= level) \
    hnetd_log(level, L_PREFIX __VA_ARGS__); \
} while(0)

#if L_LEVEL >= LOG_ERR
#define L_ERR(...) L_INTERNAL(LOG_ERR, __VA_ARGS__)
#else
#define L_ERR(...) do {} while(0)
#endif

#if L_LEVEL >= LOG_WARNING
#define L_WARN(...) L_INTERNAL(LOG_WARNING, __VA_ARGS__)
#else
#define L_WARN(...) do {} while(0)
#endif

#if L_LEVEL >= LOG_NOTICE
#define L_NOTICE(...) L_INTERNAL(LOG_NOTICE, __VA_ARGS__)
#else
#define L_NOTICE(...) do {} while(0)
#endif

#if L_LEVEL >= LOG_INFO
#define L_INFO(...) L_INTERNAL(LOG_INFO, __VA_ARGS__)
#else
#define L_INFO(...) do {} while(0)
#endif

#if L_LEVEL >= LOG_DEBUG
#define L_DEBUG(...) L_INTERNAL(LOG_DEBUG, __VA_ARGS__)
#else
#define L_DEBUG(...) do {} while(0)
#endif

// Some C99 compatibility
#ifndef typeof
#define typeof __typeof
#endif

#ifndef container_of
#define container_of(ptr, type, member) ( \
  (type *)( (char *)ptr - offsetof(type,member) ))
#endif

#ifndef __unused
#define __unused __attribute__((unused))
#endif
| Java |
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tools to work with checkpoints."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import six
from tensorflow.python.distribute import distribution_strategy_context
from tensorflow.python.framework import ops
from tensorflow.python.ops import io_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import checkpoint_management
from tensorflow.python.training import py_checkpoint_reader
from tensorflow.python.training.saving import saveable_object_util
from tensorflow.python.util.tf_export import tf_export
__all__ = [
"load_checkpoint", "load_variable", "list_variables",
"checkpoints_iterator", "init_from_checkpoint"
]
@tf_export("train.load_checkpoint")
def load_checkpoint(ckpt_dir_or_file):
  """Returns `CheckpointReader` for checkpoint found in `ckpt_dir_or_file`.

  If `ckpt_dir_or_file` resolves to a directory holding several checkpoints,
  a reader for the most recent one is returned.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint
      file.

  Returns:
    `CheckpointReader` object.

  Raises:
    ValueError: If `ckpt_dir_or_file` resolves to a directory with no
      checkpoints.
  """
  filename = _get_checkpoint_filename(ckpt_dir_or_file)
  if filename is not None:
    return py_checkpoint_reader.NewCheckpointReader(filename)
  raise ValueError("Couldn't find 'checkpoint' file or checkpoints in "
                   "given directory %s" % ckpt_dir_or_file)
@tf_export("train.load_variable")
def load_variable(ckpt_dir_or_file, name):
  """Returns the tensor value of the given variable in the checkpoint.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.
    name: Name of the variable to return.

  Returns:
    A numpy `ndarray` with a copy of the value of this variable.
  """
  # TODO(b/29227106): Fix this in the right place and remove this.
  # Strip the ":0" output-slot suffix that graph tensor names carry.
  variable_name = name[:-2] if name.endswith(":0") else name
  return load_checkpoint(ckpt_dir_or_file).get_tensor(variable_name)
@tf_export("train.list_variables")
def list_variables(ckpt_dir_or_file):
  """Returns list of all variables in the checkpoint.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.

  Returns:
    List of tuples `(name, shape)`.
  """
  shape_map = load_checkpoint(ckpt_dir_or_file).get_variable_to_shape_map()
  # Report variables in deterministic (sorted-by-name) order.
  return [(name, shape_map[name]) for name in sorted(shape_map)]
def wait_for_new_checkpoint(checkpoint_dir,
                            last_checkpoint=None,
                            seconds_to_sleep=1,
                            timeout=None):
  """Waits until a new checkpoint file is found.

  Args:
    checkpoint_dir: The directory in which checkpoints are saved.
    last_checkpoint: The last checkpoint path used or `None` if we're expecting
      a checkpoint for the first time.
    seconds_to_sleep: The number of seconds to sleep for before looking for a
      new checkpoint.
    timeout: The maximum number of seconds to wait. If left as `None`, then the
      process will wait indefinitely.

  Returns:
    a new checkpoint path, or None if the timeout was reached.
  """
  logging.info("Waiting for new checkpoint at %s", checkpoint_dir)
  stop_time = None if timeout is None else time.time() + timeout
  while True:
    checkpoint_path = checkpoint_management.latest_checkpoint(checkpoint_dir)
    # A checkpoint counts as "new" only if one exists and it differs from
    # the last one the caller saw.
    if checkpoint_path is not None and checkpoint_path != last_checkpoint:
      logging.info("Found new checkpoint at %s", checkpoint_path)
      return checkpoint_path
    # Give up if the next sleep would overshoot the deadline.
    if stop_time is not None and time.time() + seconds_to_sleep > stop_time:
      return None
    time.sleep(seconds_to_sleep)
@tf_export("train.checkpoints_iterator")
def checkpoints_iterator(checkpoint_dir,
                         min_interval_secs=0,
                         timeout=None,
                         timeout_fn=None):
  """Continuously yield new checkpoint files as they appear.

  The iterator only checks for new checkpoints when control flow has been
  reverted to it. This means it can miss checkpoints if your code takes longer
  to run between iterations than `min_interval_secs` or the interval at which
  new checkpoints are written.

  The `timeout` argument is the maximum number of seconds to block waiting for
  a new checkpoint. It is used in combination with the `timeout_fn` as
  follows:

  * If the timeout expires and no `timeout_fn` was specified, the iterator
    stops yielding.
  * If a `timeout_fn` was specified, that function is called and if it returns
    a true boolean value the iterator stops yielding.
  * If the function returns a false boolean value then the iterator resumes
    the wait for new checkpoints. At this point the timeout logic applies
    again.

  This behavior gives control to callers on what to do if checkpoints do not
  come fast enough or stop being generated. For example, if callers have a way
  to detect that the training has stopped and know that no new checkpoints
  will be generated, they can provide a `timeout_fn` that returns `True` when
  the training has stopped. If they know that the training is still going on
  they return `False` instead.

  Args:
    checkpoint_dir: The directory in which checkpoints are saved.
    min_interval_secs: The minimum number of seconds between yielding
      checkpoints.
    timeout: The maximum number of seconds to wait between checkpoints. If
      left as `None`, then the process will wait indefinitely.
    timeout_fn: Optional function to call after a timeout. If the function
      returns True, then it means that no new checkpoints will be generated
      and the iterator will exit. The function is called with no arguments.

  Yields:
    String paths to latest checkpoint files as they arrive.
  """
  last_path = None
  while True:
    candidate = wait_for_new_checkpoint(
        checkpoint_dir, last_path, timeout=timeout)
    if candidate is None:
      # Timed out. Without a timeout_fn we are done; otherwise ask it.
      if not timeout_fn:
        logging.info("Timed-out waiting for a checkpoint.")
        return
      if timeout_fn():
        # The timeout_fn indicated that we are truly done.
        return
      # The timeout_fn indicated that more checkpoints may come.
      continue
    cycle_start = time.time()
    last_path = candidate
    yield last_path
    # Throttle: ensure at least min_interval_secs between yields.
    remaining = cycle_start + min_interval_secs - time.time()
    if remaining > 0:
      time.sleep(remaining)
@tf_export(v1=["train.init_from_checkpoint"])
def init_from_checkpoint(ckpt_dir_or_file, assignment_map):
  """Replaces `tf.Variable` initializers so they load from a checkpoint file.

  Values are not loaded immediately, but when the initializer is run
  (typically by running a `tf.compat.v1.global_variables_initializer` op).

  Note: This overrides default initialization ops of specified variables and
  redefines dtype.

  Assignment map supports following syntax:

  * `'checkpoint_scope_name/': 'scope_name/'` - will load all variables in
    current `scope_name` from `checkpoint_scope_name` with matching tensor
    names.
  * `'checkpoint_scope_name/some_other_variable': 'scope_name/variable_name'` -
    will initialize `scope_name/variable_name` variable
    from `checkpoint_scope_name/some_other_variable`.
  * `'scope_variable_name': variable` - will initialize given `tf.Variable`
    object with tensor 'scope_variable_name' from the checkpoint.
  * `'scope_variable_name': list(variable)` - will initialize list of
    partitioned variables with tensor 'scope_variable_name' from the
    checkpoint.
  * `'/': 'scope_name/'` - will load all variables in current `scope_name`
    from checkpoint's root (e.g. no scope).

  Supports loading into partitioned variables, which are represented as
  `'<variable>/part_<part #>'`.

  Example:

  ```python
  # Say, '/tmp/model.ckpt' has the following tensors:
  #  -- name='old_scope_1/var1', shape=[20, 2]
  #  -- name='old_scope_1/var2', shape=[50, 4]
  #  -- name='old_scope_2/var3', shape=[100, 100]

  # Create new model's variables
  with tf.compat.v1.variable_scope('new_scope_1'):
    var1 = tf.compat.v1.get_variable('var1', shape=[20, 2],
                           initializer=tf.compat.v1.zeros_initializer())
  with tf.compat.v1.variable_scope('new_scope_2'):
    var2 = tf.compat.v1.get_variable('var2', shape=[50, 4],
                           initializer=tf.compat.v1.zeros_initializer())
    # Partition into 5 variables along the first axis.
    var3 = tf.compat.v1.get_variable(name='var3', shape=[100, 100],
                           initializer=tf.compat.v1.zeros_initializer(),
                           partitioner=lambda shape, dtype: [5, 1])

  # Initialize all variables in `new_scope_1` from `old_scope_1`.
  init_from_checkpoint('/tmp/model.ckpt', {'old_scope_1/': 'new_scope_1'})

  # Use names to specify which variables to initialize from checkpoint.
  init_from_checkpoint('/tmp/model.ckpt',
                       {'old_scope_1/var1': 'new_scope_1/var1',
                        'old_scope_1/var2': 'new_scope_2/var2'})

  # Or use tf.Variable objects to identify what to initialize.
  init_from_checkpoint('/tmp/model.ckpt',
                       {'old_scope_1/var1': var1,
                        'old_scope_1/var2': var2})

  # Initialize partitioned variables using variable's name
  init_from_checkpoint('/tmp/model.ckpt',
                       {'old_scope_2/var3': 'new_scope_2/var3'})

  # Or specify the list of tf.Variable objects.
  init_from_checkpoint('/tmp/model.ckpt',
                       {'old_scope_2/var3': var3._get_variable_list()})
  ```

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.
    assignment_map: Dict, where keys are names of the variables in the
      checkpoint and values are current variables or names of current
      variables (in default graph).

  Raises:
    ValueError: If missing variables in current graph, or if missing
      checkpoints or tensors in checkpoints.
  """
  # All the real work happens in _init_from_checkpoint; the only concern here
  # is distribution strategies: the initializer rewrite must run exactly once
  # in cross-replica context, not once per replica.
  init_from_checkpoint_fn = lambda _: _init_from_checkpoint(
      ckpt_dir_or_file, assignment_map)
  if distribution_strategy_context.get_cross_replica_context():
    # Already in cross-replica context: run directly.
    init_from_checkpoint_fn(None)
  else:
    # Inside a replica: merge so the rewrite is performed once for all
    # replicas.
    distribution_strategy_context.get_replica_context().merge_call(
        init_from_checkpoint_fn)
def _init_from_checkpoint(ckpt_dir_or_file, assignment_map):
  """See `init_from_checkpoint` for documentation."""
  ckpt_file = _get_checkpoint_filename(ckpt_dir_or_file)
  reader = load_checkpoint(ckpt_dir_or_file)
  variable_map = reader.get_variable_to_shape_map()
  # Sorted iteration keeps the initialization order deterministic.
  for tensor_name_in_ckpt, current_var_or_name in sorted(
      six.iteritems(assignment_map)):
    var = None
    # Check if this is Variable object or list of Variable objects (in case of
    # partitioned variables).
    if _is_variable(current_var_or_name) or (
        isinstance(current_var_or_name, list)
        and all(_is_variable(v) for v in current_var_or_name)):
      var = current_var_or_name
    else:
      store_vars = vs._get_default_variable_store()._vars  # pylint:disable=protected-access
      # Check if this variable is in var_store.
      var = store_vars.get(current_var_or_name, None)
      # Also check if variable is partitioned as list.
      if var is None:
        var = _collect_partitioned_variable(current_var_or_name, store_vars)
    if var is not None:
      # If 1 to 1 mapping was provided, find variable in the checkpoint.
      if tensor_name_in_ckpt not in variable_map:
        raise ValueError("Tensor %s is not found in %s checkpoint %s" % (
            tensor_name_in_ckpt, ckpt_dir_or_file, variable_map
        ))
      if _is_variable(var):
        # Additional at-call-time checks.
        if not var.get_shape().is_compatible_with(
            variable_map[tensor_name_in_ckpt]):
          raise ValueError(
              "Shape of variable %s (%s) doesn't match with shape of "
              "tensor %s (%s) from checkpoint reader." % (
                  var.name, str(var.get_shape()),
                  tensor_name_in_ckpt, str(variable_map[tensor_name_in_ckpt])
              ))
        var_name = var.name
      else:
        # `var` is a list of partition variables; join names for logging.
        var_name = ",".join([v.name for v in var])
      _set_variable_or_list_initializer(var, ckpt_file, tensor_name_in_ckpt)
      logging.debug("Initialize variable %s from checkpoint %s with %s",
                    var_name, ckpt_dir_or_file, tensor_name_in_ckpt)
    else:
      # No single variable matched: treat the entry as a scope-to-scope
      # mapping. NOTE: this branch is only reachable after the `else` above
      # ran, so `store_vars` is always bound here.
      scopes = ""
      # TODO(vihanjain): Support list of 'current_var_or_name' here.
      if "/" in current_var_or_name:
        scopes = current_var_or_name[:current_var_or_name.rindex("/")]
      if not tensor_name_in_ckpt.endswith("/"):
        raise ValueError(
            "Assignment map with scope only name {} should map to scope only "
            "{}. Should be 'scope/': 'other_scope/'.".format(
                scopes, tensor_name_in_ckpt))
      # If scope to scope mapping was provided, find all variables in the
      # scope and create variable to variable mapping.
      scope_variables = set()
      for var_name in store_vars:
        if not scopes or var_name.startswith(scopes + "/"):
          # Consume /part_ if partitioned variable.
          if "/part_" in var_name:
            var_name = var_name[:var_name.index("/part_")]
          scope_variables.add(var_name)
      for var_name in sorted(scope_variables):
        # Lookup name with specified prefix and suffix from current variable.
        # If tensor_name given is '/' (root), don't use it for full name.
        full_tensor_name = var_name[len(scopes):]
        if current_var_or_name != "/":
          # Strip the '/' separating scope prefix from the variable name.
          full_tensor_name = full_tensor_name[1:]
        if tensor_name_in_ckpt != "/":
          full_tensor_name = tensor_name_in_ckpt + full_tensor_name
        # Remove trailing '/', if any, in the full_tensor_name
        if full_tensor_name.endswith("/"):
          full_tensor_name = full_tensor_name[:-1]
        if full_tensor_name not in variable_map:
          raise ValueError(
              "Tensor %s (%s in %s) is not found in %s checkpoint" % (
                  full_tensor_name, var_name[len(scopes) + 1:],
                  tensor_name_in_ckpt, ckpt_dir_or_file
              ))
        var = store_vars.get(var_name, None)
        if var is None:
          # The scope member may itself be a partitioned variable.
          var = _collect_partitioned_variable(var_name, store_vars)
        _set_variable_or_list_initializer(var, ckpt_file, full_tensor_name)
        logging.debug("Initialize variable %s from checkpoint %s with %s",
                      var_name, ckpt_dir_or_file, full_tensor_name)
def _get_checkpoint_filename(ckpt_dir_or_file):
  """Returns checkpoint filename given directory or specific checkpoint file.

  Args:
    ckpt_dir_or_file: Directory with checkpoints file or path to checkpoint.

  Returns:
    Path to a checkpoint file.

  Raises:
    ValueError: if `ckpt_dir_or_file` is a directory that contains no
      checkpoint.
  """
  if gfile.IsDirectory(ckpt_dir_or_file):
    ckpt_file = checkpoint_management.latest_checkpoint(ckpt_dir_or_file)
    # latest_checkpoint returns None for a directory without checkpoints;
    # fail fast with a clear message instead of letting None propagate into
    # the checkpoint reader and fail obscurely there.
    if ckpt_file is None:
      raise ValueError("Checkpoint file or directory is empty: %s" %
                       ckpt_dir_or_file)
    return ckpt_file
  return ckpt_dir_or_file
def _set_checkpoint_initializer(variable,
                                ckpt_file,
                                tensor_name,
                                slice_spec,
                                name="checkpoint_initializer"):
  """Overrides given variable's initialization op.

  Sets variable initializer to assign op that initializes variable from
  tensor's value in the checkpoint.

  Args:
    variable: `tf.Variable` object.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.
    slice_spec: Slice specification for loading partitioned tensors.
    name: Name of the operation.
  """
  base_type = variable.dtype.base_dtype
  # Do not colocate with variable since RestoreV2 op only runs on CPU and
  # colocation will force variable (and other ops that colocate with variable)
  # to be on CPU as well. It is okay to place the variable's initializer op on
  # CPU since it will only be run once at the start.
  with ops.device(variable.device), ops.device("/cpu:0"):
    restore_op = io_ops.restore_v2(
        ckpt_file, [tensor_name], [slice_spec], [base_type], name=name)[0]

    # Route the restored tensor through the variable's SaveableObject so the
    # assignment uses the same machinery as a regular Saver restore.
    names_to_saveables = saveable_object_util.op_list_to_dict([variable])
    saveable_objects = []
    for name, op in names_to_saveables.items():
      for s in saveable_object_util.saveable_objects_for_op(op, name):
        saveable_objects.append(s)

    assert len(saveable_objects) == 1  # Should be only one variable.
    init_op = saveable_objects[0].restore([restore_op], restored_shapes=None)

    # Replace both the initializer op and the recorded initial value so the
    # variable is fully backed by the checkpoint tensor.
    # pylint:disable=protected-access
    variable._initializer_op = init_op
    restore_op.set_shape(variable.shape)
    variable._initial_value = restore_op
    # pylint:enable=protected-access
def _set_variable_or_list_initializer(variable_or_list, ckpt_file,
                                      tensor_name):
  """Overrides initialization op of given variable or list of variables.

  Calls `_set_checkpoint_initializer` for each variable in the given list of
  variables.

  Args:
    variable_or_list: `tf.Variable` object or a list of `tf.Variable` objects.
    ckpt_file: string, full path of the checkpoint.
    tensor_name: Name of the tensor to load from the checkpoint.

  Raises:
    ValueError: if all objects in `variable_or_list` are not partitions of the
      same large variable.
  """
  if isinstance(variable_or_list, (list, tuple)):
    # A set of slices.
    slice_name = None
    for v in variable_or_list:
      slice_info = v._save_slice_info  # pylint:disable=protected-access
      # Every part must belong to the same logical (full) variable.
      if slice_name is None:
        slice_name = slice_info.full_name
      elif slice_name != slice_info.full_name:
        raise ValueError("Slices must all be from the same tensor: %s != %s" %
                         (slice_name, slice_info.full_name))
      _set_checkpoint_initializer(v, ckpt_file, tensor_name, slice_info.spec)
  else:
    # Single variable: an empty slice spec loads the whole tensor.
    _set_checkpoint_initializer(variable_or_list, ckpt_file, tensor_name, "")
def _is_variable(x):
  """Returns True if `x` is a ref-based or resource-based `tf.Variable`."""
  if isinstance(x, variables.Variable):
    return True
  return resource_variable_ops.is_resource_variable(x)
def _collect_partitioned_variable(name, all_vars):
"""Returns list of `tf.Variable` that comprise the partitioned variable."""
if name + "/part_0" in all_vars:
var = []
i = 0
while name + "/part_%d" % i in all_vars:
var.append(all_vars[name + "/part_%d" % i])
i += 1
return var
return None
| Java |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
using System.Reflection;
using System.Runtime.InteropServices;
// Assembly-level metadata identifying the Apache Software Foundation as the
// publisher of this assembly.
[assembly: AssemblyCompany("The Apache Software Foundation.")]
[assembly: AssemblyTrademark("The Apache Software Foundation")]
[assembly: AssemblyCopyright("Copyright © 2017 The Apache Software Foundation")]
[assembly: AssemblyCulture("")]
[assembly: AssemblyConfiguration("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components.  If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// Assembly and file version; keep the two in sync when cutting a release.
[assembly: AssemblyVersion("0.16.0.0")]
[assembly: AssemblyFileVersion("0.16.0.0")]
| Java |
// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef V8_DATE_H_
#define V8_DATE_H_
#include "src/base/timezone-cache.h"
#include "src/globals.h"
#include "src/objects/smi.h"
namespace v8 {
namespace internal {
// Caches results of expensive OS timezone queries (local offset, daylight
// savings transitions, timezone names) used by JSDate operations.
class DateCache {
 public:
  static const int kMsPerMin = 60 * 1000;
  static const int kSecPerDay = 24 * 60 * 60;
  static const int64_t kMsPerDay = kSecPerDay * 1000;
  static const int64_t kMsPerMonth = kMsPerDay * 30;

  // The largest time that can be passed to OS date-time library functions.
  static const int kMaxEpochTimeInSec = kMaxInt;
  static const int64_t kMaxEpochTimeInMs =
      static_cast<int64_t>(kMaxInt) * 1000;

  // The largest time that can be stored in JSDate.
  static const int64_t kMaxTimeInMs =
      static_cast<int64_t>(864000000) * 10000000;

  // Conservative upper bound on time that can be stored in JSDate
  // before UTC conversion.
  static const int64_t kMaxTimeBeforeUTCInMs = kMaxTimeInMs + kMsPerMonth;

  // Sentinel that denotes an invalid local offset.
  static const int kInvalidLocalOffsetInMs = kMaxInt;
  // Sentinel that denotes an invalid cache stamp.
  // It is an invariant of DateCache that cache stamp is non-negative.
  static const int kInvalidStamp = -1;

  DateCache();

  virtual ~DateCache() {
    delete tz_cache_;
    tz_cache_ = nullptr;
  }

  // Clears cached timezone information and increments the cache stamp.
  void ResetDateCache(
      base::TimezoneCache::TimeZoneDetection time_zone_detection);

  // Computes floor(time_ms / kMsPerDay).
  static int DaysFromTime(int64_t time_ms) {
    // Bias negative times so integer division rounds toward -infinity.
    if (time_ms < 0) time_ms -= (kMsPerDay - 1);
    return static_cast<int>(time_ms / kMsPerDay);
  }

  // Computes modulo(time_ms, kMsPerDay) given that
  // days = floor(time_ms / kMsPerDay).
  static int TimeInDay(int64_t time_ms, int days) {
    return static_cast<int>(time_ms - days * kMsPerDay);
  }

  // ECMA 262 - ES#sec-timeclip TimeClip (time)
  static double TimeClip(double time);

  // Given the number of days since the epoch, computes the weekday.
  // ECMA 262 - 15.9.1.6.
  int Weekday(int days) {
    // Epoch day 0 (1970-01-01) maps to 4; the fixup keeps the result
    // non-negative for days before the epoch.
    int result = (days + 4) % 7;
    return result >= 0 ? result : result + 7;
  }

  // Gregorian leap-year rule.
  bool IsLeap(int year) {
    return year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
  }

  // ECMA 262 - ES#sec-local-time-zone-adjustment
  int LocalOffsetInMs(int64_t time, bool is_utc) {
    return GetLocalOffsetFromOS(time, is_utc);
  }

  const char* LocalTimezone(int64_t time_ms) {
    // Out-of-range times are mapped to an equivalent in-range time before
    // querying the OS (see EquivalentTime below).
    if (time_ms < 0 || time_ms > kMaxEpochTimeInMs) {
      time_ms = EquivalentTime(time_ms);
    }
    bool is_dst = DaylightSavingsOffsetInMs(time_ms) != 0;
    // Timezone names are cached separately for DST and non-DST.
    const char** name = is_dst ? &dst_tz_name_ : &tz_name_;
    if (*name == nullptr) {
      *name = tz_cache_->LocalTimezone(static_cast<double>(time_ms));
    }
    return *name;
  }

  // ECMA 262 - 15.9.5.26
  int TimezoneOffset(int64_t time_ms) {
    int64_t local_ms = ToLocal(time_ms);
    return static_cast<int>((time_ms - local_ms) / kMsPerMin);
  }

  // ECMA 262 - ES#sec-localtime-t
  // LocalTime(t) = t + LocalTZA(t, true)
  int64_t ToLocal(int64_t time_ms) {
    return time_ms + LocalOffsetInMs(time_ms, true);
  }

  // ECMA 262 - ES#sec-utc-t
  // UTC(t) = t - LocalTZA(t, false)
  int64_t ToUTC(int64_t time_ms) {
    return time_ms - LocalOffsetInMs(time_ms, false);
  }

  // Computes a time equivalent to the given time according
  // to ECMA 262 - 15.9.1.9.
  // The issue here is that some library calls don't work right for dates
  // that cannot be represented using a non-negative signed 32 bit integer
  // (measured in whole seconds based on the 1970 epoch).
  // We solve this by mapping the time to a year with same leap-year-ness
  // and same starting day for the year.  The ECMAscript specification says
  // we must do this, but for compatibility with other browsers, we use
  // the actual year if it is in the range 1970..2037.
  int64_t EquivalentTime(int64_t time_ms) {
    int days = DaysFromTime(time_ms);
    int time_within_day_ms = static_cast<int>(time_ms - days * kMsPerDay);
    int year, month, day;
    YearMonthDayFromDays(days, &year, &month, &day);
    int new_days = DaysFromYearMonth(EquivalentYear(year), month) + day - 1;
    return static_cast<int64_t>(new_days) * kMsPerDay + time_within_day_ms;
  }

  // Returns an equivalent year in the range [2008-2035] matching
  // - leap year,
  // - week day of first day.
  // ECMA 262 - 15.9.1.9.
  int EquivalentYear(int year) {
    int week_day = Weekday(DaysFromYearMonth(year, 0));
    int recent_year = (IsLeap(year) ? 1956 : 1967) + (week_day * 12) % 28;
    // Find the year in the range 2008..2037 that is equivalent mod 28.
    // Add 3*28 to give a positive argument to the modulus operator.
    return 2008 + (recent_year + 3 * 28 - 2008) % 28;
  }

  // Given the number of days since the epoch, computes
  // the corresponding year, month, and day.
  void YearMonthDayFromDays(int days, int* year, int* month, int* day);

  // Computes the number of days since the epoch for
  // the first day of the given month in the given year.
  int DaysFromYearMonth(int year, int month);

  // Breaks down the time value.
  void BreakDownTime(int64_t time_ms, int* year, int* month, int* day,
                     int* weekday, int* hour, int* min, int* sec, int* ms);

  // Cache stamp is used for invalidating caches in JSDate.
  // We increment the stamp each time when the timezone information changes.
  // JSDate objects perform stamp check and invalidate their caches if
  // their saved stamp is not equal to the current stamp.
  Smi stamp() { return stamp_; }
  void* stamp_address() { return &stamp_; }

  // These functions are virtual so that we can override them when testing.
  virtual int GetDaylightSavingsOffsetFromOS(int64_t time_sec) {
    double time_ms = static_cast<double>(time_sec * 1000);
    return static_cast<int>(tz_cache_->DaylightSavingsOffset(time_ms));
  }

  virtual int GetLocalOffsetFromOS(int64_t time_ms, bool is_utc);

 private:
  // The implementation relies on the fact that no time zones have
  // more than one daylight savings offset change per 19 days.
  // In Egypt in 2010 they decided to suspend DST during Ramadan. This
  // led to a short interval where DST is in effect from September 10 to
  // September 30.
  static const int kDefaultDSTDeltaInSec = 19 * kSecPerDay;

  // Size of the Daylight Savings Time cache.
  static const int kDSTSize = 32;

  // Daylight Savings Time segment stores a segment of time where
  // daylight savings offset does not change.
  struct DST {
    int start_sec;
    int end_sec;
    int offset_ms;
    // LRU counter used by LeastRecentlyUsedDST for eviction.
    int last_used;
  };

  // Computes the daylight savings offset for the given time.
  // ECMA 262 - 15.9.1.8
  int DaylightSavingsOffsetInMs(int64_t time_ms);

  // Sets the before_ and the after_ segments from the DST cache such that
  // the before_ segment starts earlier than the given time and
  // the after_ segment start later than the given time.
  // Both segments might be invalid.
  // The last_used counters of the before_ and after_ are updated.
  void ProbeDST(int time_sec);

  // Finds the least recently used segment from the DST cache that is not
  // equal to the given 'skip' segment.
  DST* LeastRecentlyUsedDST(DST* skip);

  // Extends the after_ segment with the given point or resets it
  // if it starts later than the given time + kDefaultDSTDeltaInSec.
  inline void ExtendTheAfterSegment(int time_sec, int offset_ms);

  // Makes the given segment invalid.
  inline void ClearSegment(DST* segment);

  // A segment is invalid iff its interval is empty (start after end).
  bool InvalidSegment(DST* segment) {
    return segment->start_sec > segment->end_sec;
  }

  Smi stamp_;

  // Daylight Saving Time cache.
  DST dst_[kDSTSize];
  int dst_usage_counter_;
  DST* before_;
  DST* after_;

  int local_offset_ms_;

  // Year/Month/Day cache.
  bool ymd_valid_;
  int ymd_days_;
  int ymd_year_;
  int ymd_month_;
  int ymd_day_;

  // Timezone name cache
  const char* tz_name_;
  const char* dst_tz_name_;

  base::TimezoneCache* tz_cache_;
};
} // namespace internal
} // namespace v8
#endif // V8_DATE_H_
| Java |
//
// SZUserSettingsViewControllerIOS6.h
// Socialize
//
// Created by David Jedeikin on 1/6/14.
// Copyright (c) 2014 ShareThis. All rights reserved.
//
#import <Socialize/Socialize.h>
// iOS 6-specific variant of the user settings screen. Declares no additional
// public API; all visible behavior is inherited from
// SZUserSettingsViewController.
@interface SZUserSettingsViewControllerIOS6 : SZUserSettingsViewController
@end
| Java |
/* Copyright 2019 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
// This file implements logic for lowering MHLO dialect to Standard dialect.
#include "llvm/ADT/StringSwitch.h"
#include "mlir-hlo/Dialect/mhlo/IR/hlo_ops.h"
#include "mlir-hlo/Dialect/mhlo/transforms/passes.h"
#include "mlir-hlo/Dialect/mhlo/transforms/rewriters.h"
#include "mlir/Dialect/StandardOps/IR/Ops.h"
#include "mlir/IR/Function.h"
#include "mlir/IR/PatternMatch.h"
#include "mlir/Pass/Pass.h"
namespace mlir {
namespace {
#include "generated_legalize_to_standard.inc"
} // end anonymous namespace
namespace mhlo {
namespace {
// Lowers mhlo.compare on same-shaped signless-integer tensors to std.cmpi.
class CompareIConvert : public OpRewritePattern<mhlo::CompareOp> {
 public:
  using OpRewritePattern::OpRewritePattern;

  LogicalResult matchAndRewrite(mhlo::CompareOp op,
                                PatternRewriter &rewriter) const override {
    auto lhs = op.lhs();
    auto rhs = op.rhs();
    auto lhs_type = lhs.getType().cast<TensorType>();
    auto rhs_type = rhs.getType().cast<TensorType>();

    // Broadcasting not supported by this rewrite.
    if (lhs_type.getShape() != rhs_type.getShape()) return failure();

    // Only signless integers are handled here; note the predicates below are
    // the *signed* cmpi variants.
    if (!lhs_type.getElementType().isSignlessInteger() ||
        !rhs_type.getElementType().isSignlessInteger())
      return failure();

    // Translate the MHLO comparison-direction string into a cmpi predicate;
    // unknown directions leave the op untouched.
    auto comparison_direction = op.comparison_direction();
    auto compare_predicate =
        llvm::StringSwitch<Optional<CmpIPredicate>>(comparison_direction)
            .Case("EQ", CmpIPredicate::eq)
            .Case("NE", CmpIPredicate::ne)
            .Case("LT", CmpIPredicate::slt)
            .Case("LE", CmpIPredicate::sle)
            .Case("GT", CmpIPredicate::sgt)
            .Case("GE", CmpIPredicate::sge)
            .Default(llvm::None);
    if (!compare_predicate.hasValue()) return failure();

    rewriter.replaceOpWithNewOp<CmpIOp>(op, compare_predicate.getValue(), lhs,
                                        rhs);
    return success();
  }
};
// Lowers mhlo.compare on same-shaped floating-point tensors to std.cmpf.
class CompareFConvert : public OpRewritePattern<mhlo::CompareOp> {
 public:
  using OpRewritePattern::OpRewritePattern;

  LogicalResult matchAndRewrite(mhlo::CompareOp op,
                                PatternRewriter &rewriter) const override {
    auto lhs = op.lhs();
    auto rhs = op.rhs();
    auto lhs_type = lhs.getType().cast<TensorType>();
    auto rhs_type = rhs.getType().cast<TensorType>();

    // Broadcasting not supported by this rewrite.
    if (lhs_type.getShape() != rhs_type.getShape()) return failure();

    if (!lhs_type.getElementType().isa<FloatType>() ||
        !rhs_type.getElementType().isa<FloatType>())
      return failure();

    // NOTE(review): "NE" maps to the *unordered* UNE predicate (true when
    // either operand is NaN) while the other directions use ordered
    // predicates — presumably to match HLO default comparison semantics;
    // confirm before changing.
    auto comparison_direction = op.comparison_direction();
    auto compare_predicate =
        llvm::StringSwitch<Optional<CmpFPredicate>>(comparison_direction)
            .Case("EQ", CmpFPredicate::OEQ)
            .Case("NE", CmpFPredicate::UNE)
            .Case("LT", CmpFPredicate::OLT)
            .Case("LE", CmpFPredicate::OLE)
            .Case("GT", CmpFPredicate::OGT)
            .Case("GE", CmpFPredicate::OGE)
            .Default(llvm::None);
    if (!compare_predicate.hasValue()) return failure();

    rewriter.replaceOpWithNewOp<CmpFOp>(op, compare_predicate.getValue(), lhs,
                                        rhs);
    return success();
  }
};
// Replace IotaOp with an integer constant. A ConvertOp is added to
// convert the integer constant to iota result type. For complex types, the
// real part is replaced with the generated constant and the imaginary part is
// replaced with zero tensor.
class ConvertIotaOp : public OpRewritePattern<mhlo::IotaOp> {
 public:
  using OpRewritePattern::OpRewritePattern;

  LogicalResult matchAndRewrite(mhlo::IotaOp op,
                                PatternRewriter &rewriter) const override {
    auto output_type = op.getType().cast<ShapedType>();
    auto output_size = output_type.getNumElements();
    auto dimension = op.iota_dimension();
    auto max_dim_size = output_type.getDimSize(dimension);

    auto element_type = output_type.getElementType();
    int bitwidth;

    // For complex results, build the constant from the underlying int/float
    // element type; otherwise use the element type directly.
    auto complex_ty = element_type.dyn_cast<ComplexType>();
    Type int_or_float_ty = element_type;
    if (complex_ty) int_or_float_ty = complex_ty.getElementType();

    bitwidth = int_or_float_ty.getIntOrFloatBitWidth();
    llvm::SmallVector<APInt, 10> values;
    values.reserve(output_size);

    // Stride (in flattened elements) at which the index along the iota
    // dimension advances by one.
    int64_t increase_stride = output_size;
    for (int i = 0; i <= dimension; i++) {
      increase_stride /= output_type.getDimSize(i);
    }

    // Materialize the iota values in row-major (flattened) order.
    int64_t current_value = 0;
    for (int i = 0; i < output_size; i++) {
      int64_t value = (current_value / increase_stride) % max_dim_size;
      values.push_back(APInt(bitwidth, value));
      ++current_value;
    }

    auto int_shape_type = RankedTensorType::get(
        output_type.getShape(),
        IntegerType::get(bitwidth, rewriter.getContext()));
    auto loc = op.getLoc();
    auto integer_const = rewriter.create<mlir::ConstantOp>(
        loc, DenseIntElementsAttr::get(int_shape_type, values));

    auto int_or_float_shape_ty =
        RankedTensorType::get(output_type.getShape(), int_or_float_ty);

    auto iota_const =
        rewriter.create<ConvertOp>(loc, int_or_float_shape_ty, integer_const);

    // For int/float types we are done, replace op and return.
    if (!complex_ty) {
      rewriter.replaceOp(op, iota_const.getResult());
      return success();
    }

    // For complex types, generate a constant tensor of zeroes for the
    // imaginary part and use iota_const for real part.
    auto zeroes = rewriter.create<mlir::ConstantOp>(
        loc, DenseIntElementsAttr::get(int_shape_type, APInt(bitwidth, 0)));
    auto imag_zeroes =
        rewriter.create<ConvertOp>(loc, int_or_float_shape_ty, zeroes);
    rewriter.replaceOpWithNewOp<mhlo::ComplexOp>(op, iota_const, imag_zeroes);
    return success();
  }
};
} // end anonymous namespace
namespace {
// Function pass that rewrites supported MHLO ops into equivalent
// Standard-dialect ops.
struct LegalizeToStandardPass
    : public PassWrapper<LegalizeToStandardPass, FunctionPass> {
  void getDependentDialects(DialectRegistry &registry) const override {
    // The rewrite patterns create Standard-dialect ops (cmpi, cmpf,
    // constant), so that dialect must be loaded.
    registry.insert<StandardOpsDialect>();
  }

  /// Perform the lowering to Standard dialect.
  void runOnFunction() override;
};
}  // end anonymous namespace
/// Creates a new instance of the MHLO-to-Standard legalization pass.
std::unique_ptr<mlir::OperationPass<mlir::FuncOp>> createLegalizeToStdPass() {
  return std::make_unique<LegalizeToStandardPass>();
}
/// Populates `patterns` with the TableGen-generated rewrites plus the
/// hand-written compare and iota lowerings defined in this file.
void PopulateMhloToStdPatterns(OwningRewritePatternList *patterns,
                               mlir::MLIRContext *ctx) {
  mlir::populateWithGenerated(ctx, patterns);
  patterns->insert<CompareFConvert, CompareIConvert, ConvertIotaOp>(ctx);
}
/// Perform the lowering to standard dialect.
void LegalizeToStandardPass::runOnFunction() {
  OwningRewritePatternList patterns;
  mlir::mhlo::PopulateMhloToStdPatterns(&patterns, &getContext());
  // Best-effort lowering: ops whose patterns fail to match are left as-is.
  applyPatternsAndFoldGreedily(getFunction(), patterns);
}
} // end namespace mhlo
} // end namespace mlir
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.uima.annotator.regex;
import java.util.regex.Pattern;
/**
 * RegexVariables interface.
 *
 * A mapping of variable names to string values, where variables are
 * referenced inside regular expressions with the <code>\v{name}</code>
 * syntax.
 */
public interface RegexVariables {

  // Marker that introduces a variable reference inside a regex.
  public static final String VARIABLE_START = "\\v";

  // Regex fragments matching the literal characters "\v{" and "}".
  public static final String VARIABLE_REGEX_BEGIN = "\\\\v\\{";

  public static final String VARIABLE_REGEX_END = "\\}";

  // Matches a full variable reference "\v{word}" and captures the name.
  public static final Pattern VARIABLE_REGEX_PATTERN = Pattern
      .compile(VARIABLE_REGEX_BEGIN + "(\\w+)" + VARIABLE_REGEX_END);

  /**
   * Adds a variable to the Variables object.
   *
   * @param varName
   *          variable name
   *
   * @param varValue
   *          variable value
   */
  public void addVariable(String varName, String varValue);

  /**
   * Returns the value of the specified variable or <code>null</code> if the
   * variable does not exist.
   *
   * @param varName
   *          variable name
   *
   * @return returns the variable value, or <code>null</code> if the variable
   *         does not exist
   */
  public String getVariableValue(String varName);
}
/*
* The Alluxio Open Foundation licenses this work under the Apache License, version 2.0
* (the "License"). You may not use this work except in compliance with the License, which is
* available at www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied, as more fully set forth in the License.
*
* See the NOTICE file distributed with this work for information regarding copyright ownership.
*/
package alluxio.client.block.stream;
import alluxio.conf.AlluxioConfiguration;
import alluxio.conf.PropertyKey;
import alluxio.exception.status.AlluxioStatusException;
import alluxio.exception.status.UnauthenticatedException;
import alluxio.grpc.BlockWorkerGrpc;
import alluxio.grpc.CacheRequest;
import alluxio.grpc.ClearMetricsRequest;
import alluxio.grpc.ClearMetricsResponse;
import alluxio.grpc.CreateLocalBlockRequest;
import alluxio.grpc.CreateLocalBlockResponse;
import alluxio.grpc.DataMessageMarshaller;
import alluxio.grpc.DataMessageMarshallerProvider;
import alluxio.grpc.GrpcChannel;
import alluxio.grpc.GrpcChannelBuilder;
import alluxio.grpc.GrpcNetworkGroup;
import alluxio.grpc.GrpcSerializationUtils;
import alluxio.grpc.GrpcServerAddress;
import alluxio.grpc.MoveBlockRequest;
import alluxio.grpc.MoveBlockResponse;
import alluxio.grpc.OpenLocalBlockRequest;
import alluxio.grpc.OpenLocalBlockResponse;
import alluxio.grpc.ReadRequest;
import alluxio.grpc.ReadResponse;
import alluxio.grpc.RemoveBlockRequest;
import alluxio.grpc.RemoveBlockResponse;
import alluxio.grpc.WriteRequest;
import alluxio.grpc.WriteResponse;
import alluxio.resource.AlluxioResourceLeakDetectorFactory;
import alluxio.retry.RetryPolicy;
import alluxio.retry.RetryUtils;
import alluxio.security.user.UserState;
import com.google.common.base.Preconditions;
import com.google.common.io.Closer;
import io.grpc.StatusRuntimeException;
import io.grpc.stub.StreamObserver;
import io.netty.util.ResourceLeakDetector;
import io.netty.util.ResourceLeakTracker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.util.concurrent.TimeUnit;
import javax.annotation.Nullable;
/**
* Default implementation of {@link BlockWorkerClient}.
*/
/**
 * Default implementation of {@link BlockWorkerClient}.
 *
 * <p>Holds two gRPC channels to the target worker: an unpooled channel dedicated to data
 * streaming (better throughput) and a pooled channel for short-lived RPCs (better
 * scalability). A leak detector tracks instances that are dropped without {@link #close()}.
 */
public class DefaultBlockWorkerClient implements BlockWorkerClient {
  private static final Logger LOG =
      LoggerFactory.getLogger(DefaultBlockWorkerClient.class.getName());

  private static final ResourceLeakDetector<DefaultBlockWorkerClient> DETECTOR =
      AlluxioResourceLeakDetectorFactory.instance()
          .newResourceLeakDetector(DefaultBlockWorkerClient.class);

  // Not final: (re)assigned inside the constructor's retry loop.
  private GrpcChannel mStreamingChannel;
  private GrpcChannel mRpcChannel;
  private final GrpcServerAddress mAddress;
  private final long mRpcTimeoutMs;

  private final BlockWorkerGrpc.BlockWorkerStub mStreamingAsyncStub;
  private final BlockWorkerGrpc.BlockWorkerBlockingStub mRpcBlockingStub;
  private final BlockWorkerGrpc.BlockWorkerStub mRpcAsyncStub;

  @Nullable
  private final ResourceLeakTracker<DefaultBlockWorkerClient> mTracker;

  /**
   * Creates a client instance for communicating with block worker.
   *
   * @param userState the user state
   * @param address the address of the worker
   * @param alluxioConf Alluxio configuration
   * @throws IOException if the channels cannot be built, or authentication keeps failing
   *         after exhausting the retry policy
   */
  public DefaultBlockWorkerClient(UserState userState, GrpcServerAddress address,
      AlluxioConfiguration alluxioConf) throws IOException {
    RetryPolicy retryPolicy = RetryUtils.defaultClientRetry(
        alluxioConf.getDuration(PropertyKey.USER_RPC_RETRY_MAX_DURATION),
        alluxioConf.getDuration(PropertyKey.USER_RPC_RETRY_BASE_SLEEP_MS),
        alluxioConf.getDuration(PropertyKey.USER_RPC_RETRY_MAX_SLEEP_MS));
    UnauthenticatedException lastException = null;
    // TODO(feng): unify worker client with AbstractClient
    while (retryPolicy.attempt()) {
      try {
        // Disables channel pooling for data streaming to achieve better throughput.
        // Channel is still reused due to client pooling.
        mStreamingChannel = GrpcChannelBuilder.newBuilder(address, alluxioConf)
            .setSubject(userState.getSubject())
            .setNetworkGroup(GrpcNetworkGroup.STREAMING)
            .setClientType("DefaultBlockWorkerClient-Stream")
            .build();
        mStreamingChannel.intercept(new StreamSerializationClientInterceptor());
        // Uses default pooling strategy for RPC calls for better scalability.
        mRpcChannel = GrpcChannelBuilder.newBuilder(address, alluxioConf)
            .setSubject(userState.getSubject())
            .setNetworkGroup(GrpcNetworkGroup.RPC)
            .setClientType("DefaultBlockWorkerClient-Rpc")
            .build();
        lastException = null;
        break;
      } catch (StatusRuntimeException e) {
        // Non-auth gRPC failures are fatal: release any half-built channels and rethrow.
        close();
        throw AlluxioStatusException.fromStatusRuntimeException(e);
      } catch (UnauthenticatedException e) {
        // Credentials may be stale; re-login and let the retry policy drive another attempt.
        close();
        userState.relogin();
        lastException = e;
      }
    }
    if (lastException != null) {
      throw lastException;
    }
    mStreamingAsyncStub = BlockWorkerGrpc.newStub(mStreamingChannel);
    mRpcBlockingStub = BlockWorkerGrpc.newBlockingStub(mRpcChannel);
    mRpcAsyncStub = BlockWorkerGrpc.newStub(mRpcChannel);
    mAddress = address;
    mRpcTimeoutMs = alluxioConf.getMs(PropertyKey.USER_RPC_RETRY_MAX_DURATION);
    mTracker = DETECTOR.track(this);
  }

  @Override
  public boolean isShutdown() {
    return mStreamingChannel.isShutdown() || mRpcChannel.isShutdown();
  }

  @Override
  public boolean isHealthy() {
    return !isShutdown() && mStreamingChannel.isHealthy() && mRpcChannel.isHealthy();
  }

  @Override
  public void close() throws IOException {
    // Closer guarantees all three cleanups run even if an earlier one throws.
    // Null checks are required: close() is also invoked from the constructor
    // before all fields are assigned.
    try (Closer closer = Closer.create()) {
      closer.register(() -> {
        if (mStreamingChannel != null) {
          mStreamingChannel.shutdown();
        }
      });
      closer.register(() -> {
        if (mRpcChannel != null) {
          mRpcChannel.shutdown();
        }
      });
      closer.register(() -> {
        if (mTracker != null) {
          mTracker.close(this);
        }
      });
    }
  }

  @Override
  public StreamObserver<WriteRequest> writeBlock(StreamObserver<WriteResponse> responseObserver) {
    if (responseObserver instanceof DataMessageMarshallerProvider) {
      // Override the method descriptor so the request is serialized with the
      // zero-copy marshaller supplied by the observer.
      DataMessageMarshaller<WriteRequest> marshaller =
          ((DataMessageMarshallerProvider<WriteRequest, WriteResponse>) responseObserver)
              .getRequestMarshaller();
      Preconditions.checkNotNull(marshaller, "marshaller");
      return mStreamingAsyncStub
          .withOption(GrpcSerializationUtils.OVERRIDDEN_METHOD_DESCRIPTOR,
              BlockWorkerGrpc.getWriteBlockMethod().toBuilder()
                  .setRequestMarshaller(marshaller)
                  .build())
          .writeBlock(responseObserver);
    } else {
      return mStreamingAsyncStub.writeBlock(responseObserver);
    }
  }

  @Override
  public StreamObserver<ReadRequest> readBlock(StreamObserver<ReadResponse> responseObserver) {
    if (responseObserver instanceof DataMessageMarshallerProvider) {
      // Mirror of writeBlock: the response side gets the custom marshaller.
      DataMessageMarshaller<ReadResponse> marshaller =
          ((DataMessageMarshallerProvider<ReadRequest, ReadResponse>) responseObserver)
              .getResponseMarshaller();
      Preconditions.checkNotNull(marshaller, "marshaller");
      return mStreamingAsyncStub
          .withOption(GrpcSerializationUtils.OVERRIDDEN_METHOD_DESCRIPTOR,
              BlockWorkerGrpc.getReadBlockMethod().toBuilder()
                  .setResponseMarshaller(marshaller)
                  .build())
          .readBlock(responseObserver);
    } else {
      return mStreamingAsyncStub.readBlock(responseObserver);
    }
  }

  @Override
  public StreamObserver<CreateLocalBlockRequest> createLocalBlock(
      StreamObserver<CreateLocalBlockResponse> responseObserver) {
    return mStreamingAsyncStub.createLocalBlock(responseObserver);
  }

  @Override
  public StreamObserver<OpenLocalBlockRequest> openLocalBlock(
      StreamObserver<OpenLocalBlockResponse> responseObserver) {
    return mStreamingAsyncStub.openLocalBlock(responseObserver);
  }

  @Override
  public RemoveBlockResponse removeBlock(final RemoveBlockRequest request) {
    return mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS)
        .removeBlock(request);
  }

  @Override
  public MoveBlockResponse moveBlock(MoveBlockRequest request) {
    return mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS)
        .moveBlock(request);
  }

  @Override
  public ClearMetricsResponse clearMetrics(ClearMetricsRequest request) {
    return mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS)
        .clearMetrics(request);
  }

  @Override
  public void cache(CacheRequest request) {
    boolean async = request.getAsync();
    try {
      mRpcBlockingStub.withDeadlineAfter(mRpcTimeoutMs, TimeUnit.MILLISECONDS).cache(request);
    } catch (Exception e) {
      // Async cache is best-effort: log and swallow. Synchronous callers still
      // see the failure.
      if (!async) {
        throw e;
      }
      LOG.warn("Error sending async cache request {} to worker {}.", request, mAddress, e);
    }
  }
}
| Java |
/*
* Copyright (c) 2005-2010 Grameen Foundation USA
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* See also http://www.apache.org/licenses/LICENSE-2.0.html for an
* explanation of the license and how it is applied.
*/
package org.mifos.test.acceptance.framework.loan;
/**
 * Simple parameter holder for the "create loan account" search step:
 * the free-text search string and the loan product to select.
 */
public class CreateLoanAccountSearchParameters {

    private String searchString;
    private String loanProduct;

    public String getSearchString() {
        return searchString;
    }

    public void setSearchString(String searchString) {
        this.searchString = searchString;
    }

    public String getLoanProduct() {
        return loanProduct;
    }

    public void setLoanProduct(String loanProduct) {
        this.loanProduct = loanProduct;
    }
}
| Java |
/*
* Copyright 2014 Realm Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.realm.internal.test;
import io.realm.internal.DefineTable;
/**
 * A helper class containing model(s) for simple code generation tests.
 *
 * Package-private on purpose: it exists only to feed the {@code @DefineTable}
 * annotation processor during occasional local runs.
 */
class CodeGenTest {
    @DefineTable // this is enabled only for occasional local tests
    class someModel { // NOTE(review): lowercase name kept as-is — the generated table presumably derives its name from it; confirm before renaming
        String name; // model column: free-form name
        int age; // model column: integer age
    }
}
| Java |
/*
  mustache.js — Logic-less templates in JavaScript

  See http://mustache.github.com/ for more info.
*/
var Mustache = function() {
  var Renderer = function() {};

  Renderer.prototype = {
    otag: "{{",
    ctag: "}}",
    pragmas: {},
    buffer: [],
    pragmas_implemented: {
      "IMPLICIT-ITERATOR": true,
      "TRANSLATION-HINT": true
    },
    context: {},

    // Main entry point. When in_recursion is truthy the rendered string is
    // returned; otherwise each output line is passed to send().
    render: function(template, context, partials, in_recursion) {
      // reset buffer & set context
      if(!in_recursion) {
        this.context = context;
        this.buffer = []; // TODO: make this non-lazy
      }

      // fail fast: no opening tag means there is nothing to expand
      if(!this.includes("", template)) {
        if(in_recursion) {
          return template;
        } else {
          this.send(template);
          return;
        }
      }

      // Branching or moving down the partial stack, save any translation mode info.
      if (this.pragmas['TRANSLATION-HINT']) {
        context['_mode'] = this.pragmas['TRANSLATION-HINT']['mode'];
      }

      template = this.render_pragmas(template);
      template = this.render_i18n(template, context, partials);
      var html = this.render_section(template, context, partials);
      if (html === template) {
        if (in_recursion) {
          return this.render_tags(html, context, partials, true);
        }
        this.render_tags(html, context, partials, false);
      } else {
        if(in_recursion) {
          return html;
        } else {
          var lines = html.split("\n");
          for (var i = 0; i < lines.length; i++) {
            this.send(lines[i]);
          }
          return;
        }
      }
    },

    /*
      Sends parsed lines
    */
    send: function(line) {
      if(line != "") {
        this.buffer.push(line);
      }
    },

    /*
      Looks for %PRAGMAS
    */
    render_pragmas: function(template) {
      // no pragmas
      if(!this.includes("%", template)) {
        return template;
      }

      var that = this;
      var regex = new RegExp(this.otag + "%([\\w-]+) ?([\\w]+=[\\w]+)?" +
            this.ctag);
      return template.replace(regex, function(match, pragma, options) {
        if(!that.pragmas_implemented[pragma]) {
          throw({message:
            "This implementation of mustache doesn't understand the '" +
            pragma + "' pragma"});
        }
        that.pragmas[pragma] = {};
        if(options) {
          var opts = options.split("=");
          that.pragmas[pragma][opts[0]] = opts[1];
        }
        return "";
        // ignore unknown pragmas silently
      });
    },

    /*
      Tries to find a partial in the curent scope and render it
    */
    render_partial: function(name, context, partials) {
      name = this.trim(name);
      if(!partials || partials[name] === undefined) {
        throw({message: "unknown_partial '" + name + "'"});
      }
      // If the context holds an object under the partial's name, use it as
      // the partial's sub-context; otherwise render with the current context.
      if(typeof(context[name]) != "object") {
        return this.render(partials[name], context, partials, true);
      }
      return this.render(partials[name], context[name], partials, true);
    },

    // Expands {{_i}}...{{/i}} translation sections. NOTE(review): relies on a
    // global `_` translation function being defined by the host page — confirm
    // it exists before using i18n sections.
    render_i18n: function(html, context, partials) {
      if (html.indexOf(this.otag + "_i") == -1) {
        return html;
      }

      var that = this;
      var regex = new RegExp(this.otag + "\\_i" + this.ctag +
        "\\s*([\\s\\S]+?)" + this.otag + "\\/i" + this.ctag, "mg");

      // for each {{_i}}{{/i}} section do...
      return html.replace(regex, function(match, content) {
        var translation_mode = undefined;
        if (that.pragmas && that.pragmas["TRANSLATION-HINT"] && that.pragmas["TRANSLATION-HINT"]['mode']) {
          translation_mode = { _mode: that.pragmas["TRANSLATION-HINT"]['mode'] };
        } else if (context['_mode']) {
          translation_mode = { _mode: context['_mode'] };
        }
        return that.render(_(content, translation_mode), context, partials, true);
      });
    },

    /*
      Renders inverted (^) and normal (#) sections
    */
    render_section: function(template, context, partials) {
      if(!this.includes("#", template) && !this.includes("^", template)) {
        return template;
      }

      var that = this;

      // This regex matches _the first_ section ({{#foo}}{{/foo}}), and captures the remainder
      var regex = new RegExp(
        "^([\\s\\S]*?)" +         // all the crap at the beginning that is not {{*}} ($1)
        this.otag +               // {{
        "(\\^|\\#)\\s*(.+)\\s*" + // #foo (# == $2, foo == $3)
        this.ctag +               // }}
        "\n*([\\s\\S]*?)" +       // between the tag ($2). leading newlines are dropped
        this.otag +               // {{
        "\\/\\s*\\3\\s*" +        // /foo (backreference to the opening tag).
        this.ctag +               // }}
        "\\s*([\\s\\S]*)$",       // everything else in the string ($4). leading whitespace is dropped.
        "g");

      // for each {{#foo}}{{/foo}} section do...
      return template.replace(regex, function(match, before, type, name, content, after) {
        // before contains only tags, no sections
        var renderedBefore = before ? that.render_tags(before, context, partials, true) : "",

        // after may contain both sections and tags, so use full rendering function
            renderedAfter = after ? that.render(after, context, partials, true) : "";

        var value = that.find(name, context);

        if(type == "^") { // inverted section
          if(!value || that.is_array(value) && value.length === 0) {
            // false or empty list, render it
            return renderedBefore + that.render(content, context, partials, true) + renderedAfter;
          } else {
            return renderedBefore + "" + renderedAfter;
          }
        } else if(type == "#") { // normal section
          if(that.is_array(value)) { // Enumerable, Let's loop!
            return renderedBefore + that.map(value, function(row) {
              return that.render(content, that.create_context(row), partials, true);
            }).join("") + renderedAfter;
          } else if(that.is_object(value)) { // Object, Use it as subcontext!
            return renderedBefore + that.render(content, that.create_context(value),
              partials, true) + renderedAfter;
          } else if(typeof value === "function") {
            // higher order section
            return renderedBefore + value.call(context, content, function(text) {
              return that.render(text, context, partials, true);
            }) + renderedAfter;
          } else if(value) { // boolean section
            return renderedBefore + that.render(content, context, partials, true) + renderedAfter;
          } else {
            return renderedBefore + "" + renderedAfter;
          }
        }
      });
    },

    /*
      Replace {{foo}} and friends with values from our view
    */
    render_tags: function(template, context, partials, in_recursion) {
      // tit for tat
      var that = this;

      // rebuilt whenever {{=...=}} changes the delimiters
      var new_regex = function() {
        return new RegExp(that.otag + "(=|!|>|\\{|%)?([^\\/#\\^]+?)\\1?" +
          that.ctag + "+", "g");
      };

      var regex = new_regex();
      var tag_replace_callback = function(match, operator, name) {
        switch(operator) {
        case "!": // ignore comments
          return "";
        case "=": // set new delimiters, rebuild the replace regexp
          that.set_delimiters(name);
          regex = new_regex();
          return "";
        case ">": // render partial
          return that.render_partial(name, context, partials);
        case "{": // the triple mustache is unescaped
          return that.find(name, context);
        default: // escape the value
          return that.escape(that.find(name, context));
        }
      };
      var lines = template.split("\n");
      for(var i = 0; i < lines.length; i++) {
        lines[i] = lines[i].replace(regex, tag_replace_callback, this);
        if(!in_recursion) {
          this.send(lines[i]);
        }
      }

      if(in_recursion) {
        return lines.join("\n");
      }
    },

    set_delimiters: function(delimiters) {
      var dels = delimiters.split(" ");
      this.otag = this.escape_regex(dels[0]);
      this.ctag = this.escape_regex(dels[1]);
    },

    escape_regex: function(text) {
      // thank you Simon Willison
      if(!arguments.callee.sRE) {
        var specials = [
          '/', '.', '*', '+', '?', '|',
          '(', ')', '[', ']', '{', '}', '\\'
        ];
        arguments.callee.sRE = new RegExp(
          '(\\' + specials.join('|\\') + ')', 'g'
        );
      }
      return text.replace(arguments.callee.sRE, '\\$1');
    },

    /*
      find `name` in current `context`. That is find me a value
      from the view object
    */
    find: function(name, context) {
      name = this.trim(name);

      // Checks whether a value is thruthy or false or 0
      function is_kinda_truthy(bool) {
        return bool === false || bool === 0 || bool;
      }

      var value;
      if(is_kinda_truthy(context[name])) {
        value = context[name];
      } else if(is_kinda_truthy(this.context[name])) {
        value = this.context[name];
      }

      if(typeof value === "function") {
        return value.apply(context);
      }
      if(value !== undefined) {
        return value;
      }
      // silently ignore unkown variables
      return "";
    },

    // Utility methods

    /* includes tag */
    includes: function(needle, haystack) {
      return haystack.indexOf(this.otag + needle) != -1;
    },

    /*
      Does away with nasty characters. FIX: the switch previously returned the
      characters unchanged (the HTML entities had been decoded away, and
      case "'" was a syntax error); restored the proper entity replacements.
    */
    escape: function(s) {
      s = String(s === null ? "" : s);
      return s.replace(/&(?!\w+;)|["'<>\\]/g, function(s) {
        switch(s) {
        case "&": return "&amp;";
        case "\\": return "\\\\";
        case '"': return "&quot;";
        case "'": return "&#39;";
        case "<": return "&lt;";
        case ">": return "&gt;";
        default: return s;
        }
      });
    },

    // by @langalex, support for arrays of strings
    create_context: function(_context) {
      if(this.is_object(_context)) {
        return _context;
      } else {
        var iterator = ".";
        if(this.pragmas["IMPLICIT-ITERATOR"]) {
          iterator = this.pragmas["IMPLICIT-ITERATOR"].iterator;
        }
        var ctx = {};
        ctx[iterator] = _context;
        return ctx;
      }
    },

    is_object: function(a) {
      return a && typeof a == "object";
    },

    is_array: function(a) {
      return Object.prototype.toString.call(a) === '[object Array]';
    },

    /*
      Gets rid of leading and trailing whitespace
    */
    trim: function(s) {
      return s.replace(/^\s*|\s*$/g, "");
    },

    /*
      Why, why, why? Because IE. Cry, cry cry.
    */
    map: function(array, fn) {
      if (typeof array.map == "function") {
        return array.map(fn);
      } else {
        var r = [];
        var l = array.length;
        for(var i = 0; i < l; i++) {
          r.push(fn(array[i]));
        }
        return r;
      }
    }
  };

  return({
    name: "mustache.js",
    version: "0.3.1-dev-twitter",

    /*
      Turns a template and view into HTML
    */
    to_html: function(template, view, partials, send_fun) {
      var renderer = new Renderer();
      if(send_fun) {
        renderer.send = send_fun;
      }
      renderer.render(template, view || {}, partials);
      if(!send_fun) {
        return renderer.buffer.join("\n");
      }
    }
  });
}();
| Java |
/*
* $Id: WrapperClassBean.java 799110 2009-07-29 22:44:26Z musachy $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.json;
import java.util.List;
import java.util.Map;
/**
 * Bean exposing wrapper-type, primitive and generic-collection properties.
 * Exercises JSON (de)serialization of the various field kinds: boxed
 * primitives, true primitives, and nested generic collections/arrays.
 */
public class WrapperClassBean {

    private String stringField;
    private Integer intField;
    private int nullIntField;
    private Boolean booleanField;
    private boolean primitiveBooleanField1;
    private boolean primitiveBooleanField2;
    private boolean primitiveBooleanField3;
    private Character charField;
    private Long longField;
    private Float floatField;
    private Double doubleField;
    private Object objectField;
    private Byte byteField;
    private List<SimpleValue> listField;
    private List<Map<String, Long>> listMapField;
    private Map<String, List<Long>> mapListField;
    private Map<String, Long>[] arrayMapField;

    public String getStringField() {
        return stringField;
    }

    public void setStringField(String value) {
        stringField = value;
    }

    public Integer getIntField() {
        return intField;
    }

    public void setIntField(Integer value) {
        intField = value;
    }

    public int getNullIntField() {
        return nullIntField;
    }

    public void setNullIntField(int value) {
        nullIntField = value;
    }

    public Boolean getBooleanField() {
        return booleanField;
    }

    public void setBooleanField(Boolean value) {
        booleanField = value;
    }

    public boolean isPrimitiveBooleanField1() {
        return primitiveBooleanField1;
    }

    public void setPrimitiveBooleanField1(boolean value) {
        primitiveBooleanField1 = value;
    }

    public boolean isPrimitiveBooleanField2() {
        return primitiveBooleanField2;
    }

    public void setPrimitiveBooleanField2(boolean value) {
        primitiveBooleanField2 = value;
    }

    public boolean isPrimitiveBooleanField3() {
        return primitiveBooleanField3;
    }

    public void setPrimitiveBooleanField3(boolean value) {
        primitiveBooleanField3 = value;
    }

    public Character getCharField() {
        return charField;
    }

    public void setCharField(Character value) {
        charField = value;
    }

    public Long getLongField() {
        return longField;
    }

    public void setLongField(Long value) {
        longField = value;
    }

    public Float getFloatField() {
        return floatField;
    }

    public void setFloatField(Float value) {
        floatField = value;
    }

    public Double getDoubleField() {
        return doubleField;
    }

    public void setDoubleField(Double value) {
        doubleField = value;
    }

    public Object getObjectField() {
        return objectField;
    }

    public void setObjectField(Object value) {
        objectField = value;
    }

    public Byte getByteField() {
        return byteField;
    }

    public void setByteField(Byte value) {
        byteField = value;
    }

    public List<SimpleValue> getListField() {
        return listField;
    }

    public void setListField(List<SimpleValue> value) {
        listField = value;
    }

    public List<Map<String, Long>> getListMapField() {
        return listMapField;
    }

    public void setListMapField(List<Map<String, Long>> value) {
        listMapField = value;
    }

    public Map<String, List<Long>> getMapListField() {
        return mapListField;
    }

    public void setMapListField(Map<String, List<Long>> value) {
        mapListField = value;
    }

    public Map<String, Long>[] getArrayMapField() {
        return arrayMapField;
    }

    public void setArrayMapField(Map<String, Long>[] value) {
        arrayMapField = value;
    }
}
| Java |
package alien4cloud.tosca.parser.mapping.generator;
import java.io.IOException;
import java.util.AbstractMap;
import java.util.Map;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.ApplicationContext;
import org.springframework.stereotype.Component;
import org.yaml.snakeyaml.nodes.MappingNode;
import org.yaml.snakeyaml.nodes.Node;
import org.yaml.snakeyaml.nodes.NodeTuple;
import org.yaml.snakeyaml.nodes.ScalarNode;
import org.yaml.snakeyaml.nodes.SequenceNode;
import alien4cloud.tosca.parser.IChecker;
import alien4cloud.tosca.parser.INodeParser;
import alien4cloud.tosca.parser.KeyValueMappingTarget;
import alien4cloud.tosca.parser.MappingTarget;
import alien4cloud.tosca.parser.ParserUtils;
import alien4cloud.tosca.parser.ParsingContextExecution;
import alien4cloud.tosca.parser.ParsingError;
import alien4cloud.tosca.parser.ParsingException;
import alien4cloud.tosca.parser.ParsingResult;
import alien4cloud.tosca.parser.YamlSimpleParser;
import alien4cloud.tosca.parser.impl.ErrorCode;
import alien4cloud.tosca.parser.impl.base.CheckedTypeNodeParser;
import alien4cloud.tosca.parser.impl.base.ScalarParser;
import alien4cloud.tosca.parser.impl.base.TypeNodeParser;
import alien4cloud.tosca.parser.mapping.DefaultParser;
import com.google.common.collect.Maps;
/**
* Load type mapping definition from yaml and add it to the type mapping registry.
*/
@Slf4j
@Component
public class MappingGenerator extends DefaultParser<Map<String, INodeParser>> {
@Resource
private ApplicationContext applicationContext;
private Map<String, INodeParser> parsers = Maps.newHashMap();
private Map<String, IMappingBuilder> mappingBuilders = Maps.newHashMap();
private Map<String, IChecker> checkers = Maps.newHashMap();
@PostConstruct
public void initialize() {
Map<String, INodeParser> contextParsers = applicationContext.getBeansOfType(INodeParser.class);
// register parsers based on their class name.
for (INodeParser parser : contextParsers.values()) {
parsers.put(parser.getClass().getName(), parser);
}
Map<String, IMappingBuilder> contextMappingBuilders = applicationContext.getBeansOfType(IMappingBuilder.class);
for (IMappingBuilder mappingBuilder : contextMappingBuilders.values()) {
mappingBuilders.put(mappingBuilder.getKey(), mappingBuilder);
}
Map<String, IChecker> contextCheckers = applicationContext.getBeansOfType(IChecker.class);
for (IChecker checker : contextCheckers.values()) {
checkers.put(checker.getName(), checker);
}
}
public Map<String, INodeParser> process(String resourceLocation) throws ParsingException {
org.springframework.core.io.Resource resource = applicationContext.getResource(resourceLocation);
YamlSimpleParser<Map<String, INodeParser>> nodeParser = new YamlSimpleParser<>(this);
try {
ParsingResult<Map<String, INodeParser>> result = nodeParser.parseFile(resource.getURI().toString(), resource.getFilename(),
resource.getInputStream(), null);
if (result.getContext().getParsingErrors().isEmpty()) {
return result.getResult();
}
throw new ParsingException(resource.getFilename(), result.getContext().getParsingErrors());
} catch (IOException e) {
log.error("Failed to open stream", e);
throw new ParsingException(resource.getFilename(), new ParsingError(ErrorCode.MISSING_FILE, "Unable to load file.", null, e.getMessage(), null,
resourceLocation));
}
}
public Map<String, INodeParser> parse(Node node, ParsingContextExecution context) {
Map<String, INodeParser> parsers = Maps.newHashMap();
if (node instanceof SequenceNode) {
SequenceNode types = (SequenceNode) node;
for (Node mapping : types.getValue()) {
Map.Entry<String, INodeParser<?>> entry = processTypeMapping(mapping, context);
if (entry != null) {
parsers.put(entry.getKey(), entry.getValue());
}
}
} else {
context.getParsingErrors().add(
new ParsingError(ErrorCode.SYNTAX_ERROR, "Mapping should be a sequence of type mappings", node.getStartMark(), "Actually was "
+ node.getClass().getSimpleName(), node.getEndMark(), ""));
}
return parsers;
}
private Map.Entry<String, INodeParser<?>> processTypeMapping(Node node, ParsingContextExecution context) {
try {
return doProcessTypeMapping(node, context);
} catch (ClassNotFoundException | IllegalAccessException | InstantiationException e) {
log.error("Failed to load class while parsing mapping", e);
context.getParsingErrors().add(
new ParsingError(ErrorCode.SYNTAX_ERROR, "Unable to load class", node.getStartMark(), e.getMessage(), node.getEndMark(), ""));
return null;
}
}
private Map.Entry<String, INodeParser<?>> doProcessTypeMapping(Node node, ParsingContextExecution context) throws ClassNotFoundException,
IllegalAccessException, InstantiationException {
if (node instanceof MappingNode) {
MappingNode mapping = (MappingNode) node;
String yamlType = null;
INodeParser<?> parser = null;
for (NodeTuple tuple : mapping.getValue()) {
if (yamlType == null) {
yamlType = ParserUtils.getScalar(tuple.getKeyNode(), context);
String type = ParserUtils.getScalar(tuple.getValueNode(), context);
if (type.startsWith("__")) {
parser = getWrapperParser(type, mapping, context);
return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser);
}
parser = this.parsers.get(type);
if (parser != null) {
log.debug("Mapping yaml type <" + yamlType + "> using parser <" + type + ">");
return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser);
}
parser = buildTypeNodeParser(yamlType, type);
// log.debug("Mapping yaml type <" + yamlType + "> to class <" + type + ">");
// Class<?> javaClass = Class.forName(type);
// parser = new TypeNodeParser<>(javaClass, yamlType);
} else {
// process a mapping
map(tuple, (TypeNodeParser) parser, context);
}
}
return new AbstractMap.SimpleEntry<String, INodeParser<?>>(yamlType, parser);
} else {
context.getParsingErrors().add(
new ParsingError(ErrorCode.SYNTAX_ERROR, "Unable to process type mapping.", node.getStartMark(),
"Mapping must be defined using a mapping node.", node.getEndMark(), ""));
}
return null;
}
private TypeNodeParser<?> buildTypeNodeParser(String yamlType, String javaType) throws ClassNotFoundException {
String realJavaType = javaType;
IChecker checker = null;
if (javaType.contains("|")) {
realJavaType = javaType.substring(0, javaType.indexOf("|"));
String checkerName = javaType.substring(javaType.indexOf("|") + 1);
log.debug(String.format("After parsing <%s>, realJavaType is <%s>, checkerName is <%s>", javaType, realJavaType, checkerName));
checker = checkers.get(checkerName);
if (checker == null) {
log.warn(String.format("Can not find checker <%s>, using a standard TypeNodeParser", checkerName));
}
}
Class<?> javaClass = Class.forName(realJavaType);
if (checker == null) {
log.debug("Mapping yaml type <" + yamlType + "> to class <" + realJavaType + ">");
return new TypeNodeParser<>(javaClass, yamlType);
} else {
// TODO check that the type are compatible
log.debug("Mapping yaml type <" + yamlType + "> to class <" + realJavaType + "> using checker " + checker.toString());
return new CheckedTypeNodeParser<>(javaClass, yamlType, checker);
}
}
private INodeParser<?> getWrapperParser(String wrapperKey, MappingNode mapping, ParsingContextExecution context) {
IMappingBuilder builder = this.mappingBuilders.get(wrapperKey.substring(2));
return builder.buildMapping(mapping, context).getParser();
}
private void map(NodeTuple tuple, TypeNodeParser<?> parser, ParsingContextExecution context) {
String key = ParserUtils.getScalar(tuple.getKeyNode(), context);
int positionMappingIndex = positionMappingIndex(key);
if (positionMappingIndex > -1) {
mapPositionMapping(positionMappingIndex, tuple.getValueNode(), parser, context);
} else {
MappingTarget mappingTarget = getMappingTarget(tuple.getValueNode(), context);
if (mappingTarget != null) {
parser.getYamlToObjectMapping().put(key, mappingTarget);
}
}
}
private MappingTarget getMappingTarget(Node mappingNode, ParsingContextExecution context) {
if (mappingNode instanceof ScalarNode) {
// create a scalar mapping
String value = ParserUtils.getScalar(mappingNode, context);
return new MappingTarget(value, parsers.get(ScalarParser.class.getName()));
} else if (mappingNode instanceof MappingNode) {
return mapMappingNode((MappingNode) mappingNode, context);
}
return null;
}
private int positionMappingIndex(String key) {
if (key.startsWith("__")) {
try {
int position = Integer.valueOf(key.substring(2));
return position;
} catch (NumberFormatException e) {
// not a position mapping
return -1;
}
}
return -1;
}
private void mapPositionMapping(Integer index, Node positionMapping, TypeNodeParser<?> parser, ParsingContextExecution context) {
if (positionMapping instanceof MappingNode) {
MappingNode mappingNode = (MappingNode) positionMapping;
String key = null;
MappingTarget valueMappingTarget = null;
for (NodeTuple tuple : mappingNode.getValue()) {
String tupleKey = ParserUtils.getScalar(tuple.getKeyNode(), context);
if (tupleKey.equals("key")) {
key = ParserUtils.getScalar(tuple.getValueNode(), context);
} else if (tupleKey.equals("value")) {
valueMappingTarget = getMappingTarget(tuple.getValueNode(), context);
} else {
context.getParsingErrors().add(
new ParsingError(ErrorCode.SYNTAX_ERROR, "Unknown key for position mapping.", tuple.getKeyNode().getStartMark(), tupleKey, tuple
.getKeyNode().getEndMark(), ""));
}
}
if (valueMappingTarget == null) {
return;
}
if (key == null) {
parser.getYamlOrderedToObjectMapping().put(index, valueMappingTarget);
} else {
parser.getYamlOrderedToObjectMapping().put(index, new KeyValueMappingTarget(key, valueMappingTarget.getPath(), valueMappingTarget.getParser()));
}
} else {
context.getParsingErrors().add(
new ParsingError(ErrorCode.SYNTAX_ERROR, "Position mapping must be a mapping node with key and value fields.", positionMapping
.getStartMark(), "", positionMapping.getEndMark(), ""));
}
}
/**
 * Resolves a mapping-node target by dispatching to the mapping builder
 * registered under the node's first key.
 *
 * @param mappingNode the mapping node to resolve.
 * @param context the current parsing context (collects errors).
 * @return the built {@link MappingTarget}, or null when no builder matches.
 */
private MappingTarget mapMappingNode(MappingNode mappingNode, ParsingContextExecution context) {
    NodeTuple firstTuple = mappingNode.getValue().get(0);
    String builderKey = ParserUtils.getScalar(firstTuple.getKeyNode(), context);
    IMappingBuilder mappingBuilder = mappingBuilders.get(builderKey);
    if (mappingBuilder == null) {
        // No registered builder for this key: report a syntax error.
        context.getParsingErrors().add(
                new ParsingError(ErrorCode.SYNTAX_ERROR, "No mapping target found for key", firstTuple.getKeyNode().getStartMark(), builderKey,
                        firstTuple.getKeyNode().getEndMark(), ""));
        return null;
    }
    log.debug("Mapping yaml key <" + builderKey + "> using mapping builder " + mappingBuilder.getClass().getName());
    return mappingBuilder.buildMapping(mappingNode, context);
}
} | Java |
---
id: io-kinesis-sink
title: Kinesis sink connector
sidebar_label: "Kinesis sink connector"
original_id: io-kinesis-sink
---
The Kinesis sink connector pulls data from Pulsar and persists data into Amazon Kinesis.
## Configuration
The configuration of the Kinesis sink connector has the following property.
### Property
| Name | Type|Required | Default | Description
|------|----------|----------|---------|-------------|
`messageFormat`|MessageFormat|true|ONLY_RAW_PAYLOAD|Message format in which Kinesis sink converts Pulsar messages and publishes to Kinesis streams.<br /><br />Below are the available options:<br /><br /><li>`ONLY_RAW_PAYLOAD`: Kinesis sink directly publishes Pulsar message payload as a message into the configured Kinesis stream. <br /><br /></li><li>`FULL_MESSAGE_IN_JSON`: Kinesis sink creates a JSON payload with Pulsar message payload, properties and encryptionCtx, and publishes JSON payload into the configured Kinesis stream.<br /><br /></li><li>`FULL_MESSAGE_IN_FB`: Kinesis sink creates a flatbuffer serialized payload with Pulsar message payload, properties and encryptionCtx, and publishes flatbuffer payload into the configured Kinesis stream.</li>
`retainOrdering`|boolean|false|false|Whether to retain ordering when moving messages from Pulsar to Kinesis.
`awsEndpoint`|String|false|" " (empty string)|The Kinesis end-point URL, which can be found [here](https://docs.aws.amazon.com/general/latest/gr/rande.html).
`awsRegion`|String|false|" " (empty string)|The AWS region. <br /><br />**Example**<br /> us-west-1, us-west-2
`awsKinesisStreamName`|String|true|" " (empty string)|The Kinesis stream name.
`awsCredentialPluginName`|String|false|" " (empty string)|The fully-qualified class name of implementation of {@inject: github:AwsCredentialProviderPlugin:/pulsar-io/kinesis/src/main/java/org/apache/pulsar/io/kinesis/AwsCredentialProviderPlugin.java}. <br /><br />It is a factory class which creates an AWSCredentialsProvider that is used by Kinesis sink. <br /><br />If it is empty, the Kinesis sink creates a default AWSCredentialsProvider which accepts json-map of credentials in `awsCredentialPluginParam`.
`awsCredentialPluginParam`|String |false|" " (empty string)|The JSON parameter to initialize `awsCredentialsProviderPlugin`.
### Built-in plugins
The following are built-in `AwsCredentialProviderPlugin` plugins:
* `org.apache.pulsar.io.kinesis.AwsDefaultProviderChainPlugin`
This plugin takes no configuration; it uses the default AWS provider chain.
For more information, see [AWS documentation](https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html#credentials-default).
* `org.apache.pulsar.io.kinesis.STSAssumeRoleProviderPlugin`
This plugin takes a configuration (via the `awsCredentialPluginParam`) that describes a role to assume when running the KCL.
This configuration takes the form of a small json document like:
```json
{"roleArn": "arn...", "roleSessionName": "name"}
```
### Example
Before using the Kinesis sink connector, you need to create a configuration file through one of the following methods.
* JSON
```json
{
"awsEndpoint": "some.endpoint.aws",
"awsRegion": "us-east-1",
"awsKinesisStreamName": "my-stream",
"awsCredentialPluginParam": "{\"accessKey\":\"myKey\",\"secretKey\":\"my-Secret\"}",
"messageFormat": "ONLY_RAW_PAYLOAD",
"retainOrdering": "true"
}
```
* YAML
```yaml
configs:
awsEndpoint: "some.endpoint.aws"
awsRegion: "us-east-1"
awsKinesisStreamName: "my-stream"
awsCredentialPluginParam: "{\"accessKey\":\"myKey\",\"secretKey\":\"my-Secret\"}"
messageFormat: "ONLY_RAW_PAYLOAD"
retainOrdering: "true"
```
| Java |
'use strict';
import { module } from 'angular';
import _ from 'lodash';
import { AccountService, ExpectedArtifactService } from '@spinnaker/core';
import { KubernetesProviderSettings } from '../../../kubernetes.settings';
export const KUBERNETES_V1_CLUSTER_CONFIGURE_COMMANDBUILDER = 'spinnaker.kubernetes.clusterCommandBuilder.service';
export const name = KUBERNETES_V1_CLUSTER_CONFIGURE_COMMANDBUILDER; // for backwards compatibility
module(KUBERNETES_V1_CLUSTER_CONFIGURE_COMMANDBUILDER, []).factory('kubernetesClusterCommandBuilder', function() {
// Asynchronously sets command.account to the best available kubernetes v1
// account: the supplied default if it is still valid, otherwise the first of
// the application's accounts that is a kubernetes account (or the first
// kubernetes account when the application has none), otherwise a placeholder
// the user must replace.
function attemptToSetValidAccount(application, defaultAccount, command) {
  return AccountService.listAccounts('kubernetes', 'v1').then(function(kubernetesAccounts) {
    const accountNames = _.map(kubernetesAccounts, 'name');
    let fallbackAccount = null;
    if (application.accounts.length) {
      fallbackAccount = _.find(application.accounts, applicationAccount => accountNames.includes(applicationAccount));
    } else if (accountNames.length) {
      fallbackAccount = accountNames[0];
    }
    if (defaultAccount && accountNames.includes(defaultAccount)) {
      command.account = defaultAccount;
    } else {
      command.account = fallbackAccount ? fallbackAccount : 'my-account-name';
    }
  });
}
// Restricts health evaluation for the command to the kubernetes container and
// pod health providers. (The application argument is unused but kept for a
// uniform builder signature.)
function applyHealthProviders(application, command) {
  const k8sHealthProviders = ['KubernetesContainer', 'KubernetesPod'];
  command.interestingHealthProviderNames = k8sHealthProviders;
}
// Builds a deploy command for a brand-new kubernetes (v1) server group,
// seeded with conservative defaults (single replica, rolling-update
// deployment disabled, autoscaler off). `defaults.account` / `defaults.mode`
// override the account and view-state mode.
function buildNewClusterCommand(application, defaults = {}) {
  // Prefer an explicitly supplied account, else the provider-wide default.
  const defaultAccount = defaults.account || KubernetesProviderSettings.defaults.account;
  const command = {
    account: defaultAccount,
    application: application.name,
    strategy: '',
    targetSize: 1,
    cloudProvider: 'kubernetes',
    selectedProvider: 'kubernetes',
    namespace: 'default',
    containers: [],
    initContainers: [],
    volumeSources: [],
    // Helper functions attached so the UI can label and group images on this command.
    buildImageId: buildImageId,
    groupByRegistry: groupByRegistry,
    terminationGracePeriodSeconds: 30,
    viewState: {
      mode: defaults.mode || 'create',
      disableStrategySelection: true,
      useAutoscaler: false,
    },
    capacity: {
      min: 1,
      desired: 1,
      max: 1,
    },
    scalingPolicy: {
      cpuUtilization: {
        target: 40,
      },
    },
    useSourceCapacity: false,
    deployment: {
      enabled: false,
      minReadySeconds: 0,
      deploymentStrategy: {
        type: 'RollingUpdate',
        rollingUpdate: {
          maxUnavailable: 1,
          maxSurge: 1,
        },
      },
    },
  };
  applyHealthProviders(application, command);
  // NOTE: fire-and-forget; the command is returned before the account lookup
  // promise resolves, and command.account is patched when it does.
  attemptToSetValidAccount(application, defaultAccount, command);
  return command;
}
// Builds a deploy command from an existing server group's deploy description
// (for clone/edit flows). Re-attaches the UI helper functions, recomputes
// image ids, and backfills capacity / scaling policy when absent.
function buildClusterCommandFromExisting(application, existing, mode) {
  mode = mode || 'clone';
  // Deep-copy so edits never mutate the source server group's description.
  const command = _.cloneDeep(existing.deployDescription);
  command.groupByRegistry = groupByRegistry;
  command.cloudProvider = 'kubernetes';
  command.selectedProvider = 'kubernetes';
  command.account = existing.account;
  command.buildImageId = buildImageId;
  command.strategy = '';
  // Recompute display image ids for both app containers and init containers.
  command.containers.forEach(container => {
    container.imageDescription.imageId = buildImageId(container.imageDescription);
  });
  command.initContainers.forEach(container => {
    container.imageDescription.imageId = buildImageId(container.imageDescription);
  });
  command.viewState = {
    mode: mode,
    useAutoscaler: !!command.scalingPolicy,
  };
  // Older descriptions may carry only targetSize; derive a flat capacity from it.
  if (!command.capacity) {
    command.capacity = {
      min: command.targetSize,
      max: command.targetSize,
      desired: command.targetSize,
    };
  }
  // Ensure a well-formed autoscaler policy so the UI always has a CPU target.
  if (!_.has(command, 'scalingPolicy.cpuUtilization.target')) {
    command.scalingPolicy = { cpuUtilization: { target: 40 } };
  }
  applyHealthProviders(application, command);
  return command;
}
// Buckets a container for display: dynamically-resolved images get a named
// bucket, everything else is grouped by its docker registry. Returns
// undefined when the container has no image description.
function groupByRegistry(container) {
  const description = container.imageDescription;
  if (!description) {
    return undefined;
  }
  if (description.fromContext) {
    return 'Find Image Result(s)';
  }
  if (description.fromTrigger) {
    return 'Images from Trigger(s)';
  }
  if (description.fromArtifact) {
    return 'Images from Artifact(s)';
  }
  return description.registry;
}
// Produces a human-readable image id. Images resolved at execution time
// (find-image, bake, tagless trigger, artifact) get a descriptive
// placeholder; concrete images get the usual [registry/]repository:tag form.
function buildImageId(image) {
  if (image.fromFindImage) {
    return `${image.cluster} ${image.pattern}`;
  }
  if (image.fromBake) {
    return `${image.repository} (Baked during execution)`;
  }
  if (image.fromTrigger && !image.tag) {
    return `${image.registry}/${image.repository} (Tag resolved at runtime)`;
  }
  if (image.fromArtifact) {
    return `${image.name} (Artifact resolved at runtime)`;
  }
  const registryPrefix = image.registry ? `${image.registry}/` : '';
  return `${registryPrefix}${image.repository}:${image.tag}`;
}
// Re-links each container whose image is resolved dynamically (from a
// find-image/bake stage, a docker trigger, or an expected artifact) to the
// matching upstream image, copying over the fields owned by that source.
// Containers whose dynamic source no longer exists are dropped; containers
// with concrete images pass through untouched.
function reconcileUpstreamImages(containers, upstreamImages) {
  const configFor = image => {
    if (image.fromContext) {
      return {
        match: candidate => candidate.fromContext && candidate.stageId === image.stageId,
        fieldsToCopy: ({ cluster, pattern, repository }) => ({ cluster, pattern, repository }),
      };
    }
    if (image.fromTrigger) {
      return {
        match: candidate =>
          candidate.fromTrigger &&
          candidate.registry === image.registry &&
          candidate.repository === image.repository &&
          candidate.tag === image.tag,
        // Trigger images are matched exactly; nothing needs copying.
        fieldsToCopy: () => ({}),
      };
    }
    if (image.fromArtifact) {
      return {
        match: candidate => candidate.fromArtifact && candidate.stageId === image.stageId,
        fieldsToCopy: ({ name }) => ({ name }),
      };
    }
    // Static image: keep the container as-is.
    return { skipProcessing: true };
  };
  const reconciled = [];
  containers.forEach(container => {
    const imageConfig = configFor(container.imageDescription);
    if (imageConfig.skipProcessing) {
      reconciled.push(container);
      return;
    }
    const upstreamMatch = upstreamImages.find(imageConfig.match);
    if (upstreamMatch) {
      Object.assign(container.imageDescription, imageConfig.fieldsToCopy(upstreamMatch));
      reconciled.push(container);
    }
  });
  return reconciled;
}
// Walks the pipeline stage graph upstream from `current`, collecting image
// candidates produced by findImage and bake stages. `visited` guards against
// cycles in the requisite-stage graph (a cycle would otherwise recurse forever).
function findContextImages(current, all, visited = {}) {
  if (visited[current.refId]) {
    // Already seen: a dependency loop — contribute nothing further.
    return [];
  }
  visited[current.refId] = true;
  let images = [];
  if (current.type === 'findImage') {
    images.push({
      fromContext: true,
      fromFindImage: true,
      cluster: current.cluster,
      pattern: current.imageNamePattern,
      repository: current.name,
      stageId: current.refId,
    });
  } else if (current.type === 'bake') {
    images.push({
      fromContext: true,
      fromBake: true,
      repository: current.ami_name,
      organization: current.organization,
      stageId: current.refId,
    });
  }
  // Recurse through each upstream (requisite) stage and accumulate its images.
  current.requisiteStageRefIds.forEach(id => {
    const upstreamStage = all.find(stage => stage.refId === id);
    if (upstreamStage) {
      images = images.concat(findContextImages(upstreamStage, all, visited));
    }
  });
  return images;
}
// Extracts image candidates from the pipeline's docker triggers; non-docker
// triggers contribute nothing.
function findTriggerImages(triggers) {
  return triggers
    .filter(({ type }) => type === 'docker')
    .map(({ repository, account, organization, registry, tag }) => ({
      fromTrigger: true,
      repository,
      account,
      organization,
      registry,
      tag,
    }));
}
// Collects image candidates from expected artifacts of type docker/image that
// are visible to the current stage.
function findArtifactImages(currentStage, pipeline) {
  return ExpectedArtifactService.getExpectedArtifactsAvailableToStage(currentStage, pipeline)
    .filter(({ matchArtifact }) => matchArtifact.type === 'docker/image')
    .map(({ id, matchArtifact }) => ({
      fromArtifact: true,
      artifactId: id,
      name: matchArtifact.name,
    }));
}
// Builds the skeleton command used when adding a deploy-cluster stage to a
// pipeline: gathers all image candidates visible to the stage (upstream
// stages, docker triggers, expected artifacts) and primes the editor view state.
function buildNewClusterCommandForPipeline(current, pipeline) {
  let contextImages = findContextImages(current, pipeline.stages) || [];
  contextImages = contextImages.concat(findTriggerImages(pipeline.triggers));
  contextImages = contextImages.concat(findArtifactImages(current, pipeline));
  return {
    strategy: '',
    viewState: {
      contextImages: contextImages,
      mode: 'editPipeline',
      submitButtonLabel: 'Done',
      // Forces the user to pick a cluster template before editing details.
      requiresTemplateSelection: true,
      useAutoscaler: false,
    },
  };
}
// Rehydrates an existing pipeline deploy-cluster command for editing:
// refreshes the upstream image candidates (find-image/bake stages, docker
// triggers, expected artifacts), reconciles the command's containers against
// them, re-attaches the UI helpers, and resets the view state.
function buildClusterCommandFromPipeline(app, originalCommand, current, pipeline) {
  // Deep-copy so edits never mutate the stored pipeline stage.
  const command = _.cloneDeep(originalCommand);
  let contextImages = findContextImages(current, pipeline.stages) || [];
  contextImages = contextImages.concat(findTriggerImages(pipeline.triggers));
  contextImages = contextImages.concat(findArtifactImages(current, pipeline));
  command.containers = reconcileUpstreamImages(command.containers, contextImages);
  // forEach (not map): this loop mutates each container in place, matching the
  // other builders in this file; the previous .map discarded its result array.
  command.containers.forEach(container => {
    container.imageDescription.imageId = buildImageId(container.imageDescription);
  });
  command.groupByRegistry = groupByRegistry;
  command.buildImageId = buildImageId;
  command.strategy = command.strategy || '';
  command.selectedProvider = 'kubernetes';
  command.viewState = {
    mode: 'editPipeline',
    contextImages: contextImages,
    submitButtonLabel: 'Done',
    useAutoscaler: !!command.scalingPolicy,
  };
  // Ensure a well-formed autoscaler policy so the UI always has a CPU target.
  if (!_.has(command, 'scalingPolicy.cpuUtilization.target')) {
    command.scalingPolicy = { cpuUtilization: { target: 40 } };
  }
  return command;
}
return {
buildNewClusterCommand: buildNewClusterCommand,
buildClusterCommandFromExisting: buildClusterCommandFromExisting,
buildNewClusterCommandForPipeline: buildNewClusterCommandForPipeline,
buildClusterCommandFromPipeline: buildClusterCommandFromPipeline,
groupByRegistry: groupByRegistry,
buildImageId: buildImageId,
};
});
| Java |
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Creates a VM with the provided name, metadata, and auth scopes."""
# Base URL shared by every v1 Compute Engine REST resource self-link.
COMPUTE_URL_BASE = 'https://www.googleapis.com/compute/v1/'


def GlobalComputeUrl(project, collection, name):
  """Return the self-link URL for a global Compute Engine resource."""
  return '%sprojects/%s/global/%s/%s' % (
      COMPUTE_URL_BASE, project, collection, name)
def ZonalComputeUrl(project, zone, collection, name):
  """Return the self-link URL for a zonal Compute Engine resource."""
  return '%sprojects/%s/zones/%s/%s/%s' % (
      COMPUTE_URL_BASE, project, zone, collection, name)
def GenerateConfig(context):
  """Generate the Deployment Manager resource configuration.

  Args:
    context: Deployment Manager context. Reads properties 'instanceName',
      'zone', 'metadata' (dict) and 'scopes' (list), plus
      context.env['project'].

  Returns:
    A dict with a single 'resources' entry describing an f1-micro
    compute.v1.instance with a boot disk and an external NAT interface.
  """
  base_name = context.properties['instanceName']

  # Flatten the metadata dict into the GCE API's list-of-{key,value} shape.
  # dict.items() works on both Python 2 and 3; the previous iteritems() call
  # was Python-2-only and raises AttributeError on Python 3 runtimes.
  items = [{'key': key, 'value': value}
           for key, value in context.properties['metadata'].items()]
  metadata = {'items': items}

  # Properties for the container-based instance.
  instance = {
      'zone': context.properties['zone'],
      'machineType': ZonalComputeUrl(
          context.env['project'], context.properties['zone'], 'machineTypes',
          'f1-micro'),
      'metadata': metadata,
      'serviceAccounts': [{
          'email': 'default',
          'scopes': context.properties['scopes']
      }],
      'disks': [{
          'deviceName': 'boot',
          'type': 'PERSISTENT',
          'autoDelete': True,
          'boot': True,
          'initializeParams': {
              'diskName': base_name + '-disk',
              'sourceImage': GlobalComputeUrl(
                  'debian-cloud', 'images',
                  ''.join(['backports-debian', '-7-wheezy-v20151104']))
          },
      }],
      'networkInterfaces': [{
          'accessConfigs': [{
              'name': 'external-nat',
              'type': 'ONE_TO_ONE_NAT'
          }],
          'network': GlobalComputeUrl(
              context.env['project'], 'networks', 'default')
      }]
  }

  # Resources and output to return.
  return {
      'resources': [{
          'name': base_name,
          'type': 'compute.v1.instance',
          'properties': instance
      }]
  }
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.carbondata.core.writer.sortindex;
import java.io.File;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
import org.apache.carbondata.core.carbon.ColumnIdentifier;
import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
import org.apache.carbondata.core.reader.sortindex.CarbonDictionarySortIndexReader;
import org.apache.carbondata.core.reader.sortindex.CarbonDictionarySortIndexReaderImpl;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.writer.CarbonDictionaryWriter;
import org.apache.carbondata.core.writer.CarbonDictionaryWriterImpl;
import org.apache.commons.lang.ArrayUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
/**
* class contains the unit test cases of the dictionary sort index & sort index inverted writing
*/
/**
 * Unit tests for the dictionary sort index and inverted sort index writer:
 * verifies that data written through CarbonDictionarySortIndexWriterImpl can
 * be read back unchanged via CarbonDictionarySortIndexReaderImpl, including
 * the empty-list edge case.
 */
public class CarbonDictionarySortIndexWriterImplTest {

  /** Store location used by all tests; kept under target/ so builds stay self-contained. */
  private String hdfsStorePath;

  @Before public void setUp() throws Exception {
    hdfsStorePath = "target/carbonStore";
  }

  @After public void tearDown() throws Exception {
    // Cleanup is intentionally disabled so written files can be inspected after
    // a run; re-enable the call below to remove the store between tests.
    //deleteStorePath();
  }

  /**
   * Writes a dictionary plus its sort index and inverted sort index, then
   * reads both indexes back and asserts that they round-trip unchanged.
   *
   * @throws Exception on any read/write failure
   */
  @Test public void write() throws Exception {
    String storePath = hdfsStorePath;
    CarbonTableIdentifier carbonTableIdentifier =
        new CarbonTableIdentifier("testSchema", "carbon", UUID.randomUUID().toString());
    ColumnIdentifier columnIdentifier = new ColumnIdentifier("Name", null, null);
    String metaFolderPath = hdfsStorePath + File.separator + carbonTableIdentifier.getDatabaseName()
        + File.separator + carbonTableIdentifier.getTableName() + File.separator + "Metadata";
    CarbonUtil.checkAndCreateFolder(metaFolderPath);
    CarbonDictionaryWriter dictionaryWriter =
        new CarbonDictionaryWriterImpl(hdfsStorePath, carbonTableIdentifier, columnIdentifier);
    CarbonDictionarySortIndexWriter dictionarySortIndexWriter =
        new CarbonDictionarySortIndexWriterImpl(carbonTableIdentifier, columnIdentifier, storePath);
    List<int[]> indexList = prepareExpectedData();
    // Populate the dictionary itself; the sort index files refer to these entries.
    for (int value : indexList.get(0)) {
      dictionaryWriter.write(String.valueOf(value));
    }
    dictionaryWriter.close();
    dictionaryWriter.commit();
    List<Integer> sortIndex = Arrays.asList(ArrayUtils.toObject(indexList.get(0)));
    List<Integer> invertedSortIndex = Arrays.asList(ArrayUtils.toObject(indexList.get(1)));
    dictionarySortIndexWriter.writeSortIndex(sortIndex);
    dictionarySortIndexWriter.writeInvertedSortIndex(invertedSortIndex);
    dictionarySortIndexWriter.close();
    CarbonDictionarySortIndexReader carbonDictionarySortIndexReader =
        new CarbonDictionarySortIndexReaderImpl(carbonTableIdentifier, columnIdentifier, storePath);
    List<Integer> actualSortIndex = carbonDictionarySortIndexReader.readSortIndex();
    List<Integer> actualInvertedSortIndex = carbonDictionarySortIndexReader.readInvertedSortIndex();
    for (int i = 0; i < actualSortIndex.size(); i++) {
      Assert.assertEquals(sortIndex.get(i), actualSortIndex.get(i));
      Assert.assertEquals(invertedSortIndex.get(i), actualInvertedSortIndex.get(i));
    }
  }

  /**
   * Writing empty index lists must succeed and read back as empty, so the
   * round-trip comparison loop below simply never executes.
   *
   * @throws Exception on any read/write failure
   */
  @Test public void writingEmptyValue() throws Exception {
    String storePath = hdfsStorePath;
    CarbonTableIdentifier carbonTableIdentifier =
        new CarbonTableIdentifier("testSchema", "carbon", UUID.randomUUID().toString());
    ColumnIdentifier columnIdentifier = new ColumnIdentifier("Name", null, null);
    CarbonDictionarySortIndexWriter dictionarySortIndexWriter =
        new CarbonDictionarySortIndexWriterImpl(carbonTableIdentifier, columnIdentifier, storePath);
    List<Integer> sortIndex = new ArrayList<>();
    List<Integer> invertedSortIndex = new ArrayList<>();
    dictionarySortIndexWriter.writeSortIndex(sortIndex);
    dictionarySortIndexWriter.writeInvertedSortIndex(invertedSortIndex);
    dictionarySortIndexWriter.close();
    CarbonDictionarySortIndexReader carbonDictionarySortIndexReader =
        new CarbonDictionarySortIndexReaderImpl(carbonTableIdentifier, columnIdentifier, storePath);
    List<Integer> actualSortIndex = carbonDictionarySortIndexReader.readSortIndex();
    List<Integer> actualInvertedSortIndex = carbonDictionarySortIndexReader.readInvertedSortIndex();
    for (int i = 0; i < actualSortIndex.size(); i++) {
      Assert.assertEquals(sortIndex.get(i), actualSortIndex.get(i));
      Assert.assertEquals(invertedSortIndex.get(i), actualInvertedSortIndex.get(i));
    }
  }

  /**
   * Builds the test fixture: a sort index permutation and its inverse.
   */
  private List<int[]> prepareExpectedData() {
    List<int[]> indexList = new ArrayList<>(2);
    int[] sortIndex = { 0, 3, 2, 4, 1 };
    // True inverse permutation of sortIndex. The previous fixture
    // { 0, 2, 4, 1, 2 } contained a duplicate and therefore was not a valid
    // inverted index, even though the write/read round-trip still passed.
    int[] sortIndexInverted = { 0, 4, 2, 1, 3 };
    indexList.add(0, sortIndex);
    indexList.add(1, sortIndexInverted);
    return indexList;
  }

  /** Deletes the store path (currently unused; see tearDown). */
  private void deleteStorePath() {
    FileFactory.FileType fileType = FileFactory.getFileType(this.hdfsStorePath);
    CarbonFile carbonFile = FileFactory.getCarbonFile(this.hdfsStorePath, fileType);
    deleteRecursiveSilent(carbonFile);
  }

  /** Recursively deletes a folder tree, silently ignoring failures. */
  private static void deleteRecursiveSilent(CarbonFile f) {
    if (f.isDirectory()) {
      if (f.listFiles() != null) {
        for (CarbonFile c : f.listFiles()) {
          deleteRecursiveSilent(c);
        }
      }
    }
    if (f.exists() && !f.delete()) {
      return;
    }
  }
}
| Java |
import { getGlobal } from '../src/prebidGlobal.js';
import { createBid } from '../src/bidfactory.js';
import { STATUS } from '../src/constants.json';
import { ajax } from '../src/ajax.js';
import * as utils from '../src/utils.js';
import { config } from '../src/config.js';
import { getHook } from '../src/hook.js';
const DEFAULT_CURRENCY_RATE_URL = 'https://cdn.jsdelivr.net/gh/prebid/currency-file@1/latest.json?date=$$TODAY$$';
const CURRENCY_RATE_PRECISION = 4;
var bidResponseQueue = [];
var conversionCache = {};
var currencyRatesLoaded = false;
var needToCallForCurrencyFile = true;
var adServerCurrency = 'USD';
export var currencySupportEnabled = false;
export var currencyRates = {};
var bidderCurrencyDefault = {};
var defaultRates;
/**
* Configuration function for currency
* @param {string} [config.adServerCurrency = 'USD']
* ISO 4217 3-letter currency code that represents the target currency. (e.g. 'EUR'). If this value is present,
* the currency conversion feature is activated.
* @param {number} [config.granularityMultiplier = 1]
 * A decimal value representing how much to scale the price granularity calculations.
* @param {object} config.bidderCurrencyDefault
* An optional argument to specify bid currencies for bid adapters. This option is provided for the transitional phase
* before every bid adapter will specify its own bid currency. If the adapter specifies a bid currency, this value is
* ignored for that bidder.
*
* example:
* {
* rubicon: 'USD'
* }
* @param {string} [config.conversionRateFile = 'URL pointing to conversion file']
* Optional path to a file containing currency conversion data. Prebid.org hosts a file that is used as the default,
* if not specified.
* @param {object} [config.rates]
* This optional argument allows you to specify the rates with a JSON object, subverting the need for a external
* config.conversionRateFile parameter. If this argument is specified, the conversion rate file will not be loaded.
*
* example:
* {
* 'GBP': { 'CNY': 8.8282, 'JPY': 141.7, 'USD': 1.2824 },
* 'USD': { 'CNY': 6.8842, 'GBP': 0.7798, 'JPY': 110.49 }
* }
* @param {object} [config.defaultRates]
* This optional currency rates definition follows the same format as config.rates, however it is only utilized if
* there is an error loading the config.conversionRateFile.
*/
/**
 * Applies the publisher's `currency` configuration: stores static/default
 * rates, resolves the rate-file URL (expanding the $$TODAY$$ cache-busting
 * macro), and enables or disables currency support accordingly.
 */
export function setConfig(config) {
  let url = DEFAULT_CURRENCY_RATE_URL;

  // Statically-supplied rates take precedence and make the file fetch unnecessary.
  if (typeof config.rates === 'object') {
    currencyRates.conversions = config.rates;
    currencyRatesLoaded = true;
    needToCallForCurrencyFile = false; // don't call if rates are already specified
  }

  if (typeof config.defaultRates === 'object') {
    defaultRates = config.defaultRates;

    // set up the default rates to be used if the rate file doesn't get loaded in time
    currencyRates.conversions = defaultRates;
    currencyRatesLoaded = true;
  }

  if (typeof config.adServerCurrency === 'string') {
    utils.logInfo('enabling currency support', arguments);

    adServerCurrency = config.adServerCurrency;
    if (config.conversionRateFile) {
      utils.logInfo('currency using override conversionRateFile:', config.conversionRateFile);
      url = config.conversionRateFile;
    }

    // see if the url contains a date macro
    // this is a workaround to the fact that jsdelivr doesn't currently support setting a 24-hour HTTP cache header
    // So this is an approach to let the browser cache a copy of the file each day
    // We should remove the macro once the CDN support a day-level HTTP cache setting
    const dateMacro = '$$TODAY$$';
    const macroLocation = url.indexOf(dateMacro);
    if (macroLocation !== -1) {
      // get the date to resolve the macro
      const d = new Date();
      let month = `${d.getMonth() + 1}`;
      let day = `${d.getDate()}`;
      if (month.length < 2) month = `0${month}`;
      if (day.length < 2) day = `0${day}`;
      const todaysDate = `${d.getFullYear()}${month}${day}`;
      // splice todaysDate in place of the macro (length derived from the macro
      // itself rather than the previous hard-coded offset of 9)
      url = `${url.substring(0, macroLocation)}${todaysDate}${url.substring(macroLocation + dateMacro.length)}`;
    }
    initCurrency(url);
  } else {
    // currency support is disabled, setting defaults
    utils.logInfo('disabling currency support');
    resetCurrency();
  }
  if (typeof config.bidderCurrencyDefault === 'object') {
    bidderCurrencyDefault = config.bidderCurrencyDefault;
  }
}
// Re-run currency setup whenever the publisher sets or updates the `currency` config.
config.getConfig('currency', config => setConfig(config.currency));
// Reports a rate-file load/parse failure. With defaultRates configured the
// failure is recoverable, so only warn; otherwise surface it as an error.
function errorSettingsRates(msg) {
  if (!defaultRates) {
    utils.logError(msg);
    return;
  }
  utils.logWarn(msg);
  utils.logWarn('Currency failed loading rates, falling back to currency.defaultRates');
}
// Enables currency support: resets the conversion cache, installs the
// addBidResponse hook and the page-level convertCurrency helper, and (once)
// fetches the conversion-rate file from `url`.
function initCurrency(url) {
  conversionCache = {};
  currencySupportEnabled = true;

  utils.logInfo('Installing addBidResponse decorator for currency module', arguments);

  // Adding conversion function to prebid global for external module and on page use
  getGlobal().convertCurrency = (cpm, fromCurrency, toCurrency) => parseFloat(cpm) * getCurrencyConversion(fromCurrency, toCurrency);
  getHook('addBidResponse').before(addBidResponseHook, 100);

  // call for the file if we haven't already
  if (needToCallForCurrencyFile) {
    needToCallForCurrencyFile = false;
    ajax(url,
      {
        success: function (response) {
          try {
            currencyRates = JSON.parse(response);
            utils.logInfo('currencyRates set to ' + JSON.stringify(currencyRates));
            currencyRatesLoaded = true;
            // Flush bids that were queued while the rate file was in flight.
            processBidResponseQueue();
          } catch (e) {
            errorSettingsRates('Failed to parse currencyRates response: ' + response);
          }
        },
        error: errorSettingsRates
      }
    );
  }
}
// Disables currency support and restores every piece of module state to its
// initial value (hook removed, caches cleared, USD ad server currency).
function resetCurrency() {
  utils.logInfo('Uninstalling addBidResponse decorator for currency module', arguments);

  getHook('addBidResponse').getHooks({hook: addBidResponseHook}).remove();
  delete getGlobal().convertCurrency;

  adServerCurrency = 'USD';
  conversionCache = {};
  currencySupportEnabled = false;
  currencyRatesLoaded = false;
  needToCallForCurrencyFile = true;
  currencyRates = {};
  bidderCurrencyDefault = {};
}
// Hook installed before core addBidResponse: normalizes the bid's currency
// (per-bidder default, then USD fallback), attaches getCpmInNewCurrency for
// analytics, and either forwards the bid immediately (already in the ad
// server currency) or queues it until the conversion rates are available.
export function addBidResponseHook(fn, adUnitCode, bid) {
  if (!bid) {
    return fn.call(this, adUnitCode); // if no bid, call original and let it display warnings
  }

  let bidder = bid.bidderCode || bid.bidder;
  if (bidderCurrencyDefault[bidder]) {
    let currencyDefault = bidderCurrencyDefault[bidder];
    if (bid.currency && currencyDefault !== bid.currency) {
      // The adapter's own currency wins over the configured per-bidder default.
      utils.logWarn(`Currency default '${bidder}: ${currencyDefault}' ignored. adapter specified '${bid.currency}'`);
    } else {
      bid.currency = currencyDefault;
    }
  }

  // default to USD if currency not set
  if (!bid.currency) {
    utils.logWarn('Currency not specified on bid.  Defaulted to "USD"');
    bid.currency = 'USD';
  }

  // used for analytics
  bid.getCpmInNewCurrency = function(toCurrency) {
    return (parseFloat(this.cpm) * getCurrencyConversion(this.currency, toCurrency)).toFixed(3);
  };

  // execute immediately if the bid is already in the desired currency
  if (bid.currency === adServerCurrency) {
    return fn.call(this, adUnitCode, bid);
  }

  // otherwise queue the (wrapped) call; it runs once rates are loaded, or
  // immediately below if conversion is unnecessary/already possible.
  bidResponseQueue.push(wrapFunction(fn, this, [adUnitCode, bid]));
  if (!currencySupportEnabled || currencyRatesLoaded) {
    processBidResponseQueue();
  }
}
// Drains the queue of deferred addBidResponse calls, invoking each in FIFO order.
function processBidResponseQueue() {
  while (bidResponseQueue.length) {
    const runQueuedResponse = bidResponseQueue.shift();
    runQueuedResponse();
  }
}
// Wraps a deferred addBidResponse call: when finally executed, converts the
// bid's cpm into the ad server currency (if a conversion is needed) or, on
// conversion failure, replaces the bid with a NO_BID before forwarding.
function wrapFunction(fn, context, params) {
  return function() {
    let bid = params[1];
    if (bid !== undefined && 'currency' in bid && 'cpm' in bid) {
      let fromCurrency = bid.currency;
      try {
        let conversion = getCurrencyConversion(fromCurrency);
        if (conversion !== 1) {
          bid.cpm = (parseFloat(bid.cpm) * conversion).toFixed(4);
          bid.currency = adServerCurrency;
        }
      } catch (e) {
        // Unsupported currency: surface a NO_BID rather than a mispriced bid.
        utils.logWarn('Returning NO_BID, getCurrencyConversion threw error: ', e);
        params[1] = createBid(STATUS.NO_BID, {
          bidder: bid.bidderCode || bid.bidder,
          bidId: bid.requestId
        });
      }
    }
    return fn.apply(context, params);
  };
}
// Returns the multiplier converting `fromCurrency` into `toCurrency`
// (defaulting to the configured ad server currency). Resolution order:
// cached result, identity (support disabled + USD, or same currency), direct
// rate, reciprocal rate, then triangulation through the rate file's first
// base currency. Throws when a currency is not present in the rates data.
function getCurrencyConversion(fromCurrency, toCurrency = adServerCurrency) {
  var conversionRate = null;
  var rates;
  let cacheKey = `${fromCurrency}->${toCurrency}`;
  if (cacheKey in conversionCache) {
    conversionRate = conversionCache[cacheKey];
    utils.logMessage('Using conversionCache value ' + conversionRate + ' for ' + cacheKey);
  } else if (currencySupportEnabled === false) {
    if (fromCurrency === 'USD') {
      conversionRate = 1;
    } else {
      throw new Error('Prebid currency support has not been enabled and fromCurrency is not USD');
    }
  } else if (fromCurrency === toCurrency) {
    conversionRate = 1;
  } else {
    if (fromCurrency in currencyRates.conversions) {
      // using direct conversion rate from fromCurrency to toCurrency
      rates = currencyRates.conversions[fromCurrency];
      if (!(toCurrency in rates)) {
        // bid should fail, currency is not supported
        throw new Error('Specified adServerCurrency in config \'' + toCurrency + '\' not found in the currency rates file');
      }
      conversionRate = rates[toCurrency];
      utils.logInfo('getCurrencyConversion using direct ' + fromCurrency + ' to ' + toCurrency + ' conversionRate ' + conversionRate);
    } else if (toCurrency in currencyRates.conversions) {
      // using reciprocal of conversion rate from toCurrency to fromCurrency
      rates = currencyRates.conversions[toCurrency];
      if (!(fromCurrency in rates)) {
        // bid should fail, currency is not supported
        throw new Error('Specified fromCurrency \'' + fromCurrency + '\' not found in the currency rates file');
      }
      conversionRate = roundFloat(1 / rates[fromCurrency], CURRENCY_RATE_PRECISION);
      utils.logInfo('getCurrencyConversion using reciprocal ' + fromCurrency + ' to ' + toCurrency + ' conversionRate ' + conversionRate);
    } else {
      // first defined currency base used as intermediary
      var anyBaseCurrency = Object.keys(currencyRates.conversions)[0];
      if (!(fromCurrency in currencyRates.conversions[anyBaseCurrency])) {
        // bid should fail, currency is not supported
        throw new Error('Specified fromCurrency \'' + fromCurrency + '\' not found in the currency rates file');
      }
      var toIntermediateConversionRate = 1 / currencyRates.conversions[anyBaseCurrency][fromCurrency];
      if (!(toCurrency in currencyRates.conversions[anyBaseCurrency])) {
        // bid should fail, currency is not supported
        throw new Error('Specified adServerCurrency in config \'' + toCurrency + '\' not found in the currency rates file');
      }
      var fromIntermediateConversionRate = currencyRates.conversions[anyBaseCurrency][toCurrency];
      conversionRate = roundFloat(toIntermediateConversionRate * fromIntermediateConversionRate, CURRENCY_RATE_PRECISION);
      utils.logInfo('getCurrencyConversion using intermediate ' + fromCurrency + ' thru ' + anyBaseCurrency + ' to ' + toCurrency + ' conversionRate ' + conversionRate);
    }
  }
  // Memoize for subsequent bids in the same page view.
  if (!(cacheKey in conversionCache)) {
    utils.logMessage('Adding conversionCache value ' + conversionRate + ' for ' + cacheKey);
    conversionCache[cacheKey] = conversionRate;
  }
  return conversionRate;
}
function roundFloat(num, dec) {
var d = 1;
for (let i = 0; i < dec; i++) {
d += '0';
}
return Math.round(num * d) / d;
}
| Java |
<!--
~ Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->
<div class="main-content">
<h2>Getting Started</h2>
<p>WSO2 API Manager is a complete solution for publishing APIs, creating and managing a developer community, and
for scalably routing API traffic. It leverages proven, production-ready, integration, security and
governance components from WSO2 Enterprise Service Bus, WSO2 Identity Server, and WSO2 Governance Registry.
Moreover, it is powered by WSO2 Business Activity Monitor, thereby making WSO2 API Manager ready for any
large-scale deployments right away.
</p>
<p>
As part of its latest release, the REST API was developed as a CXF REST web application running on WSO2 API
Manager. This API comes with a pluggable security mechanism. Since API security is implemented as a CXF
handler, if you need to plug a custom security mechanism, you can write your own handler and add it to the
web service. This REST API is implemented based on REST best practices and specifications. API development
started with a Swagger specification for Store and Publisher operations.
</p>
<p>
Before invoking the API with the access token, obtain the consumer key/secret key pair by calling the
dynamic client registration endpoint. You can request an access token with the preferred grant type. An
example is shown below.
</p>
<div class="pre"><code class="bash">curl -X POST -H "Authorization: Basic YWRtaW46YWRtaW4=" -H "Content-Type: application/json" -d @payload.json http://localhost:9763/client-registration/v0.9/register</code></div>
<br/>
<p>
Sample request:
</p>
<div class="pre"><code class="json">{
"callbackUrl": "www.google.lk",
"clientName": "rest_api_publisher",
"tokenScope": "Production",
"owner": "admin",
"grantType": "password refresh_token",
"saasApp": true
}</code></div>
<br/>
<p>
Sample response:
</p>
<div class="pre"><code class="json">{
"callBackURL": "www.google.lk",
"jsonString":
"{
\"username\":\"admin\",
\"redirect_uris\":\"www.google.lk\",
\"tokenScope\":[Ljava.lang.String;@3a73796a,
\"client_name\":\"admin_rest_api_publisher\",
\"grant_types\":\"authorization_code password refresh_token iwa:ntlm
urn:ietf:params:oauth:grant-type:saml2-bearer client_credentialsimplicit\"
}",
"clientName": null,
"clientId": "HfEl1jJPdg5tbtrxhAwybN05QGoa",
"clientSecret": "l6c0aoLcWR3fwezHhc7XoGOht5Aa"
}</code></div>
<br/>
<p>
During API invocation, the request first passes through the CXF handler, which calls an introspection API
to validate the token. Generate the access token using the OAuth application created earlier. A sample call
to generate the access token is shown below.
</p>
<p>
<b>
<i>Note:</i> Access token must be generated using correct scope for the resource.
Scope for each resource is given in resource documentation.
</b>
</p>
<div class="pre"><code class="bash">curl -k -d "grant_type=password&username=admin&password=admin&<b>scope=apim:api_view</b>" -H "Authorization: Basic SGZFbDFqSlBkZzV0YnRyeGhBd3liTjA1UUdvYTpsNmMwYW9MY1dSM2Z3ZXpIaGM3WG9HT2h0NUFh" https://127.0.0.1:8243/token</code></div>
<br/>
<p>
Token response:
</p>
<div class="pre"><code class="json">{
"scope":"apim:api_view",
"token_type":"Bearer",
"expires_in":3600,
"refresh_token":"33c3be152ebf0030b3fb76f2c1f80bf8",
"access_token":"292ff0fd256814536baca0926f483c8d"
}</code></div>
<br/>
<p>
Now you have a valid access token, which you can use to invoke an API. Navigate through the API descriptions
to find the required API, obtain an access token as described above and invoke the API with the
authentication header. If you use a different authentication mechanism, this process may change.
</p>
</div> | Java |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/codecommit/CodeCommit_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/codecommit/model/Conflict.h>
#include <aws/codecommit/model/BatchDescribeMergeConflictsError.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Json
{
class JsonValue;
} // namespace Json
} // namespace Utils
namespace CodeCommit
{
namespace Model
{
/**
 * Result of a BatchDescribeMergeConflicts request: the per-file conflicts,
 * any per-file errors, the commit IDs used in the merge evaluation, and a
 * pagination token for retrieving the next batch of results.
 */
class AWS_CODECOMMIT_API BatchDescribeMergeConflictsResult
{
public:
  BatchDescribeMergeConflictsResult();
  BatchDescribeMergeConflictsResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
  BatchDescribeMergeConflictsResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);

  /**
   * <p>A list of conflicts for each file, including the conflict metadata and the
   * hunks of the differences between the files.</p>
   */
  inline const Aws::Vector<Conflict>& GetConflicts() const{ return m_conflicts; }
  inline void SetConflicts(const Aws::Vector<Conflict>& value) { m_conflicts = value; }
  inline void SetConflicts(Aws::Vector<Conflict>&& value) { m_conflicts = std::move(value); }
  inline BatchDescribeMergeConflictsResult& WithConflicts(const Aws::Vector<Conflict>& value) { SetConflicts(value); return *this;}
  inline BatchDescribeMergeConflictsResult& WithConflicts(Aws::Vector<Conflict>&& value) { SetConflicts(std::move(value)); return *this;}
  inline BatchDescribeMergeConflictsResult& AddConflicts(const Conflict& value) { m_conflicts.push_back(value); return *this; }
  inline BatchDescribeMergeConflictsResult& AddConflicts(Conflict&& value) { m_conflicts.push_back(std::move(value)); return *this; }

  /**
   * <p>An enumeration token that can be used in a request to return the next batch
   * of the results.</p>
   */
  inline const Aws::String& GetNextToken() const{ return m_nextToken; }
  inline void SetNextToken(const Aws::String& value) { m_nextToken = value; }
  inline void SetNextToken(Aws::String&& value) { m_nextToken = std::move(value); }
  inline void SetNextToken(const char* value) { m_nextToken.assign(value); }
  inline BatchDescribeMergeConflictsResult& WithNextToken(const Aws::String& value) { SetNextToken(value); return *this;}
  inline BatchDescribeMergeConflictsResult& WithNextToken(Aws::String&& value) { SetNextToken(std::move(value)); return *this;}
  inline BatchDescribeMergeConflictsResult& WithNextToken(const char* value) { SetNextToken(value); return *this;}

  /**
   * <p>A list of any errors returned while describing the merge conflicts for each
   * file.</p>
   */
  inline const Aws::Vector<BatchDescribeMergeConflictsError>& GetErrors() const{ return m_errors; }
  inline void SetErrors(const Aws::Vector<BatchDescribeMergeConflictsError>& value) { m_errors = value; }
  inline void SetErrors(Aws::Vector<BatchDescribeMergeConflictsError>&& value) { m_errors = std::move(value); }
  inline BatchDescribeMergeConflictsResult& WithErrors(const Aws::Vector<BatchDescribeMergeConflictsError>& value) { SetErrors(value); return *this;}
  inline BatchDescribeMergeConflictsResult& WithErrors(Aws::Vector<BatchDescribeMergeConflictsError>&& value) { SetErrors(std::move(value)); return *this;}
  inline BatchDescribeMergeConflictsResult& AddErrors(const BatchDescribeMergeConflictsError& value) { m_errors.push_back(value); return *this; }
  inline BatchDescribeMergeConflictsResult& AddErrors(BatchDescribeMergeConflictsError&& value) { m_errors.push_back(std::move(value)); return *this; }

  /**
   * <p>The commit ID of the destination commit specifier that was used in the merge
   * evaluation.</p>
   */
  inline const Aws::String& GetDestinationCommitId() const{ return m_destinationCommitId; }
  inline void SetDestinationCommitId(const Aws::String& value) { m_destinationCommitId = value; }
  inline void SetDestinationCommitId(Aws::String&& value) { m_destinationCommitId = std::move(value); }
  inline void SetDestinationCommitId(const char* value) { m_destinationCommitId.assign(value); }
  inline BatchDescribeMergeConflictsResult& WithDestinationCommitId(const Aws::String& value) { SetDestinationCommitId(value); return *this;}
  inline BatchDescribeMergeConflictsResult& WithDestinationCommitId(Aws::String&& value) { SetDestinationCommitId(std::move(value)); return *this;}
  inline BatchDescribeMergeConflictsResult& WithDestinationCommitId(const char* value) { SetDestinationCommitId(value); return *this;}

  /**
   * <p>The commit ID of the source commit specifier that was used in the merge
   * evaluation.</p>
   */
  inline const Aws::String& GetSourceCommitId() const{ return m_sourceCommitId; }
  inline void SetSourceCommitId(const Aws::String& value) { m_sourceCommitId = value; }
  inline void SetSourceCommitId(Aws::String&& value) { m_sourceCommitId = std::move(value); }
  inline void SetSourceCommitId(const char* value) { m_sourceCommitId.assign(value); }
  inline BatchDescribeMergeConflictsResult& WithSourceCommitId(const Aws::String& value) { SetSourceCommitId(value); return *this;}
  inline BatchDescribeMergeConflictsResult& WithSourceCommitId(Aws::String&& value) { SetSourceCommitId(std::move(value)); return *this;}
  inline BatchDescribeMergeConflictsResult& WithSourceCommitId(const char* value) { SetSourceCommitId(value); return *this;}

  /**
   * <p>The commit ID of the merge base.</p>
   */
  inline const Aws::String& GetBaseCommitId() const{ return m_baseCommitId; }
  inline void SetBaseCommitId(const Aws::String& value) { m_baseCommitId = value; }
  inline void SetBaseCommitId(Aws::String&& value) { m_baseCommitId = std::move(value); }
  inline void SetBaseCommitId(const char* value) { m_baseCommitId.assign(value); }
  inline BatchDescribeMergeConflictsResult& WithBaseCommitId(const Aws::String& value) { SetBaseCommitId(value); return *this;}
  inline BatchDescribeMergeConflictsResult& WithBaseCommitId(Aws::String&& value) { SetBaseCommitId(std::move(value)); return *this;}
  inline BatchDescribeMergeConflictsResult& WithBaseCommitId(const char* value) { SetBaseCommitId(value); return *this;}

private:
  Aws::Vector<Conflict> m_conflicts;
  Aws::String m_nextToken;
  Aws::Vector<BatchDescribeMergeConflictsError> m_errors;
  Aws::String m_destinationCommitId;
  Aws::String m_sourceCommitId;
  Aws::String m_baseCommitId;
};
} // namespace Model
} // namespace CodeCommit
} // namespace Aws
| Java |
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" lang="en-us" xml:lang="en-us">
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<head>
<meta content="text/html; charset=utf-8" http-equiv="Content-Type" />
<meta name="copyright" content="(C) Copyright 2005" />
<meta name="DC.rights.owner" content="(C) Copyright 2005" />
<meta content="public" name="security" />
<meta content="index,follow" name="Robots" />
<meta http-equiv="PICS-Label" content='(PICS-1.1 "http://www.icra.org/ratingsv02.html" l gen true r (cz 1 lz 1 nz 1 oz 1 vz 1) "http://www.rsac.org/ratingsv01.html" l gen true r (n 0 s 0 v 0 l 0) "http://www.classify.org/safesurf/" l gen true r (SS~~000 1))' />
<meta content="reference" name="DC.Type" />
<meta name="DC.Title" content="derby.authentication.native.passwordLifetimeMillis" />
<meta content="derby.authentication.native.passwordLifetimeMillis, password expiration, specifying" name="DC.subject" />
<meta content="derby.authentication.native.passwordLifetimeMillis, password expiration, specifying" name="keywords" />
<meta scheme="URI" name="DC.Relation" content="crefproper22250.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperbuiltinalgorithm.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperiterations.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropersaltlength.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperauthdn.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperauthpw.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper26978.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper37341.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperpasswordthreshold.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper13766.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper25581.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper27467.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperclasspath.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper24846.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper81405.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper25025.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropernoautoboot.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper24390.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper39325.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropersqlauth.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper13217.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperxatrantimeout.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper43414.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper43517.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperpreallocator.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperstatementcachesize.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper10607.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper23835.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper40346.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper98166.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper46141.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperlogbuffersize.html" />
<meta scheme="URI" name="DC.Relation" content="rrefpropermaxlogshippinginterval.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperminlogshippinginterval.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperverbose.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperindexstatsauto.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperindexstatslog.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperindexstatstrace.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper27529.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperstormin.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper81359.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper28026.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper40688.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperrowlocking.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper34037.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperdefaultfileperms.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperextdiagsevlevel.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper33027.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper18151.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperlogboottrace.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper26985.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper35028.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperbootall.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperdurability.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper32066.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproper27355.html" />
<meta scheme="URI" name="DC.Relation" content="rrefproperdatadictversion.html" />
<meta content="XHTML" name="DC.Format" />
<meta content="rrefproperpasswordmillis" name="DC.Identifier" />
<meta content="en-us" name="DC.Language" />
<link href="commonltr.css" type="text/css" rel="stylesheet" />
<title>derby.authentication.native.passwordLifetimeMillis</title>
</head>
<body id="rrefproperpasswordmillis"><a name="rrefproperpasswordmillis"><!-- --></a>
<h1 class="topictitle1">derby.authentication.native.passwordLifetimeMillis</h1>
<div>
<div class="section"><h2 class="sectiontitle">Function</h2>
<p>Specifies the number of milliseconds a NATIVE authentication password remains
valid after being created, reset, or modified. If the value is less than or
equal to zero, the password never expires.</p>
<p>To avoid locking out the super-user, the password of the database owner of a
credentials database never expires.</p>
<p>If a connection attempt is made when the password's remaining lifetime is
less than a proportion of the maximum lifetime, a warning is issued. The
proportion is specified by the
<em><a href="rrefproperpasswordthreshold.html#rrefproperpasswordthreshold">derby.authentication.native.passwordLifetimeThreshold</a></em>
property.</p>
</div>
<div class="section"><h2 class="sectiontitle">Syntax</h2>
<pre><strong>derby.authentication.native.passwordLifetimeMillis=<em>millis</em></strong></pre>
</div>
<div class="section"><h2 class="sectiontitle">Default</h2>
<p>A number of milliseconds equal to 31 days (2,678,400,000).</p>
</div>
<div class="example"><h2 class="sectiontitle">Example</h2>
<pre><strong><span>-- system-wide property</span>
derby.authentication.native.passwordLifetimeMillis=5356800000
<span>-- database-level property</span>
CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(
'derby.authentication.native.passwordLifetimeMillis', '5356800000');</strong></pre>
</div>
<div class="section"><h2 class="sectiontitle">Dynamic or static</h2>
<p>Static. For system-wide properties, you must reboot
<span>Derby</span> for the change to take
effect. For database-wide properties, you must reboot the database for the
change to take effect.</p>
</div>
</div>
<div>
<div class="familylinks">
<div class="parentlink"><strong>Parent topic:</strong> <a href="crefproper22250.html" title="">Derby properties</a></div>
</div>
<div class="relref"><strong>Related reference</strong><br />
<div><a href="rrefproperbuiltinalgorithm.html" title="">derby.authentication.builtin.algorithm</a></div>
<div><a href="rrefproperiterations.html" title="">derby.authentication.builtin.iterations</a></div>
<div><a href="rrefpropersaltlength.html" title="">derby.authentication.builtin.saltLength</a></div>
<div><a href="rrefproperauthdn.html" title="">derby.authentication.ldap.searchAuthDN</a></div>
<div><a href="rrefproperauthpw.html" title="">derby.authentication.ldap.searchAuthPW</a></div>
<div><a href="rrefproper26978.html" title="">derby.authentication.ldap.searchBase</a></div>
<div><a href="rrefproper37341.html" title="">derby.authentication.ldap.searchFilter</a></div>
<div><a href="rrefproperpasswordthreshold.html" title="">derby.authentication.native.passwordLifetimeThreshold</a></div>
<div><a href="rrefproper13766.html" title="">derby.authentication.provider</a></div>
<div><a href="rrefproper25581.html" title="">derby.authentication.server</a></div>
<div><a href="rrefproper27467.html" title="">derby.connection.requireAuthentication</a></div>
<div><a href="rrefproperclasspath.html" title="">derby.database.classpath</a></div>
<div><a href="rrefproper24846.html" title="">derby.database.defaultConnectionMode</a></div>
<div><a href="rrefproper81405.html" title="">derby.database.forceDatabaseLock</a></div>
<div><a href="rrefproper25025.html" title="">derby.database.fullAccessUsers</a></div>
<div><a href="rrefpropernoautoboot.html" title="">derby.database.noAutoBoot</a></div>
<div><a href="rrefproper24390.html" title="">derby.database.propertiesOnly</a></div>
<div><a href="rrefproper39325.html" title="">derby.database.readOnlyAccessUsers</a></div>
<div><a href="rrefpropersqlauth.html" title="">derby.database.sqlAuthorization</a></div>
<div><a href="rrefproper13217.html" title="">derby.infolog.append</a></div>
<div><a href="rrefproperxatrantimeout.html" title="">derby.jdbc.xaTransactionTimeout</a></div>
<div><a href="rrefproper43414.html" title="">derby.language.logQueryPlan</a></div>
<div><a href="rrefproper43517.html" title="">derby.language.logStatementText</a></div>
<div><a href="rrefproperpreallocator.html" title="">derby.language.sequence.preallocator</a></div>
<div><a href="rrefproperstatementcachesize.html" title="">derby.language.statementCacheSize</a></div>
<div><a href="rrefproper10607.html" title="">derby.locks.deadlockTimeout</a></div>
<div><a href="rrefproper23835.html" title="">derby.locks.deadlockTrace</a></div>
<div><a href="rrefproper40346.html" title="">derby.locks.escalationThreshold</a></div>
<div><a href="rrefproper98166.html" title="">derby.locks.monitor</a></div>
<div><a href="rrefproper46141.html" title="">derby.locks.waitTimeout</a></div>
<div><a href="rrefproperlogbuffersize.html" title="">derby.replication.logBufferSize</a></div>
<div><a href="rrefpropermaxlogshippinginterval.html" title="">derby.replication.maxLogShippingInterval</a></div>
<div><a href="rrefproperminlogshippinginterval.html" title="">derby.replication.minLogShippingInterval</a></div>
<div><a href="rrefproperverbose.html" title="">derby.replication.verbose</a></div>
<div><a href="rrefproperindexstatsauto.html" title="">derby.storage.indexStats.auto</a></div>
<div><a href="rrefproperindexstatslog.html" title="">derby.storage.indexStats.log</a></div>
<div><a href="rrefproperindexstatstrace.html" title="">derby.storage.indexStats.trace</a></div>
<div><a href="rrefproper27529.html" title="">derby.storage.initialPages</a></div>
<div><a href="rrefproperstormin.html" title="">derby.storage.minimumRecordSize</a></div>
<div><a href="rrefproper81359.html" title="">derby.storage.pageCacheSize</a></div>
<div><a href="rrefproper28026.html" title="">derby.storage.pageReservedSpace</a></div>
<div><a href="rrefproper40688.html" title="">derby.storage.pageSize</a></div>
<div><a href="rrefproperrowlocking.html" title="">derby.storage.rowLocking</a></div>
<div><a href="rrefproper34037.html" title="">derby.storage.tempDirectory</a></div>
<div><a href="rrefproperdefaultfileperms.html" title="">derby.storage.useDefaultFilePermissions</a></div>
<div><a href="rrefproperextdiagsevlevel.html" title="">derby.stream.error.extendedDiagSeverityLevel</a></div>
<div><a href="rrefproper33027.html" title="">derby.stream.error.field</a></div>
<div><a href="rrefproper18151.html" title="">derby.stream.error.file</a></div>
<div><a href="rrefproperlogboottrace.html" title="">derby.stream.error.logBootTrace</a></div>
<div><a href="rrefproper26985.html" title="">derby.stream.error.logSeverityLevel</a></div>
<div><a href="rrefproper35028.html" title="">derby.stream.error.method</a></div>
<div><a href="rrefproperbootall.html" title="">derby.system.bootAll</a></div>
<div><a href="rrefproperdurability.html" title="">derby.system.durability</a></div>
<div><a href="rrefproper32066.html" title="">derby.system.home</a></div>
<div><a href="rrefproper27355.html" title="">derby.user.UserName</a></div>
<div><a href="rrefproperdatadictversion.html" title="">DataDictionaryVersion</a></div>
</div>
</div>
</body>
</html>
| Java |
import app from 'common/electron/app';
import path from 'path';
/**
 * @return the theme's css path
 */
function getThemePath (name) {
  const appRoot = app.getAppPath();
  return path.join(appRoot, 'themes', `${name}.css`);
}
/**
 * @return the style's css path
 */
function getStylePath (name) {
  const stylesDir = path.join(app.getAppPath(), 'styles');
  return path.join(stylesDir, `${name}.css`);
}
/**
 * @return the image's path
 */
function getImagePath (name) {
  const imagesDir = path.join(app.getAppPath(), 'images');
  return path.join(imagesDir, name);
}
/**
 * Windows only.
 * @return the directory where the app is ran from
 */
function getCustomUserDataPath () {
  const exeDir = path.dirname(app.getPath('exe'));
  return path.join(exeDir, 'data');
}
/**
 * Windows only.
 * @return the path to Update.exe created by Squirrel.Windows
 */
function getSquirrelUpdateExePath () {
  const exeDir = path.dirname(app.getPath('exe'));
  return path.join(exeDir, '..', 'Update.exe');
}
// Public API: path helpers for bundled themes, styles and images, plus
// Windows-specific Squirrel/user-data locations.
export default {
  getThemePath,
  getStylePath,
  getImagePath,
  getCustomUserDataPath,
  getSquirrelUpdateExePath
};
| Java |
{% extends "admin/change_form.html" %}
{% load i18n admin_static admin_modify admin_urls %}
{% block extrahead %}
{{ block.super }}
{# hackily include js required for django admin datepicker #}
<script type="text/javascript" src="{% static 'admin/js/core.js' %}"></script>
<script type="text/javascript" src="{% static 'admin/js/vendor/jquery/jquery.js' %}"></script>
<script type="text/javascript" src="{% static 'admin/js/jquery.init.js' %}"></script>
{{ form.media }}
{% endblock %}
{% block breadcrumbs %}
<div class="breadcrumbs">
<a href="{% url 'admin:index' %}">{% trans 'Home' %}</a>
› <a href="{% url 'admin:app_list' app_label=opts.app_label %}">{{ opts.app_config.verbose_name }}</a>
› <a href="{% url opts|admin_urlname:'changelist' %}">{{ opts.verbose_name_plural|capfirst }}</a>
› <a href="{% url 'admin:share_sourceconfig_change' source_config.id %}">{{ source_config.label }}</a>
› Harvest
</div>
{% endblock %}
{% block content %}
<div id="content-main">
<form action="" method="POST">
{% csrf_token %}
{% if form.non_field_errors|length > 0 %}
<p class="errornote">
Please correct the errors below.
</p>
{{ form.non_field_errors }}
{% endif %}
<fieldset class="module aligned">
{% for field in form %}
<div class="form-row">
{{ field.errors }}
{{ field.label_tag }}
{{ field }}
{% if field.field.help_text %}
<p class="help">
{{ field.field.help_text|safe }}
</p>
{% endif %}
</div>
{% endfor %}
</fieldset>
<div class="submit-row">
<input type="submit" class="default" value="Start Harvest!">
</div>
</form>
</div>
{% endblock %}
| Java |
/* $NetBSD: disklabel.h,v 1.12 2013/05/27 07:37:20 msaitoh Exp $ */
/*
* Copyright (c) 1994 Mark Brinicombe.
* Copyright (c) 1994 Brini.
* All rights reserved.
*
* This code is derived from software written for Brini by Mark Brinicombe
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. All advertising materials mentioning features or use of this software
* must display the following acknowledgement:
* This product includes software developed by Brini.
* 4. The name of the company nor the name of the author may be used to
* endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL BRINI OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* RiscBSD kernel project
*
* disklabel.h
*
* machine specific disk label info
*
* Created : 04/10/94
*/
#ifndef _ARM_DISKLABEL_H_
#define _ARM_DISKLABEL_H_
#ifndef LABELUSESMBR
#define LABELUSESMBR 1 /* use MBR partitioning */
#endif
#define LABELSECTOR 1 /* sector containing label */
#define LABELOFFSET 0 /* offset of label in sector */
#define MAXPARTITIONS 16 /* number of partitions */
#define OLDMAXPARTITIONS 8 /* old number of partitions */
#ifndef RAW_PART
#define RAW_PART 2 /* raw partition: XX?c */
#endif
/*
 * We use the highest bit of the minor number for the partition number.
 * This maintains backward compatibility with device nodes created before
 * MAXPARTITIONS was increased.
 */
/* Number of disk units addressable: 2^20 minors / MAXPARTITIONS per disk. */
#define __ARM_MAXDISKS ((1 << 20) / MAXPARTITIONS)
/* Unit (disk) number encoded in a dev_t's minor number. */
#define DISKUNIT(dev) ((minor(dev) / OLDMAXPARTITIONS) % __ARM_MAXDISKS)
/* Partition number encoded in a dev_t's minor number; partitions >=
 * OLDMAXPARTITIONS live in the high bits of the minor. */
#define DISKPART(dev) ((minor(dev) % OLDMAXPARTITIONS) + \
	((minor(dev) / (__ARM_MAXDISKS * OLDMAXPARTITIONS)) * OLDMAXPARTITIONS))
/* Builds a minor number from unit and partition (inverse of the above). */
#define DISKMINOR(unit, part) \
	(((unit) * OLDMAXPARTITIONS) + ((part) % OLDMAXPARTITIONS) + \
	((part) / OLDMAXPARTITIONS) * (__ARM_MAXDISKS * OLDMAXPARTITIONS))
#if HAVE_NBTOOL_CONFIG_H
#include <nbinclude/sys/dkbad.h>
#include <nbinclude/sys/disklabel_acorn.h>
#include <nbinclude/sys/bootblock.h>
#else
#include <sys/dkbad.h>
#include <sys/disklabel_acorn.h>
#include <sys/bootblock.h>
#endif /* HAVE_NBTOOL_CONFIG_H */
/*
 * Machine-dependent disklabel state: the raw MBR partition entries plus the
 * bad-sector table (dkbad) read from the disk.
 */
struct cpu_disklabel {
	struct mbr_partition mbrparts[MBR_PART_COUNT];
#define __HAVE_DISKLABEL_DKBAD
	struct dkbad bad;
};
#ifdef _KERNEL
struct buf;
struct disklabel;
/* MBR-based disklabel helpers (kernel only). */
/* for readdisklabel. rv != 0 -> matches, msg == NULL -> success */
int mbr_label_read(dev_t, void (*)(struct buf *), struct disklabel *,
	struct cpu_disklabel *, const char **, int *, int *);
/* for writedisklabel. rv == 0 -> doesn't match, rv > 0 -> success */
int mbr_label_locate(dev_t, void (*)(struct buf *),
	struct disklabel *, struct cpu_disklabel *, int *, int *);
#endif /* _KERNEL */
#endif /* _ARM_DISKLABEL_H_ */
| Java |
/**
* Copyright (C) 2015 Born Informatik AG (www.born.ch)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wte4j.impl.service;
import org.wte4j.WteException;
/**
 * Map JDBC types (as defined in <code>java.sql.Types</code>) to Java types.
 * The mappings have been taken from [1]
 * "JDBC 4.0 Specification, JSR 221, November 7, 2006, Appendix B, Table B-3".
 */
final class MapperSqlType {

	/** Lookup table from JDBC type constant to the mapped Java class. */
	private static final java.util.Map<Integer, Class<?>> JDBC_TO_JAVA = buildMapping();

	private MapperSqlType() {
	}

	/**
	 * Returns the Java class corresponding to the given JDBC type constant.
	 *
	 * @param jdbcType a constant from <code>java.sql.Types</code>
	 * @return the mapped Java class
	 * @throws WteException if the JDBC type is invalid or has no mapping
	 */
	public static Class<?> map(int jdbcType) {
		Class<?> javaType = JDBC_TO_JAVA.get(Integer.valueOf(jdbcType));
		if (javaType == null) {
			// NULL, OTHER, JAVA_OBJECT, DISTINCT, BINARY, VARBINARY,
			// LONGVARBINARY and unknown codes are deliberately unmapped.
			throw new WteException("invalid or unmapped SQL type (" + jdbcType
					+ ")");
		}
		return javaType;
	}

	/** Builds the JDBC-to-Java lookup table once, at class initialization. */
	private static java.util.Map<Integer, Class<?>> buildMapping() {
		java.util.Map<Integer, Class<?>> m = new java.util.HashMap<Integer, Class<?>>();
		m.put(java.sql.Types.BIT, java.lang.Boolean.class);
		m.put(java.sql.Types.BOOLEAN, java.lang.Boolean.class);
		m.put(java.sql.Types.TINYINT, java.lang.Integer.class);
		m.put(java.sql.Types.SMALLINT, java.lang.Integer.class);
		m.put(java.sql.Types.INTEGER, java.lang.Integer.class);
		m.put(java.sql.Types.BIGINT, java.lang.Long.class);
		m.put(java.sql.Types.FLOAT, java.lang.Double.class);
		m.put(java.sql.Types.DOUBLE, java.lang.Double.class);
		m.put(java.sql.Types.REAL, java.lang.Float.class);
		m.put(java.sql.Types.NUMERIC, java.math.BigDecimal.class); // per [1] Table B-1
		m.put(java.sql.Types.DECIMAL, java.math.BigDecimal.class);
		m.put(java.sql.Types.CHAR, java.lang.String.class);
		m.put(java.sql.Types.VARCHAR, java.lang.String.class);
		m.put(java.sql.Types.LONGVARCHAR, java.lang.String.class);
		m.put(java.sql.Types.DATE, java.sql.Date.class);
		m.put(java.sql.Types.TIME, java.sql.Time.class);
		m.put(java.sql.Types.TIMESTAMP, java.sql.Timestamp.class);
		m.put(java.sql.Types.STRUCT, java.sql.Struct.class);
		m.put(java.sql.Types.ARRAY, java.sql.Array.class);
		m.put(java.sql.Types.BLOB, java.sql.Blob.class);
		m.put(java.sql.Types.CLOB, java.sql.Clob.class);
		m.put(java.sql.Types.REF, java.sql.Ref.class);
		m.put(java.sql.Types.DATALINK, java.net.URL.class);
		m.put(java.sql.Types.ROWID, java.sql.RowId.class);
		return m;
	}
}
| Java |
/* Copyright 2017 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "tensorflow/compiler/xla/tools/parser/hlo_parser.h"
#include "tensorflow/compiler/xla/literal_util.h"
#include "tensorflow/compiler/xla/service/hlo_opcode.h"
#include "tensorflow/compiler/xla/shape_util.h"
#include "tensorflow/compiler/xla/util.h"
#include "tensorflow/core/lib/gtl/map_util.h"
#include "tensorflow/core/lib/strings/strcat.h"
#include "tensorflow/core/lib/strings/stringprintf.h"
namespace xla {
namespace tools {
namespace {
using tensorflow::StringPiece;
using tensorflow::gtl::optional;
using tensorflow::str_util::Split;
using tensorflow::str_util::SplitAndParseAsInts;
using tensorflow::strings::Printf;
using tensorflow::strings::StrAppend;
using tensorflow::strings::StrCat;
const double kF16max = 65504;
// Parser for the HloModule::ToString() format text.
//
// A hand-written recursive-descent parser driven by HloLexer. Every ParseXXX
// method returns false on error after recording a diagnostic via
// Error()/TokenError(); GetError() returns the accumulated diagnostics.
class HloParser {
 public:
  using LocTy = HloLexer::LocTy;

  explicit HloParser(StringPiece str, const HloModuleConfig& config)
      : lexer_(str), config_(config) {}

  // Runs the parser. Returns false if an error occurred.
  bool Run();

  // Returns the parsed HloModule.
  std::unique_ptr<HloModule> ConsumeHloModule() { return std::move(module_); }

  // Returns the error information.
  string GetError() const { return tensorflow::str_util::Join(error_, "\n"); }

 private:
  // ParseXXX returns false if an error occurred.
  bool ParseHloModule();
  bool ParseComputations();
  bool ParseComputation(HloComputation** entry_computation);
  bool ParseInstructionList(HloComputation::Builder* builder,
                            string* root_name);
  bool ParseInstruction(HloComputation::Builder* builder, string* root_name);
  bool ParseControlPredecessors(HloInstruction* instruction);
  bool ParseLiteral(std::unique_ptr<Literal>* literal, const Shape& shape);
  bool ParseTupleLiteral(std::unique_ptr<Literal>* literal, const Shape& shape);
  bool ParseNonTupleLiteral(std::unique_ptr<Literal>* literal,
                            const Shape& shape);

  // Sets the sub-value of literal at the given index to the given value. The
  // literal's shape must have the default layout.
  bool SetValueInLiteral(int64 value, int64 linear_index, Literal* literal);
  bool SetValueInLiteral(double value, int64 linear_index, Literal* literal);
  bool SetValueInLiteral(bool value, int64 linear_index, Literal* literal);
  template <typename LiteralNativeT, typename ParsedElemT>
  bool SetValueInLiteralHelper(ParsedElemT value, int64 linear_index,
                               Literal* literal);

  bool ParseOperands(std::vector<HloInstruction*>* operands);
  // Fills parsed operands into 'operands' and expects a certain number of
  // operands.
  bool ParseOperands(std::vector<HloInstruction*>* operands,
                     const int expected_size);

  // Describes the start, limit, and stride on every dimension of the operand
  // being sliced.
  struct SliceRanges {
    std::vector<int64> starts;
    std::vector<int64> limits;
    std::vector<int64> strides;
  };

  // Types of attributes.
  enum class AttrTy {
    kInt64,
    kInt32,
    kFloat,
    kString,
    kBracedInt64List,
    kHloComputation,
    kWindow,
    kConvolutionDimensionNumbers,
    kSharding,
    kInstructionList,
    kSliceRanges,
    kPaddingConfig,
    kMetadata,
    kFusionKind,
    kDistribution,
  };

  struct AttrConfig {
    bool required; // whether it's required or optional
    AttrTy attr_type; // what type it is
    void* result; // where to store the parsed result.
  };

  // attributes ::= (',' attribute)*
  //
  // Parses attributes given names and configs of the attributes. Each parsed
  // result is passed back through the result pointer in corresponding
  // AttrConfig. Note that the result pointer must point to an optional<T> typed
  // variable which outlives this function. Returns false on error. You should
  // not use any of the results if this function failed.
  //
  // Example usage:
  //
  //  std::unordered_map<string, AttrConfig> attrs;
  //  optional<int64> foo;
  //  attrs["foo"] = {/*required=*/false, AttrTy::kInt64, &foo};
  //  optional<Window> bar;
  //  attrs["bar"] = {/*required=*/true, AttrTy::kWindow, &bar};
  //  if (!ParseAttributes(attrs)) {
  //    return false; // Do not use 'foo' 'bar' if failed.
  //  }
  //  // Do something with 'bar'.
  //  if (foo) { // If attr foo is seen, do something with 'foo'. }
  //
  bool ParseAttributes(const std::unordered_map<string, AttrConfig>& attrs);

  // sub_attributes ::= '{' (','? attribute)* '}'
  //
  // Usage is the same as ParseAttributes. See immediately above.
  bool ParseSubAttributes(const std::unordered_map<string, AttrConfig>& attrs);

  // Parses one attribute. If it has already been seen, return error. Returns
  // true and adds to seen_attrs on success.
  //
  // Do not call this except in ParseAttributes or ParseSubAttributes.
  bool ParseAttributeHelper(const std::unordered_map<string, AttrConfig>& attrs,
                            std::unordered_set<string>* seen_attrs);

  // Parses a name and finds the corresponding hlo computation.
  bool ParseComputationName(HloComputation** value);
  // Parses a list of names and finds the corresponding hlo instructions.
  bool ParseInstructionNames(std::vector<HloInstruction*>* instructions);
  bool ParseWindow(Window* window);
  bool ParseConvolutionDimensionNumbers(ConvolutionDimensionNumbers* dnums);
  bool ParsePaddingConfig(PaddingConfig* padding);
  bool ParseMetadata(OpMetadata* metadata);
  bool ParseSharding(OpSharding* sharding);
  bool ParseSingleSharding(OpSharding* sharding, bool lbrace_pre_lexed);

  // Parses a sub-attribute of the window attribute, e.g., size=1x2x3.
  bool ParseDxD(const string& name, std::vector<int64>* result);
  // Parses window's pad sub-attribute, e.g., pad=0_0x3x3.
  bool ParseWindowPad(std::vector<std::vector<int64>>* pad);

  bool ParseSliceRanges(SliceRanges* result);
  bool ParseInt64List(const TokKind start, const TokKind end,
                      const TokKind delim, std::vector<int64>* result);
  bool ParseParamListToShape(Shape* shape, LocTy* shape_loc);
  bool ParseParamList();
  bool ParseName(string* result);
  bool ParseAttributeName(string* result);
  bool ParseString(string* result);
  bool ParseShape(Shape* result);
  bool ParseOpcode(HloOpcode* result);
  bool ParseFusionKind(HloInstruction::FusionKind* result);
  bool ParseRandomDistribution(RandomDistribution* result);
  bool ParseInt64(int64* result);
  bool ParseDouble(double* result);
  bool ParseBool(bool* result);
  bool ParseToken(TokKind kind, const string& msg);

  // Returns true if the current token is the beginning of a shape.
  bool CanBeShape();
  // Returns true if the current token is the beginning of a
  // param_list_to_shape.
  bool CanBeParamListToShape();

  // Logs the current parsing line and the given message. Always returns false.
  bool TokenError(StringPiece msg);
  bool Error(LocTy loc, StringPiece msg);

  // If the current token is 'kind', eats it (i.e. lexes the next token) and
  // returns true.
  bool EatIfPresent(TokKind kind);
  // Parses a shape, and returns true if the result is compatible with the given
  // shape.
  bool EatShapeAndCheckCompatible(const Shape& shape);

  // Adds the instruction to the pool. Returns false and emits an error if the
  // instruction already exists.
  bool AddInstruction(const string& name, HloInstruction* instruction,
                      LocTy name_loc);
  // Adds the computation to the pool. Returns false and emits an error if the
  // computation already exists.
  bool AddComputation(const string& name, HloComputation* computation,
                      LocTy name_loc);

  // The map from the instruction name to the instruction. This does not own the
  // instructions.
  std::unordered_map<string, HloInstruction*> instruction_pool_;
  // The map from the computation name to the computation. Non-owning.
  std::unordered_map<string, HloComputation*> computation_pool_;

  // Token source for the recursive descent.
  HloLexer lexer_;
  // The module being built; created in ParseHloModule.
  std::unique_ptr<HloModule> module_;
  // Computations parsed so far, in textual order; ownership is transferred to
  // module_ in ParseComputations.
  std::vector<std::unique_ptr<HloComputation>> computations_;
  // Configuration used to construct module_.
  const HloModuleConfig config_;
  // Accumulated diagnostic messages; see Error() and GetError().
  std::vector<string> error_;
};
// Records a three-line diagnostic (location + message, the offending source
// line, and a caret marker under the offending column), logs it at VLOG(1),
// and returns false so callers can simply `return Error(...)`.
bool HloParser::Error(LocTy loc, StringPiece msg) {
  const auto position = lexer_.GetLineAndColumn(loc);
  const unsigned line_no = position.first;
  const unsigned col_no = position.second;
  // Columns are 1-based, so indent the caret by col_no - 1 spaces; an unknown
  // column (0) yields an empty marker line.
  string marker;
  if (col_no != 0) {
    marker = StrCat(string(col_no - 1, ' '), "^");
  }
  std::vector<string> pieces = {
      StrCat("was parsing ", line_no, ":", col_no, ": error: ", msg),
      lexer_.GetLine(loc).ToString(), marker};
  error_.push_back(tensorflow::str_util::Join(pieces, "\n"));
  VLOG(1) << "Error: " << error_.back();
  return false;
}
// Reports an error at the lexer's current position. Always returns false.
bool HloParser::TokenError(StringPiece msg) {
  const LocTy current_loc = lexer_.GetLoc();
  return Error(current_loc, msg);
}
// Runs the parser over the whole input. Returns false if an error occurred.
bool HloParser::Run() {
  lexer_.Lex();  // Prime the lexer so GetKind() sees the first token.
  const bool parsed_ok = ParseHloModule();
  return parsed_ok;
}
// ::= 'HloModule' name computations
bool HloParser::ParseHloModule() {
if (lexer_.GetKind() != TokKind::kw_HloModule) {
return TokenError("expects HloModule");
}
// Eat 'HloModule'
lexer_.Lex();
string name;
if (!ParseName(&name)) {
return false;
}
module_ = MakeUnique<HloModule>(name, config_);
return ParseComputations();
}
// computations ::= (computation)+
//
// Parses every computation until EOF and transfers ownership of each one to
// module_: the computation marked ENTRY (or the last one parsed, if none is
// marked) becomes the entry computation; all others are added as embedded
// computations. Also copies any explicit layouts from the entry computation's
// parameters/root into the module's entry computation layout.
bool HloParser::ParseComputations() {
  HloComputation* entry_computation = nullptr;
  do {
    if (!ParseComputation(&entry_computation)) {
      return false;
    }
  } while (lexer_.GetKind() != TokKind::kEof);
  for (int i = 0; i < computations_.size(); i++) {
    // If entry_computation is not nullptr, it means the computation it pointed
    // to is marked with "ENTRY"; otherwise, no computation is marked with
    // "ENTRY", and we use the last computation as the entry computation. We
    // add the non-entry computations as embedded computations to the module.
    if ((entry_computation != nullptr &&
         computations_[i].get() != entry_computation) ||
        (entry_computation == nullptr && i != computations_.size() - 1)) {
      module_->AddEmbeddedComputation(std::move(computations_[i]));
      continue;
    }
    auto computation =
        module_->AddEntryComputation(std::move(computations_[i]));
    // The parameters and result layouts were set to default layout. Here we
    // set the layouts to what the hlo text says.
    for (int p = 0; p < computation->num_parameters(); p++) {
      const Shape& param_shape = computation->parameter_instruction(p)->shape();
      if (param_shape.has_layout()) {
        module_->mutable_entry_computation_layout()
            ->mutable_parameter_layout(p)
            ->ResetLayout(param_shape.layout());
      }
    }
    const Shape& result_shape = computation->root_instruction()->shape();
    if (result_shape.has_layout()) {
      module_->mutable_entry_computation_layout()
          ->mutable_result_layout()
          ->ResetLayout(result_shape.layout());
    }
  }
  return true;
}
// computation ::= ('ENTRY')? name (param_list_to_shape)? instruction_list
//
// Parses one computation, appends it to computations_, and registers it in
// computation_pool_. If the computation is marked ENTRY, *entry_computation is
// set to it (an error is raised if two computations claim ENTRY). Returns
// false on parse failure, a duplicate computation name, or a root shape that
// is incompatible with a declared param_list_to_shape.
bool HloParser::ParseComputation(HloComputation** entry_computation) {
  LocTy maybe_entry_loc = lexer_.GetLoc();
  const bool is_entry_computation = EatIfPresent(TokKind::kw_ENTRY);
  string name;
  LocTy name_loc = lexer_.GetLoc();
  if (!ParseName(&name)) {
    return false;
  }
  auto builder = MakeUnique<HloComputation::Builder>(name);
  // shape_loc stays nullptr when no param_list_to_shape is present; it later
  // doubles as the "was a shape declared?" flag.
  LocTy shape_loc = nullptr;
  Shape shape;
  if (CanBeParamListToShape() && !ParseParamListToShape(&shape, &shape_loc)) {
    return false;
  }
  string root_name;
  if (!ParseInstructionList(builder.get(), &root_name)) {
    return false;
  }
  HloInstruction* root =
      tensorflow::gtl::FindPtrOrNull(instruction_pool_, root_name);
  // This means some instruction was marked as ROOT but we didn't find it in the
  // pool, which should not happen.
  if (!root_name.empty() && root == nullptr) {
    LOG(FATAL) << "instruction " << root_name
               << " was marked as ROOT but the parser has not seen it before";
  }
  // Now root can be either an existing instruction or a nullptr. If it's a
  // nullptr, the implementation of Builder will set the last instruction as
  // root instruction.
  computations_.emplace_back(builder->Build(root));
  HloComputation* computation = computations_.back().get();
  if (!root) {
    root = computation->root_instruction();
  } else {
    CHECK_EQ(root, computation->root_instruction());
  }
  // If param_list_to_shape was present, check compatibility.
  if (shape_loc != nullptr && !ShapeUtil::Compatible(root->shape(), shape)) {
    return Error(
        shape_loc,
        StrCat("Shape of computation ", name, ", ",
               ShapeUtil::HumanString(shape),
               ", is not compatible with that of its root instruction ",
               root_name, ", ", ShapeUtil::HumanString(root->shape())));
  }
  if (is_entry_computation) {
    if (*entry_computation != nullptr) {
      return Error(maybe_entry_loc, "expects only one ENTRY");
    }
    *entry_computation = computation;
  }
  return AddComputation(name, computation, name_loc);
}
// instruction_list ::= '{' instruction_list1 '}'
// instruction_list1 ::= (instruction)+
//
// Parses a brace-delimited, non-empty sequence of instructions into *builder.
// The name of the instruction marked ROOT (if any) is reported via *root_name.
bool HloParser::ParseInstructionList(HloComputation::Builder* builder,
                                     string* root_name) {
  if (!ParseToken(TokKind::kLbrace,
                  "expects '{' at the beginning of instruction list.")) {
    return false;
  }
  // At least one instruction is required; keep parsing until the closing
  // brace is the next token.
  for (;;) {
    if (!ParseInstruction(builder, root_name)) {
      return false;
    }
    if (lexer_.GetKind() == TokKind::kRbrace) {
      break;
    }
  }
  return ParseToken(TokKind::kRbrace,
                    "expects '}' at the end of instruction list.");
}
// instruction ::= ('ROOT')? name '=' shape opcode operands (attribute)*
bool HloParser::ParseInstruction(HloComputation::Builder* builder,
string* root_name) {
string name;
Shape shape;
HloOpcode opcode;
std::vector<HloInstruction*> operands;
LocTy maybe_root_loc = lexer_.GetLoc();
bool is_root = EatIfPresent(TokKind::kw_ROOT);
const LocTy name_loc = lexer_.GetLoc();
if (!ParseName(&name) ||
!ParseToken(TokKind::kEqual, "expects '=' in instruction") ||
!ParseShape(&shape) || !ParseOpcode(&opcode)) {
return false;
}
if (is_root) {
if (!root_name->empty()) {
return Error(maybe_root_loc, "one computation should have only one ROOT");
}
*root_name = name;
}
// Add optional attributes.
std::unordered_map<string, AttrConfig> attrs;
optional<OpSharding> sharding;
attrs["sharding"] = {/*required=*/false, AttrTy::kSharding, &sharding};
optional<std::vector<HloInstruction*>> predecessors;
attrs["control-predecessors"] = {/*required=*/false, AttrTy::kInstructionList,
&predecessors};
optional<OpMetadata> metadata;
attrs["metadata"] = {/*required=*/false, AttrTy::kMetadata, &metadata};
HloInstruction* instruction;
switch (opcode) {
case HloOpcode::kParameter: {
int64 parameter_number;
if (!ParseToken(TokKind::kLparen,
"expects '(' before parameter number") ||
!ParseInt64(¶meter_number) ||
!ParseToken(TokKind::kRparen, "expects ')' after parameter number") ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateParameter(parameter_number, shape, name));
break;
}
case HloOpcode::kConstant: {
std::unique_ptr<Literal> literal;
if (!ParseToken(TokKind::kLparen,
"expects '(' before constant literal") ||
!ParseLiteral(&literal, shape) ||
!ParseToken(TokKind::kRparen, "expects ')' after constant literal") ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateConstant(std::move(literal)));
break;
}
// Unary ops.
case HloOpcode::kAbs:
case HloOpcode::kRoundNearestAfz:
case HloOpcode::kBitcast:
case HloOpcode::kCeil:
case HloOpcode::kCopy:
case HloOpcode::kCos:
case HloOpcode::kExp:
case HloOpcode::kImag:
case HloOpcode::kIsFinite:
case HloOpcode::kFloor:
case HloOpcode::kLog:
case HloOpcode::kNot:
case HloOpcode::kNegate:
case HloOpcode::kReal:
case HloOpcode::kSign:
case HloOpcode::kSin:
case HloOpcode::kSort:
case HloOpcode::kTanh: {
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateUnary(shape, opcode, operands[0]));
break;
}
// Binary ops.
case HloOpcode::kAdd:
case HloOpcode::kDivide:
case HloOpcode::kMultiply:
case HloOpcode::kSubtract:
case HloOpcode::kAtan2:
case HloOpcode::kComplex:
case HloOpcode::kEq:
case HloOpcode::kGe:
case HloOpcode::kGt:
case HloOpcode::kLe:
case HloOpcode::kLt:
case HloOpcode::kNe:
case HloOpcode::kMaximum:
case HloOpcode::kMinimum:
case HloOpcode::kPower:
case HloOpcode::kRemainder:
case HloOpcode::kAnd:
case HloOpcode::kOr:
case HloOpcode::kShiftLeft:
case HloOpcode::kShiftRightArithmetic:
case HloOpcode::kShiftRightLogical: {
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateBinary(
shape, opcode, operands[0], operands[1]));
break;
}
// Ternary ops.
case HloOpcode::kClamp:
case HloOpcode::kSelect: {
if (!ParseOperands(&operands, /*expected_size=*/3) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateTernary(
shape, opcode, operands[0], operands[1], operands[2]));
break;
}
// Other supported ops.
case HloOpcode::kConvert: {
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateConvert(shape, operands[0]));
break;
}
case HloOpcode::kBitcastConvert: {
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateBitcastConvert(shape, operands[0]));
break;
}
case HloOpcode::kCrossReplicaSum: {
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateCrossReplicaSum(shape, operands));
break;
}
case HloOpcode::kReshape: {
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateReshape(shape, operands[0]));
break;
}
case HloOpcode::kTuple: {
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateTuple(operands));
break;
}
case HloOpcode::kWhile: {
optional<HloComputation*> condition;
optional<HloComputation*> body;
attrs["condition"] = {/*required=*/true, AttrTy::kHloComputation,
&condition};
attrs["body"] = {/*required=*/true, AttrTy::kHloComputation, &body};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateWhile(
shape, *condition, *body, /*init=*/operands[0]));
break;
}
case HloOpcode::kRecv: {
optional<int64> channel_id;
attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
if (!ParseOperands(&operands, /*expected_size=*/0) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateRecv(shape.tuple_shapes(0), *channel_id));
break;
}
case HloOpcode::kRecvDone: {
optional<int64> channel_id;
attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
if (channel_id != operands[0]->channel_id()) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateRecvDone(operands[0]));
break;
}
case HloOpcode::kSend: {
optional<int64> channel_id;
attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateSend(operands[0], *channel_id));
break;
}
case HloOpcode::kSendDone: {
optional<int64> channel_id;
attrs["channel_id"] = {/*required=*/true, AttrTy::kInt64, &channel_id};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
if (channel_id != operands[0]->channel_id()) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateSendDone(operands[0]));
break;
}
case HloOpcode::kGetTupleElement: {
optional<int64> index;
attrs["index"] = {/*required=*/true, AttrTy::kInt64, &index};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateGetTupleElement(shape, operands[0], *index));
break;
}
case HloOpcode::kCall: {
optional<HloComputation*> to_apply;
attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
&to_apply};
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateCall(shape, operands, *to_apply));
break;
}
case HloOpcode::kReduceWindow: {
optional<HloComputation*> reduce_computation;
optional<Window> window;
attrs["window"] = {/*required=*/false, AttrTy::kWindow, &window};
attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
&reduce_computation};
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
if (!window) {
window.emplace();
}
instruction = builder->AddInstruction(HloInstruction::CreateReduceWindow(
shape, /*operand=*/operands[0], /*init_value=*/operands[1], *window,
*reduce_computation));
break;
}
case HloOpcode::kConvolution: {
optional<Window> window;
optional<ConvolutionDimensionNumbers> dnums;
attrs["window"] = {/*required=*/false, AttrTy::kWindow, &window};
attrs["dim_labels"] = {/*required=*/true,
AttrTy::kConvolutionDimensionNumbers, &dnums};
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
if (!window) {
window.emplace();
}
instruction = builder->AddInstruction(HloInstruction::CreateConvolve(
shape, /*lhs=*/operands[0], /*rhs=*/operands[1], *window, *dnums));
break;
}
case HloOpcode::kBroadcast: {
optional<std::vector<int64>> broadcast_dimensions;
attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
&broadcast_dimensions};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateBroadcast(
shape, operands[0], *broadcast_dimensions));
break;
}
case HloOpcode::kConcatenate: {
optional<std::vector<int64>> dimensions;
attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
&dimensions};
if (!ParseOperands(&operands) || !ParseAttributes(attrs) ||
dimensions->size() != 1) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateConcatenate(
shape, operands, dimensions->at(0)));
break;
}
case HloOpcode::kMap: {
optional<HloComputation*> to_apply;
attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
&to_apply};
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateMap(shape, operands, *to_apply));
break;
}
case HloOpcode::kReduce: {
optional<HloComputation*> reduce_computation;
attrs["to_apply"] = {/*required=*/true, AttrTy::kHloComputation,
&reduce_computation};
optional<std::vector<int64>> dimensions_to_reduce;
attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
&dimensions_to_reduce};
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateReduce(
shape, /*operand=*/operands[0], /*init_value=*/operands[1],
*dimensions_to_reduce, *reduce_computation));
break;
}
case HloOpcode::kReverse: {
optional<std::vector<int64>> dimensions;
attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
&dimensions};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateReverse(shape, operands[0], *dimensions));
break;
}
case HloOpcode::kSelectAndScatter: {
optional<HloComputation*> select;
attrs["select"] = {/*required=*/true, AttrTy::kHloComputation, &select};
optional<HloComputation*> scatter;
attrs["scatter"] = {/*required=*/true, AttrTy::kHloComputation, &scatter};
optional<Window> window;
attrs["window"] = {/*required=*/false, AttrTy::kWindow, &window};
if (!ParseOperands(&operands, /*expected_size=*/3) ||
!ParseAttributes(attrs)) {
return false;
}
if (!window) {
window.emplace();
}
instruction =
builder->AddInstruction(HloInstruction::CreateSelectAndScatter(
shape, /*operand=*/operands[0], *select, *window,
/*source=*/operands[1], /*init_value=*/operands[2], *scatter));
break;
}
case HloOpcode::kSlice: {
optional<SliceRanges> slice_ranges;
attrs["slice"] = {/*required=*/true, AttrTy::kSliceRanges, &slice_ranges};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateSlice(
shape, operands[0], slice_ranges->starts, slice_ranges->limits,
slice_ranges->strides));
break;
}
case HloOpcode::kDynamicSlice: {
optional<std::vector<int64>> dynamic_slice_sizes;
attrs["dynamic_slice_sizes"] = {
/*required=*/true, AttrTy::kBracedInt64List, &dynamic_slice_sizes};
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateDynamicSlice(
shape, /*operand=*/operands[0], /*start_indices=*/operands[1],
*dynamic_slice_sizes));
break;
}
case HloOpcode::kDynamicUpdateSlice: {
if (!ParseOperands(&operands, /*expected_size=*/3) ||
!ParseAttributes(attrs)) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateDynamicUpdateSlice(
shape, /*operand=*/operands[0], /*update=*/operands[1],
/*start_indices=*/operands[2]));
break;
}
case HloOpcode::kTranspose: {
optional<std::vector<int64>> dimensions;
attrs["dimensions"] = {/*required=*/true, AttrTy::kBracedInt64List,
&dimensions};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateTranspose(shape, operands[0], *dimensions));
break;
}
case HloOpcode::kBatchNormTraining: {
optional<float> epsilon;
attrs["epsilon"] = {/*required=*/true, AttrTy::kFloat, &epsilon};
optional<int64> feature_index;
attrs["feature_index"] = {/*required=*/true, AttrTy::kInt64,
&feature_index};
if (!ParseOperands(&operands, /*expected_size=*/3) ||
!ParseAttributes(attrs)) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateBatchNormTraining(
shape, /*operand=*/operands[0], /*scale=*/operands[1],
/*offset=*/operands[2], *epsilon, *feature_index));
break;
}
case HloOpcode::kBatchNormInference: {
optional<float> epsilon;
attrs["epsilon"] = {/*required=*/true, AttrTy::kFloat, &epsilon};
optional<int64> feature_index;
attrs["feature_index"] = {/*required=*/true, AttrTy::kInt64,
&feature_index};
if (!ParseOperands(&operands, /*expected_size=*/5) ||
!ParseAttributes(attrs)) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateBatchNormInference(
shape, /*operand=*/operands[0], /*scale=*/operands[1],
/*offset=*/operands[2], /*mean=*/operands[3],
/*variance=*/operands[4], *epsilon, *feature_index));
break;
}
case HloOpcode::kBatchNormGrad: {
optional<float> epsilon;
attrs["epsilon"] = {/*required=*/true, AttrTy::kFloat, &epsilon};
optional<int64> feature_index;
attrs["feature_index"] = {/*required=*/true, AttrTy::kInt64,
&feature_index};
if (!ParseOperands(&operands, /*expected_size=*/5) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateBatchNormGrad(
shape, /*operand=*/operands[0], /*scale=*/operands[1],
/*mean=*/operands[2], /*variance=*/operands[3],
/*grad_output=*/operands[4], *epsilon, *feature_index));
break;
}
case HloOpcode::kPad: {
optional<PaddingConfig> padding;
attrs["padding"] = {/*required=*/true, AttrTy::kPaddingConfig, &padding};
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreatePad(
shape, operands[0], /*padding_value=*/operands[1], *padding));
break;
}
case HloOpcode::kFusion: {
optional<HloComputation*> fusion_computation;
attrs["calls"] = {/*required=*/true, AttrTy::kHloComputation,
&fusion_computation};
optional<HloInstruction::FusionKind> fusion_kind;
attrs["kind"] = {/*required=*/true, AttrTy::kFusionKind, &fusion_kind};
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateFusion(
shape, *fusion_kind, operands, *fusion_computation));
break;
}
case HloOpcode::kInfeed: {
optional<string> config;
attrs["infeed_config"] = {/*required=*/false, AttrTy::kString, &config};
if (!ParseOperands(&operands, /*expected_size=*/0) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateInfeed(shape, config ? *config : ""));
break;
}
case HloOpcode::kOutfeed: {
optional<string> config;
attrs["outfeed_config"] = {/*required=*/false, AttrTy::kString, &config};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateOutfeed(
shape, operands[0], config ? *config : ""));
break;
}
case HloOpcode::kRng: {
optional<RandomDistribution> distribution;
attrs["distribution"] = {/*required=*/true, AttrTy::kDistribution,
&distribution};
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(
HloInstruction::CreateRng(shape, *distribution, operands));
break;
}
case HloOpcode::kReducePrecision: {
optional<int64> exponent_bits;
optional<int64> mantissa_bits;
attrs["exponent_bits"] = {/*required=*/true, AttrTy::kInt64,
&exponent_bits};
attrs["mantissa_bits"] = {/*required=*/true, AttrTy::kInt64,
&mantissa_bits};
if (!ParseOperands(&operands, /*expected_size=*/1) ||
!ParseAttributes(attrs)) {
return false;
}
instruction =
builder->AddInstruction(HloInstruction::CreateReducePrecision(
shape, operands[0], static_cast<int>(*exponent_bits),
static_cast<int>(*mantissa_bits)));
break;
}
case HloOpcode::kConditional: {
optional<HloComputation*> true_computation;
optional<HloComputation*> false_computation;
attrs["true_computation"] = {/*required=*/true, AttrTy::kHloComputation,
&true_computation};
attrs["false_computation"] = {/*required=*/true, AttrTy::kHloComputation,
&false_computation};
if (!ParseOperands(&operands, /*expected_size=*/3) ||
!ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateConditional(
shape, /*pred=*/operands[0],
/*true_computation_arg=*/operands[1], *true_computation,
/*false_computation_arg=*/operands[2], *false_computation));
break;
}
case HloOpcode::kCustomCall: {
optional<string> custom_call_target;
attrs["custom_call_target"] = {/*required=*/true, AttrTy::kString,
&custom_call_target};
if (!ParseOperands(&operands) || !ParseAttributes(attrs)) {
return false;
}
instruction = builder->AddInstruction(HloInstruction::CreateCustomCall(
shape, operands, *custom_call_target));
break;
}
case HloOpcode::kDot: {
optional<std::vector<int64>> lhs_contracting_dims;
attrs["lhs_contracting_dims"] = {
/*required=*/false, AttrTy::kBracedInt64List, &lhs_contracting_dims};
optional<std::vector<int64>> rhs_contracting_dims;
attrs["rhs_contracting_dims"] = {
/*required=*/false, AttrTy::kBracedInt64List, &rhs_contracting_dims};
optional<std::vector<int64>> lhs_batch_dims;
attrs["lhs_batch_dims"] = {/*required=*/false, AttrTy::kBracedInt64List,
&lhs_batch_dims};
optional<std::vector<int64>> rhs_batch_dims;
attrs["rhs_batch_dims"] = {/*required=*/false, AttrTy::kBracedInt64List,
&rhs_batch_dims};
if (!ParseOperands(&operands, /*expected_size=*/2) ||
!ParseAttributes(attrs)) {
return false;
}
DotDimensionNumbers dnum;
if (lhs_contracting_dims) {
*dnum.mutable_lhs_contracting_dimensions() = {
lhs_contracting_dims->begin(), lhs_contracting_dims->end()};
}
if (rhs_contracting_dims) {
*dnum.mutable_rhs_contracting_dimensions() = {
rhs_contracting_dims->begin(), rhs_contracting_dims->end()};
}
if (lhs_batch_dims) {
*dnum.mutable_lhs_batch_dimensions() = {lhs_batch_dims->begin(),
lhs_batch_dims->end()};
}
if (rhs_batch_dims) {
*dnum.mutable_rhs_batch_dimensions() = {rhs_batch_dims->begin(),
rhs_batch_dims->end()};
}
instruction = builder->AddInstruction(
HloInstruction::CreateDot(shape, operands[0], operands[1], dnum));
break;
}
case HloOpcode::kTrace:
return TokenError(StrCat("parsing not yet implemented for op: ",
HloOpcodeString(opcode)));
}
instruction->set_name(name);
// Add common attrs (sharding, control predecessors) to the instruction, if
// they were seen.
if (sharding) {
instruction->set_sharding(
HloSharding::FromProto(sharding.value()).ValueOrDie());
}
if (predecessors) {
for (auto* pre : *predecessors) {
Status status = pre->AddControlDependencyTo(instruction);
if (!status.ok()) {
return Error(name_loc, StrCat("error adding control dependency for: ",
name, " status: ", status.ToString()));
}
}
}
if (metadata) {
instruction->set_metadata(*metadata);
}
return AddInstruction(name, instruction, name_loc);
} // NOLINT(readability/fn_size)
// sharding ::= '{' (single_sharding | tuple_sharding) '}'
//
// tuple_sharding ::= single_sharding* (',' single_sharding)*
//
// Parses a sharding attribute into `sharding`. A lone sharding is delegated
// to ParseSingleSharding; otherwise each element of a (possibly empty) tuple
// is parsed into its own tuple_shardings entry and the result is marked TUPLE.
bool HloParser::ParseSharding(OpSharding* sharding) {
  // A single sharding starts with '{' and is not followed by '{'.
  // A tuple sharding starts with '{' and is followed by '{', or is '{''}' for
  // an empty tuple.
  if (!ParseToken(TokKind::kLbrace,
                  "expected '{' to start sharding attribute")) {
    return false;
  }
  if (lexer_.GetKind() != TokKind::kLbrace &&
      lexer_.GetKind() != TokKind::kRbrace) {
    // Single sharding: the outer '{' has already been consumed above.
    return ParseSingleSharding(sharding, /*lbrace_pre_lexed=*/true);
  }
  // Tuple sharding.
  // Allow empty tuple shardings.
  if (lexer_.GetKind() != TokKind::kRbrace) {
    do {
      if (!ParseSingleSharding(sharding->add_tuple_shardings(),
                               /*lbrace_pre_lexed=*/false)) {
        return false;
      }
    } while (EatIfPresent(TokKind::kComma));
  }
  sharding->set_type(OpSharding::Type::OpSharding_Type_TUPLE);
  return ParseToken(TokKind::kRbrace, "expected '}' to end sharding attribute");
}
// single_sharding
//   ::= '{' 'replicated'? 'maximal'? ('device=' int)? shape?
//           ('devices=' ('[' dims ']')* device_list)? '}'
// dims ::= int_list   device_list ::= int_list
//
// Parses one (non-tuple) sharding into `sharding`. If `lbrace_pre_lexed` the
// opening '{' has already been consumed by the caller. After collecting the
// fields, validates that they form exactly one of the three legal kinds:
// replicated (no devices, no tile shape), maximal (exactly one device, no
// tile shape), or tiled (tile shape + assignment dims + >1 devices).
bool HloParser::ParseSingleSharding(OpSharding* sharding,
                                    bool lbrace_pre_lexed) {
  if (!lbrace_pre_lexed &&
      !ParseToken(TokKind::kLbrace,
                  "expected '{' to start sharding attribute")) {
    return false;
  }
  LocTy loc = lexer_.GetLoc();
  bool maximal = false;
  bool replicated = false;
  std::vector<int64> devices;
  std::vector<int64> tile_assignment_dimensions;
  Shape tile_shape;
  // Field-collection loop: accept the sub-attributes in any order until '}'.
  while (lexer_.GetKind() != TokKind::kRbrace) {
    switch (lexer_.GetKind()) {
      case TokKind::kw_maximal:
        maximal = true;
        lexer_.Lex();
        break;
      case TokKind::kw_replicated:
        replicated = true;
        lexer_.Lex();
        break;
      case TokKind::kAttributeName: {
        if (lexer_.GetStrVal() == "device") {
          if (lexer_.Lex() != TokKind::kInt) {
            return TokenError("device= attribute must be an integer");
          }
          // device= replaces (rather than appends to) the device list.
          devices = {lexer_.GetInt64Val()};
          lexer_.Lex();
        } else if (lexer_.GetStrVal() == "devices") {
          lexer_.Lex();
          if (!ParseToken(TokKind::kLsquare,
                          "expected '[' to start sharding devices shape")) {
            return false;
          }
          // '[' dims ']' — the shape of the tile assignment.
          do {
            int64 dim;
            if (!ParseInt64(&dim)) {
              return false;
            }
            tile_assignment_dimensions.push_back(dim);
          } while (EatIfPresent(TokKind::kComma));
          if (!ParseToken(TokKind::kRsquare,
                          "expected ']' to start sharding devices shape")) {
            return false;
          }
          // Flat list of device ids filling that shape.
          do {
            int64 device;
            if (!ParseInt64(&device)) {
              return false;
            }
            devices.push_back(device);
          } while (EatIfPresent(TokKind::kComma));
        } else {
          return TokenError(
              "unknown attribute in sharding: expected device= or devices=");
        }
        break;
      }
      case TokKind::kShape:
        tile_shape = lexer_.GetShapeVal();
        lexer_.Lex();
        break;
      case TokKind::kRbrace:
        break;
      default:
        return TokenError("unexpected token");
    }
  }
  // Validation: exactly one of the three sharding kinds must hold.
  if (replicated) {
    if (!devices.empty()) {
      return Error(loc,
                   "replicated shardings should not have any devices assigned");
    }
    if (!ShapeUtil::Equal(tile_shape, Shape())) {
      return Error(loc,
                   "replicated shardings should not have any tile shape set");
    }
    sharding->set_type(OpSharding::Type::OpSharding_Type_REPLICATED);
  } else if (maximal) {
    if (devices.size() != 1) {
      return Error(loc,
                   "maximal shardings should have exactly one device assigned");
    }
    if (!ShapeUtil::Equal(tile_shape, Shape())) {
      return Error(loc, "maximal shardings should not have any tile shape set");
    }
    sharding->set_type(OpSharding::Type::OpSharding_Type_MAXIMAL);
    sharding->add_tile_assignment_devices(devices[0]);
  } else {
    // Tiled ("OTHER") sharding: needs a tile shape, assignment dims, and
    // more than one device.
    if (devices.size() <= 1) {
      return Error(
          loc, "non-maximal shardings must have more than one device assigned");
    }
    if (ShapeUtil::Equal(tile_shape, Shape())) {
      return Error(loc, "non-maximal shardings should have a tile shape set");
    }
    if (tile_assignment_dimensions.empty()) {
      return Error(
          loc,
          "non-maximal shardings must have a tile assignment list including "
          "dimensions");
    }
    sharding->set_type(OpSharding::Type::OpSharding_Type_OTHER);
    *sharding->mutable_tile_shape() = tile_shape;
    for (int64 dim : tile_assignment_dimensions) {
      sharding->add_tile_assignment_dimensions(dim);
    }
    for (int64 device : devices) {
      sharding->add_tile_assignment_devices(device);
    }
  }
  // Consume the closing '}'.
  lexer_.Lex();
  return true;
}
// instruction_names ::= '{' name (',' name)* '}'
//
// Parses a non-empty, brace-delimited, comma-separated list of instruction
// names, resolves each against the instructions parsed so far, and appends
// the resolved pointers to `instructions`. Returns false (with an error
// recorded) on syntax errors or undefined names.
bool HloParser::ParseInstructionNames(
    std::vector<HloInstruction*>* instructions) {
  if (!ParseToken(TokKind::kLbrace,
                  "expects '{' at the beginning of instruction name list")) {
    return false;
  }
  LocTy loc = lexer_.GetLoc();
  do {
    string name;
    if (!ParseName(&name)) {
      // Fixed grammar in the user-facing message ("a instruction" ->
      // "an instruction").
      return Error(loc, "expects an instruction name");
    }
    HloInstruction* instr =
        tensorflow::gtl::FindPtrOrNull(instruction_pool_, name);
    if (!instr) {
      return TokenError(
          Printf("instruction '%s' is not defined", name.c_str()));
    }
    instructions->push_back(instr);
  } while (EatIfPresent(TokKind::kComma));
  return ParseToken(TokKind::kRbrace,
                    "expects '}' at the end of instruction name list");
}
// Writes the parsed integer `value` at `linear_index` of `literal`,
// dispatching on the literal's integral element type so the value is stored
// in the correct native representation. LOG(FATAL)s on non-integral types,
// which callers must have excluded already.
bool HloParser::SetValueInLiteral(int64 value, int64 linear_index,
                                  Literal* literal) {
  const Shape& shape = literal->shape();
  switch (shape.element_type()) {
    case S8:
      return SetValueInLiteralHelper<int8>(value, linear_index, literal);
    case S16:
      return SetValueInLiteralHelper<int16>(value, linear_index, literal);
    case S32:
      return SetValueInLiteralHelper<int32>(value, linear_index, literal);
    case S64:
      return SetValueInLiteralHelper<int64>(value, linear_index, literal);
    case U8:
      return SetValueInLiteralHelper<uint8>(value, linear_index, literal);
    case U16:
      // Fixed: this case previously dispatched to
      // SetValueInLiteralHelper<uint8>, silently truncating U16 values above
      // 255 and applying the wrong range check.
      return SetValueInLiteralHelper<uint16>(value, linear_index, literal);
    case U32:
      return SetValueInLiteralHelper<uint32>(value, linear_index, literal);
    case U64:
      return SetValueInLiteralHelper<uint64>(value, linear_index, literal);
    default:
      LOG(FATAL) << "unknown integral primitive type "
                 << PrimitiveType_Name(shape.element_type());
  }
}
bool HloParser::SetValueInLiteral(double value, int64 linear_index,
Literal* literal) {
const Shape& shape = literal->shape();
switch (shape.element_type()) {
case F16:
return SetValueInLiteralHelper<half>(value, linear_index, literal);
case BF16:
return SetValueInLiteralHelper<bfloat16>(value, linear_index, literal);
case F32:
return SetValueInLiteralHelper<float>(value, linear_index, literal);
case F64:
return SetValueInLiteralHelper<double>(value, linear_index, literal);
default:
LOG(FATAL) << "unknown floating point primitive type "
<< PrimitiveType_Name(shape.element_type());
}
}
bool HloParser::SetValueInLiteral(bool value, int64 linear_index,
Literal* literal) {
const Shape& shape = literal->shape();
switch (shape.element_type()) {
case PRED:
return SetValueInLiteralHelper<bool>(value, linear_index, literal);
default:
LOG(FATAL) << PrimitiveType_Name(shape.element_type())
<< " is not PRED type";
}
}
// Stores `value` into `literal` at `linear_index` after validating that the
// index is in bounds and that the value fits the literal's native element
// type. Non-finite values (NaN, +/-inf) skip the range check; F16/BF16 are
// checked against the half-precision max since their range is narrower than
// the parsed double's.
template <typename LiteralNativeT, typename ParsedElemT>
bool HloParser::SetValueInLiteralHelper(ParsedElemT value, int64 linear_index,
                                        Literal* literal) {
  // Check that linear_index is in range.
  if (linear_index >= ShapeUtil::ElementsIn(literal->shape())) {
    // Fixed typo in the user-facing error message: "trys" -> "tries".
    return TokenError(
        StrCat("tries to set value ", value, " to a literal in shape ",
               ShapeUtil::HumanString(literal->shape()), " at linear index ",
               linear_index, ", but the index is out of range"));
  }
  if (std::isnan(value) ||
      (std::numeric_limits<ParsedElemT>::has_infinity &&
       (std::numeric_limits<ParsedElemT>::infinity() == value ||
        -std::numeric_limits<ParsedElemT>::infinity() == value))) {
    // Skip range checking for non-finite value.
  } else if (literal->shape().element_type() == F16 ||
             literal->shape().element_type() == BF16) {
    if (value > kF16max || value < -kF16max) {
      return TokenError(StrCat(
          "value ", value, " is out of range for literal's primitive type ",
          PrimitiveType_Name(literal->shape().element_type())));
    }
  } else if (value > static_cast<ParsedElemT>(
                 std::numeric_limits<LiteralNativeT>::max()) ||
             value < static_cast<ParsedElemT>(
                 std::numeric_limits<LiteralNativeT>::lowest())) {
    // Value is out of range for LiteralNativeT.
    return TokenError(StrCat(
        "value ", value, " is out of range for literal's primitive type ",
        PrimitiveType_Name(literal->shape().element_type())));
  }
  literal->GetMutableArraySlice<LiteralNativeT>().at(linear_index) =
      static_cast<LiteralNativeT>(value);
  return true;
}
bool HloParser::EatShapeAndCheckCompatible(const Shape& shape) {
Shape new_shape;
if (!ParseShape(&new_shape)) {
return TokenError(StrCat("expects shape ", ShapeUtil::HumanString(shape)));
}
if (!ShapeUtil::Compatible(shape, new_shape)) {
return TokenError(StrCat(
"expects shape ", ShapeUtil::HumanString(shape),
", but sees a different shape: ", ShapeUtil::HumanString(new_shape)));
}
return true;
}
// literal
//  ::= tuple
//  ::= non_tuple
//
// Entry point for literal parsing: routes tuples and non-tuples to their
// dedicated parsers.
bool HloParser::ParseLiteral(std::unique_ptr<Literal>* literal,
                             const Shape& shape) {
  if (ShapeUtil::IsTuple(shape)) {
    return ParseTupleLiteral(literal, shape);
  }
  return ParseNonTupleLiteral(literal, shape);
}
// tuple
//  ::= shape '(' literal_list ')'
// literal_list
//  ::= /*empty*/
//  ::= literal (',' literal)*
//
// Parses a tuple literal of the given `shape`, recursively parsing each
// element literal, and stores the assembled tuple into `literal`.
bool HloParser::ParseTupleLiteral(std::unique_ptr<Literal>* literal,
                                  const Shape& shape) {
  if (!EatShapeAndCheckCompatible(shape)) {
    return TokenError(StrCat("expects tuple constant in shape ",
                             ShapeUtil::HumanString(shape)));
  }
  if (!ParseToken(TokKind::kLparen, "expects '(' in front of tuple elements")) {
    return false;
  }
  std::vector<std::unique_ptr<Literal>> elements(
      ShapeUtil::TupleElementCount(shape));
  if (lexer_.GetKind() == TokKind::kRparen) {
    // empty
  } else {
    // literal, (',' literal)*
    for (int i = 0; i < elements.size(); i++) {
      if (i > 0) {
        // Fixed: the return value of ParseToken was previously ignored, so a
        // missing comma between elements was silently accepted. Also fixed
        // the "exepcts" typo in the message.
        if (!ParseToken(TokKind::kComma,
                        "expects ',' to separate tuple elements")) {
          return false;
        }
      }
      if (!ParseLiteral(&elements[i],
                        ShapeUtil::GetTupleElementShape(shape, i))) {
        return TokenError(StrCat("expects the ", i, "th element"));
      }
    }
  }
  *literal = Literal::MakeTupleOwned(std::move(elements));
  // Fixed: added the missing space before "elements" (the message used to
  // render as e.g. "... the tuple with 2elements").
  return ParseToken(TokKind::kRparen,
                    StrCat("expects ')' at the end of the tuple with ",
                           ShapeUtil::TupleElementCount(shape), " elements"));
}
// non_tuple
//   ::= rank01
//   ::= rank2345
// rank2345 ::= shape nested_array
//
// Parses a non-tuple (array) literal of the given `shape` into `literal`.
// Rank 0/1 literals appear without a leading shape; higher ranks repeat the
// shape before the nested-brace array. Elements are written in linear order
// and the literal is relayouted to the target layout at the end.
bool HloParser::ParseNonTupleLiteral(std::unique_ptr<Literal>* literal,
                                     const Shape& shape) {
  const int64 rank = ShapeUtil::Rank(shape);
  if (rank > 1 && !EatShapeAndCheckCompatible(shape)) {
    return false;
  }
  // Create a literal with the given shape in default layout.
  *literal = Literal::CreateFromDimensions(shape.element_type(),
                                           AsInt64Slice(shape.dimensions()));
  int64 nest_level = 0;
  int64 linear_index = 0;
  // elems_seen_per_dim[i] is how many elements or sub-arrays we have seen for
  // the dimension i. For example, to parse f32[2,3] {{1, 2, 3}, {4, 5, 6}},
  // when we are parsing the 2nd '{' (right before '1'), we are seeing a
  // sub-array of the dimension 0, so elems_seen_per_dim[0]++. When we are at
  // the first '}' (right after '3'), it means the sub-array ends, and the
  // sub-array is supposed to contain exactly 3 elements, so check if
  // elems_seen_per_dim[1] is 3.
  std::vector<int64> elems_seen_per_dim(rank);
  // Renders the current position as a bracketed index path, e.g. "[1,2]",
  // for error messages.
  auto get_index_str = [&elems_seen_per_dim](int dim) -> string {
    std::vector<int64> elems_seen_until_dim(elems_seen_per_dim.begin(),
                                            elems_seen_per_dim.begin() + dim);
    return StrCat("[",
                  tensorflow::str_util::Join(
                      elems_seen_until_dim, ",",
                      [](string* out, const int64& num_elems) {
                        tensorflow::strings::StrAppend(out, num_elems - 1);
                      }),
                  "]");
  };
  do {
    switch (lexer_.GetKind()) {
      default:
        return TokenError("unexpected token type in a literal");
      case TokKind::kLbrace: {
        // Entering a sub-array one dimension deeper; count it against the
        // parent dimension's expected element count.
        nest_level++;
        if (nest_level > rank) {
          return TokenError(Printf(
              "expects nested array in rank %lld, but sees larger", rank));
        }
        if (nest_level > 1) {
          elems_seen_per_dim[nest_level - 2]++;
          if (elems_seen_per_dim[nest_level - 2] >
              shape.dimensions(nest_level - 2)) {
            return TokenError(Printf(
                "expects %lld elements in the %sth element, but sees more",
                shape.dimensions(nest_level - 2),
                get_index_str(nest_level - 2).c_str()));
          }
        }
        lexer_.Lex();
        break;
      }
      case TokKind::kRbrace: {
        // Closing a sub-array: it must have held exactly the dimension's
        // element count; reset the counter for the next sibling.
        nest_level--;
        if (elems_seen_per_dim[nest_level] != shape.dimensions(nest_level)) {
          return TokenError(Printf(
              "expects %lld elements in the %sth element, but sees %lld",
              shape.dimensions(nest_level), get_index_str(nest_level).c_str(),
              elems_seen_per_dim[nest_level]));
        }
        elems_seen_per_dim[nest_level] = 0;
        lexer_.Lex();
        break;
      }
      case TokKind::kComma:
      case TokKind::kComment:
        // Skip.
        lexer_.Lex();
        break;
      case TokKind::kw_true:
      case TokKind::kw_false:
      case TokKind::kInt:
      case TokKind::kDecimal:
      case TokKind::kw_nan:
      case TokKind::kw_inf:
      case TokKind::kNegInf: {
        // A scalar element. For rank > 0 it must appear at the innermost
        // nesting level, and the minor-most dimension must not overflow.
        if (rank > 0) {
          if (nest_level != rank) {
            return TokenError(
                Printf("expects nested array in rank %lld, but sees %lld", rank,
                       nest_level));
          }
          elems_seen_per_dim[rank - 1]++;
          if (elems_seen_per_dim[rank - 1] > shape.dimensions(rank - 1)) {
            return TokenError(
                Printf("expects %lld elements on the minor-most dimension, but "
                       "sees more",
                       shape.dimensions(rank - 1)));
          }
        }
        if (lexer_.GetKind() == TokKind::kw_true ||
            lexer_.GetKind() == TokKind::kw_false) {
          // TODO(congliu): bool type literals with rank >= 1 are actually
          // printed in a compact form instead of "true" or "false". Fix that.
          if (!SetValueInLiteral(lexer_.GetKind() == TokKind::kw_true,
                                 linear_index++, literal->get())) {
            return false;
          }
          lexer_.Lex();
        } else if (primitive_util::IsIntegralType(shape.element_type())) {
          LocTy loc = lexer_.GetLoc();
          int64 value;
          if (!ParseInt64(&value)) {
            return Error(loc, StrCat("expects integer for primitive type: ",
                                     PrimitiveType_Name(shape.element_type())));
          }
          if (!SetValueInLiteral(value, linear_index++, literal->get())) {
            return false;
          }
        } else if (primitive_util::IsFloatingPointType(shape.element_type())) {
          LocTy loc = lexer_.GetLoc();
          double value;
          if (!ParseDouble(&value)) {
            return Error(
                loc, StrCat("expect floating point value for primitive type: ",
                            PrimitiveType_Name(shape.element_type())));
          }
          if (!SetValueInLiteral(value, linear_index++, literal->get())) {
            return false;
          }
        } else {
          return TokenError(StrCat("unsupported premitive type ",
                                   PrimitiveType_Name(shape.element_type())));
        }
        break;
      }
    }  // end of switch
  } while (nest_level > 0);
  // Elements were stored in default layout; convert to the requested one.
  *literal = (*literal)->Relayout(shape.layout());
  return true;
}
// operands ::= '(' operands1 ')'
// operands1
//   ::= /*empty*/
//   ::= operand (, operand)*
// operand ::= (shape)? name
//
// Parses a parenthesized, comma-separated operand list, resolving each name
// against the instructions parsed so far and appending them to `operands`.
bool HloParser::ParseOperands(std::vector<HloInstruction*>* operands) {
  if (!ParseToken(TokKind::kLparen,
                  "expects '(' at the beginning of operands")) {
    return false;
  }
  if (lexer_.GetKind() == TokKind::kRparen) {
    // empty
  } else {
    do {
      LocTy loc = lexer_.GetLoc();
      string name;
      if (CanBeShape()) {
        Shape shape;
        // An operand may carry an optional shape prefix; it is consumed and
        // discarded — only the name is used for resolution.
        if (!ParseShape(&shape)) {
          return false;
        }
      }
      if (!ParseName(&name)) {
        return false;
      }
      HloInstruction* instruction =
          tensorflow::gtl::FindPtrOrNull(instruction_pool_, name);
      if (!instruction) {
        return Error(loc, StrCat("instruction does not exist: ", name));
      }
      operands->push_back(instruction);
    } while (EatIfPresent(TokKind::kComma));
  }
  return ParseToken(TokKind::kRparen, "expects ')' at the end of operands");
}
// Parses a parenthesized operand list and additionally verifies that exactly
// `expected_size` operands were supplied, reporting the mismatch at the
// location of the opening parenthesis.
bool HloParser::ParseOperands(std::vector<HloInstruction*>* operands,
                              const int expected_size) {
  const LocTy operands_loc = lexer_.GetLoc();
  if (!ParseOperands(operands)) {
    return false;
  }
  if (operands->size() != expected_size) {
    return Error(operands_loc,
                 StrCat("expects ", expected_size, " operands, but has ",
                        operands->size(), " operands"));
  }
  return true;
}
// sub_attributes ::= '{' (','? attribute)* '}'
//
// Parses a brace-delimited attribute group (e.g. the body of a window or
// dim_labels attribute) against the given attribute table `attrs`, then
// verifies every required attribute was seen.
bool HloParser::ParseSubAttributes(
    const std::unordered_map<string, AttrConfig>& attrs) {
  LocTy loc = lexer_.GetLoc();
  if (!ParseToken(TokKind::kLbrace, "expects '{' to start sub attributes")) {
    return false;
  }
  std::unordered_set<string> seen_attrs;
  if (lexer_.GetKind() == TokKind::kRbrace) {
    // empty
  } else {
    do {
      // Commas between sub-attributes are optional.
      EatIfPresent(TokKind::kComma);
      if (!ParseAttributeHelper(attrs, &seen_attrs)) {
        return false;
      }
    } while (lexer_.GetKind() != TokKind::kRbrace);
  }
  // Check that all required attrs were seen.
  for (const auto& attr_it : attrs) {
    if (attr_it.second.required &&
        seen_attrs.find(attr_it.first) == seen_attrs.end()) {
      return Error(loc, Printf("sub-attribute %s is expected but not seen",
                               attr_it.first.c_str()));
    }
  }
  return ParseToken(TokKind::kRbrace, "expects '}' to end sub attributes");
}
// attributes ::= (',' attribute)*
bool HloParser::ParseAttributes(
const std::unordered_map<string, AttrConfig>& attrs) {
LocTy loc = lexer_.GetLoc();
std::unordered_set<string> seen_attrs;
while (EatIfPresent(TokKind::kComma)) {
if (!ParseAttributeHelper(attrs, &seen_attrs)) {
return false;
}
}
// Check that all required attrs were seen.
for (const auto& attr_it : attrs) {
if (attr_it.second.required &&
seen_attrs.find(attr_it.first) == seen_attrs.end()) {
return Error(loc, Printf("attribute %s is expected but not seen",
attr_it.first.c_str()));
}
}
return true;
}
// Parses one "name=value" attribute. Looks the name up in `attrs` to find its
// expected type and output slot, rejects duplicates (tracked in `seen_attrs`)
// and unknown names, then parses the value according to the declared AttrTy
// and stores it through the type-erased result pointer.
bool HloParser::ParseAttributeHelper(
    const std::unordered_map<string, AttrConfig>& attrs,
    std::unordered_set<string>* seen_attrs) {
  LocTy loc = lexer_.GetLoc();
  string name;
  if (!ParseAttributeName(&name)) {
    return Error(loc, "error parsing attributes");
  }
  VLOG(1) << "Parsing attribute " << name;
  if (!seen_attrs->insert(name).second) {
    return Error(loc, Printf("attribute %s already exists", name.c_str()));
  }
  auto attr_it = attrs.find(name);
  if (attr_it == attrs.end()) {
    return Error(loc, Printf("unexpected attribute %s", name.c_str()));
  }
  AttrTy attr_type = attr_it->second.attr_type;
  // attr_out_ptr points at an optional<T> whose T is determined by attr_type;
  // each case below casts it back to the matching optional type.
  void* attr_out_ptr = attr_it->second.result;
  bool success = [&] {
    LocTy attr_loc = lexer_.GetLoc();
    switch (attr_type) {
      case AttrTy::kInt64: {
        int64 result;
        if (!ParseInt64(&result)) {
          return false;
        }
        static_cast<optional<int64>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kInt32: {
        // Parsed as int64 first so that out-of-range values can be reported
        // rather than silently truncated.
        int64 result;
        if (!ParseInt64(&result)) {
          return false;
        }
        if (result != static_cast<int32>(result)) {
          return Error(attr_loc, "value out of range for int32");
        }
        static_cast<optional<int32>*>(attr_out_ptr)
            ->emplace(static_cast<int32>(result));
        return true;
      }
      case AttrTy::kFloat: {
        // Parsed as double, then range-checked before narrowing to float.
        double result;
        if (!ParseDouble(&result)) {
          return false;
        }
        if (result > std::numeric_limits<float>::max() ||
            result < std::numeric_limits<float>::lowest()) {
          return Error(attr_loc, "value out of range for float");
        }
        static_cast<optional<float>*>(attr_out_ptr)
            ->emplace(static_cast<float>(result));
        return true;
      }
      case AttrTy::kHloComputation: {
        HloComputation* result;
        if (!ParseComputationName(&result)) {
          return false;
        }
        static_cast<optional<HloComputation*>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kWindow: {
        Window result;
        if (!ParseWindow(&result)) {
          return false;
        }
        static_cast<optional<Window>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kConvolutionDimensionNumbers: {
        ConvolutionDimensionNumbers result;
        if (!ParseConvolutionDimensionNumbers(&result)) {
          return false;
        }
        static_cast<optional<ConvolutionDimensionNumbers>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kSharding: {
        OpSharding sharding;
        if (!ParseSharding(&sharding)) {
          return false;
        }
        static_cast<optional<OpSharding>*>(attr_out_ptr)->emplace(sharding);
        return true;
      }
      case AttrTy::kInstructionList: {
        std::vector<HloInstruction*> result;
        if (!ParseInstructionNames(&result)) {
          return false;
        }
        static_cast<optional<std::vector<HloInstruction*>>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kFusionKind: {
        HloInstruction::FusionKind result;
        if (!ParseFusionKind(&result)) {
          return false;
        }
        static_cast<optional<HloInstruction::FusionKind>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kBracedInt64List: {
        std::vector<int64> result;
        if (!ParseInt64List(TokKind::kLbrace, TokKind::kRbrace, TokKind::kComma,
                            &result)) {
          return false;
        }
        static_cast<optional<std::vector<int64>>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
      case AttrTy::kSliceRanges: {
        SliceRanges result;
        if (!ParseSliceRanges(&result)) {
          return false;
        }
        static_cast<optional<SliceRanges>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kPaddingConfig: {
        PaddingConfig result;
        if (!ParsePaddingConfig(&result)) {
          return false;
        }
        static_cast<optional<PaddingConfig>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kString: {
        string result;
        if (!ParseString(&result)) {
          return false;
        }
        static_cast<optional<string>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kMetadata: {
        OpMetadata result;
        if (!ParseMetadata(&result)) {
          return false;
        }
        static_cast<optional<OpMetadata>*>(attr_out_ptr)->emplace(result);
        return true;
      }
      case AttrTy::kDistribution: {
        RandomDistribution result;
        if (!ParseRandomDistribution(&result)) {
          return false;
        }
        static_cast<optional<RandomDistribution>*>(attr_out_ptr)
            ->emplace(result);
        return true;
      }
    }
  }();
  if (!success) {
    return Error(loc, Printf("error parsing attribute %s", name.c_str()));
  }
  return true;
}
// Parses a computation name and resolves it against the computations parsed
// so far, storing the result in `*value`. Fails on undefined names.
bool HloParser::ParseComputationName(HloComputation** value) {
  const LocTy name_loc = lexer_.GetLoc();
  string computation_name;
  if (!ParseName(&computation_name)) {
    return Error(name_loc, "expects computation name");
  }
  HloComputation* computation =
      tensorflow::gtl::FindPtrOrNull(computation_pool_, computation_name);
  if (computation == nullptr) {
    return Error(name_loc,
                 StrCat("computation does not exist: ", computation_name));
  }
  *value = computation;
  return true;
}
// window ::= '{' size stride? pad? lhs_dilate? rhs_dilate? '}'
// The subattributes can appear in any order. 'size=' is required, others are
// optional.
//
// Parses the window attribute into `window`, validating that all supplied
// sub-attributes have the same dimensionality as 'size=' and filling in the
// proto defaults (stride/dilation 1, no padding, no reversal) for absent ones.
bool HloParser::ParseWindow(Window* window) {
  LocTy loc = lexer_.GetLoc();
  if (!ParseToken(TokKind::kLbrace, "expected '{' to start window attribute")) {
    return false;
  }
  std::vector<int64> size;
  std::vector<int64> stride;
  std::vector<std::vector<int64>> pad;
  std::vector<int64> lhs_dilate;
  std::vector<int64> rhs_dilate;
  std::vector<int64> rhs_reversal;
  while (lexer_.GetKind() != TokKind::kRbrace) {
    LocTy attr_loc = lexer_.GetLoc();
    string field_name;
    if (!ParseAttributeName(&field_name)) {
      return Error(attr_loc, "expects sub-attributes in window");
    }
    bool ok = [&] {
      if (field_name == "size") {
        return ParseDxD("size", &size);
      }
      if (field_name == "stride") {
        return ParseDxD("stride", &stride);
      }
      if (field_name == "lhs_dilate") {
        return ParseDxD("lhs_dilate", &lhs_dilate);
      }
      if (field_name == "rhs_dilate") {
        // Fixed: the field label passed to ParseDxD was misspelled
        // "rls_dilate", producing a misleading error message on malformed
        // rhs_dilate values.
        return ParseDxD("rhs_dilate", &rhs_dilate);
      }
      if (field_name == "pad") {
        return ParseWindowPad(&pad);
      }
      if (field_name == "rhs_reversal") {
        return ParseDxD("rhs_reversal", &rhs_reversal);
      }
      return Error(loc, StrCat("unexpected attribute name: ", field_name));
    }();
    if (!ok) {
      return false;
    }
  }
  // 'size=' is mandatory; every other sub-attribute, when present, must match
  // its dimensionality.
  if (size.empty()) {
    return Error(loc,
                 "sub-attribute 'size=' is required in the window attribute");
  }
  if (!stride.empty() && stride.size() != size.size()) {
    return Error(loc, "expects 'stride=' has the same size as 'size='");
  }
  if (!lhs_dilate.empty() && lhs_dilate.size() != size.size()) {
    return Error(loc, "expects 'lhs_dilate=' has the same size as 'size='");
  }
  if (!rhs_dilate.empty() && rhs_dilate.size() != size.size()) {
    return Error(loc, "expects 'rhs_dilate=' has the same size as 'size='");
  }
  if (!pad.empty() && pad.size() != size.size()) {
    return Error(loc, "expects 'pad=' has the same size as 'size='");
  }
  for (int i = 0; i < size.size(); i++) {
    window->add_dimensions()->set_size(size[i]);
    if (!pad.empty()) {
      window->mutable_dimensions(i)->set_padding_low(pad[i][0]);
      window->mutable_dimensions(i)->set_padding_high(pad[i][1]);
    }
    // If some field is not present, it has the default value.
    window->mutable_dimensions(i)->set_stride(stride.empty() ? 1 : stride[i]);
    window->mutable_dimensions(i)->set_base_dilation(
        lhs_dilate.empty() ? 1 : lhs_dilate[i]);
    window->mutable_dimensions(i)->set_window_dilation(
        rhs_dilate.empty() ? 1 : rhs_dilate[i]);
    window->mutable_dimensions(i)->set_window_reversal(
        rhs_reversal.empty() ? false : (rhs_reversal[i] == 1));
  }
  return ParseToken(TokKind::kRbrace, "expected '}' to end window attribute");
}
// This is the inverse of HloInstruction::ConvolutionDimensionNumbersToString.
// The string looks like "dim_labels=0bf_0io->0bf".
//
// Parses the lhs/rhs/output dimension-label strings into `dnums`. Each of the
// three parts has the same rank; 'b'/'f' mark the lhs/output batch and
// feature dimensions, 'i'/'o' mark the rhs input/output feature dimensions,
// and digits mark spatial dimensions by their spatial index.
bool HloParser::ParseConvolutionDimensionNumbers(
    ConvolutionDimensionNumbers* dnums) {
  if (lexer_.GetKind() != TokKind::kDimLabels) {
    return TokenError("expects dim labels pattern, e.g., 'bf0_0io->0bf'");
  }
  string str = lexer_.GetStrVal();
  // The str is expected to have 3 items, lhs, rhs, out, and it must looks like
  // lhs_rhs->out, that is, the first separator is "_" and the second is "->".
  // So we replace the "->" with "_" and then split on "_".
  str = tensorflow::str_util::StringReplace(str, /*oldsub=*/"->",
                                            /*newsub=*/"_",
                                            /*replace_all=*/false);
  std::vector<string> lhs_rhs_out = Split(str, "_");
  if (lhs_rhs_out.size() != 3) {
    LOG(FATAL) << "expects 3 items: lhs, rhs, and output dims, but sees "
               << str;
  }
  const int64 rank = lhs_rhs_out[0].length();
  if (rank != lhs_rhs_out[1].length() || rank != lhs_rhs_out[2].length()) {
    return TokenError(
        "convolution lhs, rhs, and output must have the same rank");
  }
  if (rank < 2) {
    return TokenError("convolution rank must >=2");
  }
  // Each label may appear at most once within one part.
  auto is_unique = [](string str) -> bool {
    std::sort(str.begin(), str.end());
    return std::unique(str.begin(), str.end()) == str.end();
  };
  // lhs
  {
    const string& lhs = lhs_rhs_out[0];
    if (!is_unique(lhs)) {
      return TokenError(
          StrCat("expects unique lhs dimension numbers, but sees ", lhs));
    }
    // Pre-size the spatial-dimension list (rank - 2 spatial dims) so digits
    // can be assigned by spatial index below.
    for (int i = 0; i < rank - 2; i++) {
      dnums->add_input_spatial_dimensions(-1);
    }
    for (int i = 0; i < rank; i++) {
      char c = lhs[i];
      if (c == 'b') {
        dnums->set_input_batch_dimension(i);
      } else if (c == 'f') {
        dnums->set_input_feature_dimension(i);
      } else if (c < '0' + rank && c >= '0') {
        dnums->set_input_spatial_dimensions(c - '0', i);
      } else {
        return TokenError(
            Printf("expects [0-%lldbf] in lhs dimension numbers", rank - 1));
      }
    }
  }
  // rhs
  {
    const string& rhs = lhs_rhs_out[1];
    if (!is_unique(rhs)) {
      return TokenError(
          StrCat("expects unique rhs dimension numbers, but sees ", rhs));
    }
    for (int i = 0; i < rank - 2; i++) {
      dnums->add_kernel_spatial_dimensions(-1);
    }
    for (int i = 0; i < rank; i++) {
      char c = rhs[i];
      if (c == 'i') {
        dnums->set_kernel_input_feature_dimension(i);
      } else if (c == 'o') {
        dnums->set_kernel_output_feature_dimension(i);
      } else if (c < '0' + rank && c >= '0') {
        dnums->set_kernel_spatial_dimensions(c - '0', i);
      } else {
        return TokenError(
            Printf("expects [0-%lldio] in rhs dimension numbers", rank - 1));
      }
    }
  }
  // output
  {
    const string& out = lhs_rhs_out[2];
    if (!is_unique(out)) {
      return TokenError(
          StrCat("expects unique output dimension numbers, but sees ", out));
    }
    for (int i = 0; i < rank - 2; i++) {
      dnums->add_output_spatial_dimensions(-1);
    }
    for (int i = 0; i < rank; i++) {
      char c = out[i];
      if (c == 'b') {
        dnums->set_output_batch_dimension(i);
      } else if (c == 'f') {
        dnums->set_output_feature_dimension(i);
      } else if (c < '0' + rank && c >= '0') {
        dnums->set_output_spatial_dimensions(c - '0', i);
      } else {
        return TokenError(
            Printf("expects [0-%lldbf] in output dimension numbers", rank - 1));
      }
    }
  }
  // Consume the dim-labels token.
  lexer_.Lex();
  return true;
}
// slice_ranges ::= '{' ranges '}'
// ranges
//   ::= /*empty*/
//   ::= range (',' range)*
// range ::= '[' start ':' limit (':' stride)? ']'
//
// The slice ranges are printed as:
//
//  {[dim0_start:dim0_limit:dim0stride], [dim1_start:dim1_limit], ...}
//
// This function extracts the starts, limits, and strides as 3 vectors to the
// result. If stride is not present, stride is 1. For example, if the slice
// ranges is printed as:
//
//  {[2:3:4], [5:6:7], [8:9]}
//
// Then the parsed result will be:
//
//  {/*starts=*/{2, 5, 8}, /*limits=*/{3, 6, 9}, /*strides=*/{4, 7, 1}}
//
bool HloParser::ParseSliceRanges(SliceRanges* result) {
  if (!ParseToken(TokKind::kLbrace, "expects '{' to start ranges")) {
    return false;
  }
  std::vector<std::vector<int64>> ranges;
  if (lexer_.GetKind() == TokKind::kRbrace) {
    // empty
    return ParseToken(TokKind::kRbrace, "expects '}' to end ranges");
  }
  do {
    LocTy loc = lexer_.GetLoc();
    ranges.emplace_back();
    // Each range is a colon-separated list of 2 or 3 ints in brackets.
    if (!ParseInt64List(TokKind::kLsquare, TokKind::kRsquare, TokKind::kColon,
                        &ranges.back())) {
      return false;
    }
    const auto& range = ranges.back();
    if (range.size() != 2 && range.size() != 3) {
      return Error(loc, Printf("expects [start:limit:step] or [start:limit], "
                               "but sees %ld elements.",
                               range.size()));
    }
  } while (EatIfPresent(TokKind::kComma));
  // Transpose the parsed per-dimension triples into the three output vectors,
  // defaulting stride to 1 when absent.
  for (const auto& range : ranges) {
    result->starts.push_back(range[0]);
    result->limits.push_back(range[1]);
    result->strides.push_back(range.size() == 3 ? range[2] : 1);
  }
  return ParseToken(TokKind::kRbrace, "expects '}' to end ranges");
}
// int64list ::= start int64_elements end
// int64_elements
// ::= /*empty*/
// ::= int64_val (delim int64_val)*
bool HloParser::ParseInt64List(const TokKind start, const TokKind end,
const TokKind delim,
std::vector<int64>* result) {
if (!ParseToken(start, StrCat("expects an int64 list starting with ",
TokKindToString(start)))) {
return false;
}
if (lexer_.GetKind() == end) {
// empty
} else {
do {
int64 i;
if (!ParseInt64(&i)) {
return false;
}
result->push_back(i);
} while (EatIfPresent(delim));
}
return ParseToken(
end, StrCat("expects an int64 list to end with ", TokKindToString(end)));
}
// param_list_to_shape ::= param_list '->' shape
bool HloParser::ParseParamListToShape(Shape* shape, LocTy* shape_loc) {
  // Consume the parameter list, then the arrow, before reading the shape.
  if (!ParseParamList()) {
    return false;
  }
  if (!ParseToken(TokKind::kArrow, "expects '->'")) {
    return false;
  }
  *shape_loc = lexer_.GetLoc();
  return ParseShape(shape);
}
bool HloParser::CanBeParamListToShape() {
return lexer_.GetKind() == TokKind::kLparen;
}
// param_list ::= '(' param_list1 ')'
// param_list1
//   ::= /*empty*/
//   ::= param (',' param)*
// param ::= name shape
bool HloParser::ParseParamList() {
  if (!ParseToken(TokKind::kLparen,
                  "expects '(' at the beginning of param list")) {
    return false;
  }
  // Parse zero or more comma-separated "name shape" pairs. The parsed
  // name/shape values are validated but discarded here.
  if (lexer_.GetKind() != TokKind::kRparen) {
    do {
      Shape shape;
      string name;
      if (!ParseName(&name) || !ParseShape(&shape)) {
        return false;
      }
    } while (EatIfPresent(TokKind::kComma));
  }
  return ParseToken(TokKind::kRparen, "expects ')' at the end of param list");
}
// shape ::= shape_val_
// shape ::= '(' tuple_elements ')'
// tuple_elements
//   ::= /*empty*/
//   ::= shape (',' shape)*
bool HloParser::ParseShape(Shape* result) {
  // Tuple shape: '(' shape (',' shape)* ')', possibly empty.
  if (EatIfPresent(TokKind::kLparen)) {
    std::vector<Shape> element_shapes;
    if (lexer_.GetKind() != TokKind::kRparen) {
      do {
        element_shapes.emplace_back();
        if (!ParseShape(&element_shapes.back())) {
          return false;
        }
      } while (EatIfPresent(TokKind::kComma));
    }
    *result = ShapeUtil::MakeTupleShape(element_shapes);
    return ParseToken(TokKind::kRparen, "expects ')' at the end of tuple.");
  }
  // Non-tuple shape: a single kShape token carries the shape value.
  if (lexer_.GetKind() != TokKind::kShape) {
    return TokenError("expects shape");
  }
  *result = lexer_.GetShapeVal();
  lexer_.Lex();
  return true;
}
bool HloParser::CanBeShape() {
// A non-tuple shape starts with a kShape token; a tuple shape starts with
// '('.
return lexer_.GetKind() == TokKind::kShape ||
lexer_.GetKind() == TokKind::kLparen;
}
// Accepts either an identifier token or a name token and stores its text.
bool HloParser::ParseName(string* result) {
  VLOG(1) << "ParseName";
  const TokKind kind = lexer_.GetKind();
  if (kind != TokKind::kIdent && kind != TokKind::kName) {
    return TokenError("expects name");
  }
  result->assign(lexer_.GetStrVal());
  lexer_.Lex();
  return true;
}
// Consumes a kAttributeName token and stores its text in *result.
bool HloParser::ParseAttributeName(string* result) {
  if (lexer_.GetKind() == TokKind::kAttributeName) {
    result->assign(lexer_.GetStrVal());
    lexer_.Lex();
    return true;
  }
  return TokenError("expects attribute name");
}
// Consumes a kString token and stores its text in *result.
bool HloParser::ParseString(string* result) {
  VLOG(1) << "ParseString";
  if (lexer_.GetKind() == TokKind::kString) {
    result->assign(lexer_.GetStrVal());
    lexer_.Lex();
    return true;
  }
  return TokenError("expects string");
}
// Parses a window sub-attribute value of the form 'i' (1D) or 'ixjx...'
// (2D or higher) into *result. Fails if *result is already non-empty,
// i.e. if the sub-attribute named `name` appeared more than once.
bool HloParser::ParseDxD(const string& name, std::vector<int64>* result) {
  LocTy loc = lexer_.GetLoc();
  if (!result->empty()) {
    return Error(loc,
                 Printf("sub-attribute '%s=' already exists", name.c_str()));
  }
  // 1D: a single integer token. ParseInt64 consumes the token on success,
  // so no extra Lex() is needed in this branch.
  if (lexer_.GetKind() == TokKind::kInt) {
    int64 number;
    if (!ParseInt64(&number)) {
      return Error(loc, Printf("expects sub-attribute '%s=i'", name.c_str()));
    }
    result->push_back(number);
    return true;
  }
  // 2D or higher: the whole 'x'-separated list is lexed as one kDxD token,
  // which must be consumed explicitly after a successful split.
  if (lexer_.GetKind() == TokKind::kDxD) {
    string str = lexer_.GetStrVal();
    if (!SplitAndParseAsInts(str, 'x', result)) {
      return Error(loc,
                   Printf("expects sub-attribute '%s=ixj...'", name.c_str()));
    }
    lexer_.Lex();
    return true;
  }
  return TokenError("expects token type kInt or kDxD");
}
bool HloParser::ParseWindowPad(std::vector<std::vector<int64>>* pad) {
LocTy loc = lexer_.GetLoc();
if (!pad->empty()) {
return Error(loc, "sub-attribute 'pad=' already exists");
}
if (lexer_.GetKind() != TokKind::kPad) {
return TokenError("expects window pad pattern, e.g., '0_0x3_3'");
}
string str = lexer_.GetStrVal();
std::vector<string> padding_str = Split(str, 'x');
for (int i = 0; i < padding_str.size(); i++) {
std::vector<int64> low_high;
if (!SplitAndParseAsInts(padding_str[i], '_', &low_high) ||
low_high.size() != 2) {
return Error(loc,
"expects padding_low and padding_high separated by '_'");
}
pad->push_back(low_high);
}
lexer_.Lex();
return true;
}
// This is the inverse xla::ToString(PaddingConfig). The padding config string
// looks like "0_0_0x3_3_1". The string is first separated by 'x', each
// substring represents one PaddingConfigDimension. The substring is 3 (or 2)
// numbers joined by '_'. When the third number (interior padding) is absent,
// it defaults to 0.
bool HloParser::ParsePaddingConfig(PaddingConfig* padding) {
  if (lexer_.GetKind() != TokKind::kPad) {
    return TokenError("expects padding config, e.g., '0_0_0x3_3_1'");
  }
  LocTy loc = lexer_.GetLoc();
  string str = lexer_.GetStrVal();
  std::vector<string> padding_str = Split(str, 'x');
  for (const auto& padding_dim_str : padding_str) {
    std::vector<int64> padding_dim;
    if (!SplitAndParseAsInts(padding_dim_str, '_', &padding_dim) ||
        (padding_dim.size() != 2 && padding_dim.size() != 3)) {
      return Error(loc,
                   "expects padding config pattern like 'low_high_interior' or "
                   "'low_high'");
    }
    // One PaddingConfigDimension per 'x'-separated substring.
    auto* dim = padding->add_dimensions();
    dim->set_edge_padding_low(padding_dim[0]);
    dim->set_edge_padding_high(padding_dim[1]);
    dim->set_interior_padding(padding_dim.size() == 3 ? padding_dim[2] : 0);
  }
  lexer_.Lex();
  return true;
}
// '{' metadata_string '}'
//
// Parses OpMetadata from sub-attributes. All four fields are optional;
// only the ones present in the text are set on *metadata.
bool HloParser::ParseMetadata(OpMetadata* metadata) {
  std::unordered_map<string, AttrConfig> attrs;
  optional<string> op_type;
  optional<string> op_name;
  optional<string> source_file;
  optional<int32> source_line;
  attrs["op_type"] = {/*required=*/false, AttrTy::kString, &op_type};
  attrs["op_name"] = {/*required=*/false, AttrTy::kString, &op_name};
  attrs["source_file"] = {/*required=*/false, AttrTy::kString, &source_file};
  attrs["source_line"] = {/*required=*/false, AttrTy::kInt32, &source_line};
  if (!ParseSubAttributes(attrs)) {
    return false;
  }
  // Copy only the attributes that actually appeared in the input.
  if (op_type) {
    metadata->set_op_type(*op_type);
  }
  if (op_name) {
    metadata->set_op_name(*op_name);
  }
  if (source_file) {
    metadata->set_source_file(*source_file);
  }
  if (source_line) {
    metadata->set_source_line(*source_line);
  }
  return true;
}
// Parses an identifier token as an HloOpcode; rejects unknown opcode names.
bool HloParser::ParseOpcode(HloOpcode* result) {
  VLOG(1) << "ParseOpcode";
  if (lexer_.GetKind() != TokKind::kIdent) {
    return TokenError("expects opcode");
  }
  string val = lexer_.GetStrVal();
  auto status_or_result = StringToHloOpcode(val);
  if (status_or_result.ok()) {
    *result = status_or_result.ValueOrDie();
    lexer_.Lex();
    return true;
  }
  return TokenError(
      Printf("expects opcode but sees: %s, error: %s", val.c_str(),
             status_or_result.status().error_message().c_str()));
}
// Parses an identifier token as a FusionKind; rejects unknown kind names.
bool HloParser::ParseFusionKind(HloInstruction::FusionKind* result) {
  VLOG(1) << "ParseFusionKind";
  if (lexer_.GetKind() != TokKind::kIdent) {
    return TokenError("expects fusion kind");
  }
  string val = lexer_.GetStrVal();
  auto status_or_result = StringToFusionKind(val);
  if (status_or_result.ok()) {
    *result = status_or_result.ValueOrDie();
    lexer_.Lex();
    return true;
  }
  return TokenError(
      Printf("expects fusion kind but sees: %s, error: %s", val.c_str(),
             status_or_result.status().error_message().c_str()));
}
// Parses an identifier token as a RandomDistribution enum value.
bool HloParser::ParseRandomDistribution(RandomDistribution* result) {
  VLOG(1) << "ParseRandomDistribution";
  if (lexer_.GetKind() != TokKind::kIdent) {
    return TokenError("expects random distribution");
  }
  string val = lexer_.GetStrVal();
  auto status_or_result = StringToRandomDistribution(val);
  if (status_or_result.ok()) {
    *result = status_or_result.ValueOrDie();
    lexer_.Lex();
    return true;
  }
  return TokenError(
      Printf("expects random distribution but sees: %s, error: %s",
             val.c_str(), status_or_result.status().error_message().c_str()));
}
// Consumes a kInt token and stores its value in *result.
bool HloParser::ParseInt64(int64* result) {
  VLOG(1) << "ParseInt64";
  if (lexer_.GetKind() == TokKind::kInt) {
    *result = lexer_.GetInt64Val();
    lexer_.Lex();
    return true;
  }
  return TokenError("expects integer");
}
bool HloParser::ParseDouble(double* result) {
switch (lexer_.GetKind()) {
case TokKind::kDecimal:
*result = lexer_.GetDecimalVal();
break;
case TokKind::kInt:
*result = static_cast<double>(lexer_.GetInt64Val());
break;
case TokKind::kw_nan:
*result = std::numeric_limits<double>::quiet_NaN();
break;
case TokKind::kw_inf:
*result = std::numeric_limits<double>::infinity();
break;
case TokKind::kNegInf:
*result = -std::numeric_limits<double>::infinity();
break;
default:
return TokenError("expects decimal or integer");
}
lexer_.Lex();
return true;
}
bool HloParser::ParseBool(bool* result) {
if (lexer_.GetKind() != TokKind::kw_true &&
lexer_.GetKind() != TokKind::kw_false) {
return TokenError("expects true or false");
}
*result = lexer_.GetKind() == TokKind::kw_true;
lexer_.Lex();
return true;
}
// Consumes the expected token kind, or reports `msg` as the error.
bool HloParser::ParseToken(TokKind kind, const string& msg) {
  VLOG(1) << "ParseToken " << TokKindToString(kind) << " " << msg;
  if (lexer_.GetKind() == kind) {
    lexer_.Lex();
    return true;
  }
  return TokenError(msg);
}
// Consumes the token only if it matches `kind`; never reports an error.
bool HloParser::EatIfPresent(TokKind kind) {
  if (lexer_.GetKind() == kind) {
    lexer_.Lex();
    return true;
  }
  return false;
}
// Registers `instruction` under `name`; duplicate names are an error
// reported at `name_loc`.
bool HloParser::AddInstruction(const string& name, HloInstruction* instruction,
                               LocTy name_loc) {
  const bool inserted = instruction_pool_.insert({name, instruction}).second;
  if (inserted) {
    return true;
  }
  return Error(name_loc, StrCat("instruction already exists: ", name));
}
// Registers `computation` under `name`; duplicate names are an error
// reported at `name_loc`.
bool HloParser::AddComputation(const string& name, HloComputation* computation,
                               LocTy name_loc) {
  const bool inserted = computation_pool_.insert({name, computation}).second;
  if (inserted) {
    return true;
  }
  return Error(name_loc, StrCat("computation already exists: ", name));
}
} // namespace
// Parses the given HLO text into an HloModule built with the provided
// module config. Returns InvalidArgument carrying the parser's error
// message on a syntax error.
StatusOr<std::unique_ptr<HloModule>> Parse(StringPiece str,
                                           const HloModuleConfig& config) {
  HloParser parser(str, config);
  if (!parser.Run()) {
    return InvalidArgument("Syntax error:\n%s", parser.GetError().c_str());
  }
  return parser.ConsumeHloModule();
}
// Convenience overload: parses with a default-constructed HloModuleConfig.
StatusOr<std::unique_ptr<HloModule>> Parse(StringPiece str) {
  HloModuleConfig config;
  return Parse(str, config);
}
} // namespace tools
} // namespace xla
| Java |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using Xunit;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Commands.ServiceBus.Test")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Commands.ServiceBus.Test")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("a893f297-3311-4224-8086-a4bb3c5e478a")]
// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
[assembly: AssemblyVersion("0.1.0")]
[assembly: AssemblyFileVersion("0.1.0")]
// Run xunit test collections sequentially rather than in parallel —
// presumably these tests share external Service Bus state; verify before
// re-enabling parallelization.
[assembly: CollectionBehavior(DisableTestParallelization = true)]
| Java |
// WARNING: Please don't edit this file. It was generated by C++/WinRT v2.0.210930.14
#pragma once
#ifndef WINRT_Windows_Devices_Portable_1_H
#define WINRT_Windows_Devices_Portable_1_H
#include "winrt/impl/Windows.Devices.Portable.0.h"
WINRT_EXPORT namespace winrt::Windows::Devices::Portable
{
    // C++/WinRT consuming wrapper for the IServiceDeviceStatics ABI interface
    // (members are injected via impl::consume_t).
    struct __declspec(empty_bases) IServiceDeviceStatics :
        winrt::Windows::Foundation::IInspectable,
        impl::consume_t<IServiceDeviceStatics>
    {
        // Constructs a null (empty) interface reference.
        IServiceDeviceStatics(std::nullptr_t = nullptr) noexcept {}
        // Attaches to a raw ABI pointer, taking over its reference count.
        IServiceDeviceStatics(void* ptr, take_ownership_from_abi_t) noexcept : winrt::Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
    };
    // C++/WinRT consuming wrapper for the IStorageDeviceStatics ABI interface
    // (members are injected via impl::consume_t).
    struct __declspec(empty_bases) IStorageDeviceStatics :
        winrt::Windows::Foundation::IInspectable,
        impl::consume_t<IStorageDeviceStatics>
    {
        // Constructs a null (empty) interface reference.
        IStorageDeviceStatics(std::nullptr_t = nullptr) noexcept {}
        // Attaches to a raw ABI pointer, taking over its reference count.
        IStorageDeviceStatics(void* ptr, take_ownership_from_abi_t) noexcept : winrt::Windows::Foundation::IInspectable(ptr, take_ownership_from_abi) {}
    };
}
#endif
| Java |
//
// HWTextPart.h
// 黑马微博2期
//
// Created by apple on 14/11/15.
// Copyright (c) 2014年 heima. All rights reserved.
// 文字的一部分
#import <Foundation/Foundation.h>
@interface HWTextPart : NSObject
/** The text content of this part. */
@property (nonatomic, copy) NSString *text;
/** The range this part occupies within the full string. */
@property (nonatomic, assign) NSRange range;
/** Whether this part is special text.
    NOTE(review): getter name "isSpecical" looks like a typo for "isSpecial";
    renaming would break existing callers, so it is left unchanged. */
@property (nonatomic, assign, getter = isSpecical) BOOL special;
/** Whether this part is an emoticon. */
@property (nonatomic, assign, getter = isEmotion) BOOL emotion;
@end
| Java |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=UTF-8" />
<title>ZKTableArchiveClient xref</title>
<link type="text/css" rel="stylesheet" href="../../../../../../stylesheet.css" />
</head>
<body>
<div id="overview"><a href="../../../../../../../apidocs/org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.html">View Javadoc</a></div><pre>
<a class="jxr_linenumber" name="1" href="#1">1</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="2" href="#2">2</a> <em class="jxr_javadoccomment"> * Licensed to the Apache Software Foundation (ASF) under one</em>
<a class="jxr_linenumber" name="3" href="#3">3</a> <em class="jxr_javadoccomment"> * or more contributor license agreements. See the NOTICE file</em>
<a class="jxr_linenumber" name="4" href="#4">4</a> <em class="jxr_javadoccomment"> * distributed with this work for additional information</em>
<a class="jxr_linenumber" name="5" href="#5">5</a> <em class="jxr_javadoccomment"> * regarding copyright ownership. The ASF licenses this file</em>
<a class="jxr_linenumber" name="6" href="#6">6</a> <em class="jxr_javadoccomment"> * to you under the Apache License, Version 2.0 (the</em>
<a class="jxr_linenumber" name="7" href="#7">7</a> <em class="jxr_javadoccomment"> * "License"); you may not use this file except in compliance</em>
<a class="jxr_linenumber" name="8" href="#8">8</a> <em class="jxr_javadoccomment"> * with the License. You may obtain a copy of the License at</em>
<a class="jxr_linenumber" name="9" href="#9">9</a> <em class="jxr_javadoccomment"> *</em>
<a class="jxr_linenumber" name="10" href="#10">10</a> <em class="jxr_javadoccomment"> * <a href="http://www.apache.org/licenses/LICENSE-2.0" target="alexandria_uri">http://www.apache.org/licenses/LICENSE-2.0</a></em>
<a class="jxr_linenumber" name="11" href="#11">11</a> <em class="jxr_javadoccomment"> *</em>
<a class="jxr_linenumber" name="12" href="#12">12</a> <em class="jxr_javadoccomment"> * Unless required by applicable law or agreed to in writing, software</em>
<a class="jxr_linenumber" name="13" href="#13">13</a> <em class="jxr_javadoccomment"> * distributed under the License is distributed on an "AS IS" BASIS,</em>
<a class="jxr_linenumber" name="14" href="#14">14</a> <em class="jxr_javadoccomment"> * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</em>
<a class="jxr_linenumber" name="15" href="#15">15</a> <em class="jxr_javadoccomment"> * See the License for the specific language governing permissions and</em>
<a class="jxr_linenumber" name="16" href="#16">16</a> <em class="jxr_javadoccomment"> * limitations under the License.</em>
<a class="jxr_linenumber" name="17" href="#17">17</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="18" href="#18">18</a> <strong class="jxr_keyword">package</strong> org.apache.hadoop.hbase.backup.example;
<a class="jxr_linenumber" name="19" href="#19">19</a>
<a class="jxr_linenumber" name="20" href="#20">20</a> <strong class="jxr_keyword">import</strong> java.io.IOException;
<a class="jxr_linenumber" name="21" href="#21">21</a>
<a class="jxr_linenumber" name="22" href="#22">22</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.hbase.classification.InterfaceAudience;
<a class="jxr_linenumber" name="23" href="#23">23</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.conf.Configuration;
<a class="jxr_linenumber" name="24" href="#24">24</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.conf.Configured;
<a class="jxr_linenumber" name="25" href="#25">25</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.hbase.client.ClusterConnection;
<a class="jxr_linenumber" name="26" href="#26">26</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.hbase.util.Bytes;
<a class="jxr_linenumber" name="27" href="#27">27</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.hbase.zookeeper.ZKUtil;
<a class="jxr_linenumber" name="28" href="#28">28</a> <strong class="jxr_keyword">import</strong> org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
<a class="jxr_linenumber" name="29" href="#29">29</a> <strong class="jxr_keyword">import</strong> org.apache.zookeeper.KeeperException;
<a class="jxr_linenumber" name="30" href="#30">30</a>
<a class="jxr_linenumber" name="31" href="#31">31</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="32" href="#32">32</a> <em class="jxr_javadoccomment"> * Example class for how to use the table archiving coordinated via zookeeper</em>
<a class="jxr_linenumber" name="33" href="#33">33</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="34" href="#34">34</a> @InterfaceAudience.Private
<a class="jxr_linenumber" name="35" href="#35">35</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">class</strong> <a href="../../../../../../org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.html">ZKTableArchiveClient</a> <strong class="jxr_keyword">extends</strong> Configured {
<a class="jxr_linenumber" name="36" href="#36">36</a>
<a class="jxr_linenumber" name="37" href="#37">37</a> <em class="jxr_javadoccomment">/**</em><em class="jxr_javadoccomment"> Configuration key for the archive node. */</em>
<a class="jxr_linenumber" name="38" href="#38">38</a> <strong class="jxr_keyword">private</strong> <strong class="jxr_keyword">static</strong> <strong class="jxr_keyword">final</strong> String ZOOKEEPER_ZNODE_HFILE_ARCHIVE_KEY = <span class="jxr_string">"zookeeper.znode.hfile.archive"</span>;
<a class="jxr_linenumber" name="39" href="#39">39</a> <strong class="jxr_keyword">private</strong> <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html">ClusterConnection</a> connection;
<a class="jxr_linenumber" name="40" href="#40">40</a>
<a class="jxr_linenumber" name="41" href="#41">41</a> <strong class="jxr_keyword">public</strong> <a href="../../../../../../org/apache/hadoop/hbase/backup/example/ZKTableArchiveClient.html">ZKTableArchiveClient</a>(Configuration conf, <a href="../../../../../../org/apache/hadoop/hbase/client/ClusterConnection.html">ClusterConnection</a> connection) {
<a class="jxr_linenumber" name="42" href="#42">42</a> <strong class="jxr_keyword">super</strong>(conf);
<a class="jxr_linenumber" name="43" href="#43">43</a> <strong class="jxr_keyword">this</strong>.connection = connection;
<a class="jxr_linenumber" name="44" href="#44">44</a> }
<a class="jxr_linenumber" name="45" href="#45">45</a>
<a class="jxr_linenumber" name="46" href="#46">46</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="47" href="#47">47</a> <em class="jxr_javadoccomment"> * Turn on backups for all HFiles for the given table.</em>
<a class="jxr_linenumber" name="48" href="#48">48</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="49" href="#49">49</a> <em class="jxr_javadoccomment"> * All deleted hfiles are moved to the archive directory under the table directory, rather than</em>
<a class="jxr_linenumber" name="50" href="#50">50</a> <em class="jxr_javadoccomment"> * being deleted.</em>
<a class="jxr_linenumber" name="51" href="#51">51</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="52" href="#52">52</a> <em class="jxr_javadoccomment"> * If backups are already enabled for this table, does nothing.</em>
<a class="jxr_linenumber" name="53" href="#53">53</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="54" href="#54">54</a> <em class="jxr_javadoccomment"> * If the table does not exist, the archiving the table's hfiles is still enabled as a future</em>
<a class="jxr_linenumber" name="55" href="#55">55</a> <em class="jxr_javadoccomment"> * table with that name may be created shortly.</em>
<a class="jxr_linenumber" name="56" href="#56">56</a> <em class="jxr_javadoccomment"> * @param table name of the table to start backing up</em>
<a class="jxr_linenumber" name="57" href="#57">57</a> <em class="jxr_javadoccomment"> * @throws IOException if an unexpected exception occurs</em>
<a class="jxr_linenumber" name="58" href="#58">58</a> <em class="jxr_javadoccomment"> * @throws KeeperException if zookeeper can't be reached</em>
<a class="jxr_linenumber" name="59" href="#59">59</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="60" href="#60">60</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">void</strong> enableHFileBackupAsync(<strong class="jxr_keyword">final</strong> byte[] table) <strong class="jxr_keyword">throws</strong> IOException, KeeperException {
<a class="jxr_linenumber" name="61" href="#61">61</a> createHFileArchiveManager().enableHFileBackup(table).stop();
<a class="jxr_linenumber" name="62" href="#62">62</a> }
<a class="jxr_linenumber" name="63" href="#63">63</a>
<a class="jxr_linenumber" name="64" href="#64">64</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="65" href="#65">65</a> <em class="jxr_javadoccomment"> * Disable hfile backups for the given table.</em>
<a class="jxr_linenumber" name="66" href="#66">66</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="67" href="#67">67</a> <em class="jxr_javadoccomment"> * Previously backed up files are still retained (if present).</em>
<a class="jxr_linenumber" name="68" href="#68">68</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="69" href="#69">69</a> <em class="jxr_javadoccomment"> * Asynchronous operation - some extra HFiles may be retained, in the archive directory after</em>
<a class="jxr_linenumber" name="70" href="#70">70</a> <em class="jxr_javadoccomment"> * disable is called, dependent on the latency in zookeeper to the servers.</em>
<a class="jxr_linenumber" name="71" href="#71">71</a> <em class="jxr_javadoccomment"> * @param table name of the table stop backing up</em>
<a class="jxr_linenumber" name="72" href="#72">72</a> <em class="jxr_javadoccomment"> * @throws IOException if an unexpected exception occurs</em>
<a class="jxr_linenumber" name="73" href="#73">73</a> <em class="jxr_javadoccomment"> * @throws KeeperException if zookeeper can't be reached</em>
<a class="jxr_linenumber" name="74" href="#74">74</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="75" href="#75">75</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">void</strong> disableHFileBackup(String table) <strong class="jxr_keyword">throws</strong> IOException, KeeperException {
<a class="jxr_linenumber" name="76" href="#76">76</a> disableHFileBackup(Bytes.toBytes(table));
<a class="jxr_linenumber" name="77" href="#77">77</a> }
<a class="jxr_linenumber" name="78" href="#78">78</a>
<a class="jxr_linenumber" name="79" href="#79">79</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="80" href="#80">80</a> <em class="jxr_javadoccomment"> * Disable hfile backups for the given table.</em>
<a class="jxr_linenumber" name="81" href="#81">81</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="82" href="#82">82</a> <em class="jxr_javadoccomment"> * Previously backed up files are still retained (if present).</em>
<a class="jxr_linenumber" name="83" href="#83">83</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="84" href="#84">84</a> <em class="jxr_javadoccomment"> * Asynchronous operation - some extra HFiles may be retained, in the archive directory after</em>
<a class="jxr_linenumber" name="85" href="#85">85</a> <em class="jxr_javadoccomment"> * disable is called, dependent on the latency in zookeeper to the servers.</em>
<a class="jxr_linenumber" name="86" href="#86">86</a> <em class="jxr_javadoccomment"> * @param table name of the table stop backing up</em>
<a class="jxr_linenumber" name="87" href="#87">87</a> <em class="jxr_javadoccomment"> * @throws IOException if an unexpected exception occurs</em>
<a class="jxr_linenumber" name="88" href="#88">88</a> <em class="jxr_javadoccomment"> * @throws KeeperException if zookeeper can't be reached</em>
<a class="jxr_linenumber" name="89" href="#89">89</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="90" href="#90">90</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">void</strong> disableHFileBackup(<strong class="jxr_keyword">final</strong> byte[] table) <strong class="jxr_keyword">throws</strong> IOException, KeeperException {
<a class="jxr_linenumber" name="91" href="#91">91</a> createHFileArchiveManager().disableHFileBackup(table).stop();
<a class="jxr_linenumber" name="92" href="#92">92</a> }
<a class="jxr_linenumber" name="93" href="#93">93</a>
<a class="jxr_linenumber" name="94" href="#94">94</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="95" href="#95">95</a> <em class="jxr_javadoccomment"> * Disable hfile backups for all tables.</em>
<a class="jxr_linenumber" name="96" href="#96">96</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="97" href="#97">97</a> <em class="jxr_javadoccomment"> * Previously backed up files are still retained (if present).</em>
<a class="jxr_linenumber" name="98" href="#98">98</a> <em class="jxr_javadoccomment"> * <p></em>
<a class="jxr_linenumber" name="99" href="#99">99</a> <em class="jxr_javadoccomment"> * Asynchronous operation - some extra HFiles may be retained, in the archive directory after</em>
<a class="jxr_linenumber" name="100" href="#100">100</a> <em class="jxr_javadoccomment"> * disable is called, dependent on the latency in zookeeper to the servers.</em>
<a class="jxr_linenumber" name="101" href="#101">101</a> <em class="jxr_javadoccomment"> * @throws IOException if an unexpected exception occurs</em>
<a class="jxr_linenumber" name="102" href="#102">102</a> <em class="jxr_javadoccomment"> * @throws KeeperException if zookeeper can't be reached</em>
<a class="jxr_linenumber" name="103" href="#103">103</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="104" href="#104">104</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">void</strong> disableHFileBackup() <strong class="jxr_keyword">throws</strong> IOException, KeeperException {
<a class="jxr_linenumber" name="105" href="#105">105</a> createHFileArchiveManager().disableHFileBackup().stop();
<a class="jxr_linenumber" name="106" href="#106">106</a> }
<a class="jxr_linenumber" name="107" href="#107">107</a>
<a class="jxr_linenumber" name="108" href="#108">108</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="109" href="#109">109</a> <em class="jxr_javadoccomment"> * Determine if archiving is enabled (but not necessarily fully propagated) for a table</em>
<a class="jxr_linenumber" name="110" href="#110">110</a> <em class="jxr_javadoccomment"> * @param table name of the table to check</em>
<a class="jxr_linenumber" name="111" href="#111">111</a> <em class="jxr_javadoccomment"> * @return <tt>true</tt> if it is, <tt>false</tt> otherwise</em>
<a class="jxr_linenumber" name="112" href="#112">112</a> <em class="jxr_javadoccomment"> * @throws IOException if a connection to ZooKeeper cannot be established</em>
<a class="jxr_linenumber" name="113" href="#113">113</a> <em class="jxr_javadoccomment"> * @throws KeeperException</em>
<a class="jxr_linenumber" name="114" href="#114">114</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="115" href="#115">115</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">boolean</strong> getArchivingEnabled(byte[] table) <strong class="jxr_keyword">throws</strong> IOException, KeeperException {
<a class="jxr_linenumber" name="116" href="#116">116</a> <a href="../../../../../../org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html">HFileArchiveManager</a> manager = createHFileArchiveManager();
<a class="jxr_linenumber" name="117" href="#117">117</a> <strong class="jxr_keyword">try</strong> {
<a class="jxr_linenumber" name="118" href="#118">118</a> <strong class="jxr_keyword">return</strong> manager.isArchivingEnabled(table);
<a class="jxr_linenumber" name="119" href="#119">119</a> } <strong class="jxr_keyword">finally</strong> {
<a class="jxr_linenumber" name="120" href="#120">120</a> manager.stop();
<a class="jxr_linenumber" name="121" href="#121">121</a> }
<a class="jxr_linenumber" name="122" href="#122">122</a> }
<a class="jxr_linenumber" name="123" href="#123">123</a>
<a class="jxr_linenumber" name="124" href="#124">124</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="125" href="#125">125</a> <em class="jxr_javadoccomment"> * Determine if archiving is enabled (but not necessarily fully propagated) for a table</em>
<a class="jxr_linenumber" name="126" href="#126">126</a> <em class="jxr_javadoccomment"> * @param table name of the table to check</em>
<a class="jxr_linenumber" name="127" href="#127">127</a> <em class="jxr_javadoccomment"> * @return <tt>true</tt> if it is, <tt>false</tt> otherwise</em>
<a class="jxr_linenumber" name="128" href="#128">128</a> <em class="jxr_javadoccomment"> * @throws IOException if an unexpected network issue occurs</em>
<a class="jxr_linenumber" name="129" href="#129">129</a> <em class="jxr_javadoccomment"> * @throws KeeperException if zookeeper can't be reached</em>
<a class="jxr_linenumber" name="130" href="#130">130</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="131" href="#131">131</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">boolean</strong> getArchivingEnabled(String table) <strong class="jxr_keyword">throws</strong> IOException, KeeperException {
<a class="jxr_linenumber" name="132" href="#132">132</a> <strong class="jxr_keyword">return</strong> getArchivingEnabled(Bytes.toBytes(table));
<a class="jxr_linenumber" name="133" href="#133">133</a> }
<a class="jxr_linenumber" name="134" href="#134">134</a>
<a class="jxr_linenumber" name="135" href="#135">135</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="136" href="#136">136</a> <em class="jxr_javadoccomment"> * @return A new {@link HFileArchiveManager} to manage which tables' hfiles should be archived</em>
<a class="jxr_linenumber" name="137" href="#137">137</a> <em class="jxr_javadoccomment"> * rather than deleted.</em>
<a class="jxr_linenumber" name="138" href="#138">138</a> <em class="jxr_javadoccomment"> * @throws KeeperException if we can't reach zookeeper</em>
<a class="jxr_linenumber" name="139" href="#139">139</a> <em class="jxr_javadoccomment"> * @throws IOException if an unexpected network issue occurs</em>
<a class="jxr_linenumber" name="140" href="#140">140</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="141" href="#141">141</a> <strong class="jxr_keyword">private</strong> <strong class="jxr_keyword">synchronized</strong> <a href="../../../../../../org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html">HFileArchiveManager</a> createHFileArchiveManager() <strong class="jxr_keyword">throws</strong> KeeperException,
<a class="jxr_linenumber" name="142" href="#142">142</a> IOException {
<a class="jxr_linenumber" name="143" href="#143">143</a> <strong class="jxr_keyword">return</strong> <strong class="jxr_keyword">new</strong> <a href="../../../../../../org/apache/hadoop/hbase/backup/example/HFileArchiveManager.html">HFileArchiveManager</a>(<strong class="jxr_keyword">this</strong>.connection, <strong class="jxr_keyword">this</strong>.getConf());
<a class="jxr_linenumber" name="144" href="#144">144</a> }
<a class="jxr_linenumber" name="145" href="#145">145</a>
<a class="jxr_linenumber" name="146" href="#146">146</a> <em class="jxr_javadoccomment">/**</em>
<a class="jxr_linenumber" name="147" href="#147">147</a> <em class="jxr_javadoccomment"> * @param conf conf to read for the base archive node</em>
<a class="jxr_linenumber" name="148" href="#148">148</a> <em class="jxr_javadoccomment"> * @param zooKeeper zookeeper to used for building the full path</em>
<a class="jxr_linenumber" name="149" href="#149">149</a> <em class="jxr_javadoccomment"> * @return get the znode for long-term archival of a table for</em>
<a class="jxr_linenumber" name="150" href="#150">150</a> <em class="jxr_javadoccomment"> */</em>
<a class="jxr_linenumber" name="151" href="#151">151</a> <strong class="jxr_keyword">public</strong> <strong class="jxr_keyword">static</strong> String getArchiveZNode(Configuration conf, <a href="../../../../../../org/apache/hadoop/hbase/zookeeper/ZooKeeperWatcher.html">ZooKeeperWatcher</a> zooKeeper) {
<a class="jxr_linenumber" name="152" href="#152">152</a> <strong class="jxr_keyword">return</strong> ZKUtil.joinZNode(zooKeeper.baseZNode, conf.get(ZOOKEEPER_ZNODE_HFILE_ARCHIVE_KEY,
<a class="jxr_linenumber" name="153" href="#153">153</a> TableHFileArchiveTracker.HFILE_ARCHIVE_ZNODE_PARENT));
<a class="jxr_linenumber" name="154" href="#154">154</a> }
<a class="jxr_linenumber" name="155" href="#155">155</a> }
</pre>
<hr/><div id="footer">This page was automatically generated by <a href="http://maven.apache.org/">Maven</a></div></body>
</html>
| Java |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.ssl;
import io.netty.buffer.ByteBuf;
import io.netty.util.internal.NativeLibraryLoader;
import io.netty.util.internal.SystemPropertyUtil;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;
import org.apache.tomcat.jni.Buffer;
import org.apache.tomcat.jni.Library;
import org.apache.tomcat.jni.Pool;
import org.apache.tomcat.jni.SSL;
import org.apache.tomcat.jni.SSLContext;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Locale;
import java.util.Set;
/**
* Tells if <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support
* are available.
*/
public final class OpenSsl {
    private static final InternalLogger logger = InternalLoggerFactory.getInstance(OpenSsl.class);
    // Canonical OS identifier; used both by normalizeOs() and for library-name construction.
    private static final String LINUX = "linux";
    // Fallback identifier when the OS or CPU architecture cannot be recognized.
    private static final String UNKNOWN = "unknown";
    // Why netty-tcnative/OpenSSL is unavailable, or null if it is available.
    private static final Throwable UNAVAILABILITY_CAUSE;
    // OpenSSL-style names of every cipher suite the loaded library supports;
    // empty when the native library is unavailable.
    private static final Set<String> AVAILABLE_CIPHER_SUITES;
    static {
        Throwable cause = null;
        // Test if netty-tcnative is in the classpath first.
        try {
            Class.forName("org.apache.tomcat.jni.SSL", false, OpenSsl.class.getClassLoader());
        } catch (ClassNotFoundException t) {
            cause = t;
            logger.debug(
                    "netty-tcnative not in the classpath; " +
                    OpenSslEngine.class.getSimpleName() + " will be unavailable.");
        }
        // If in the classpath, try to load the native library and initialize netty-tcnative.
        if (cause == null) {
            try {
                // The JNI library was not already loaded. Load it now.
                loadTcNative();
            } catch (Throwable t) {
                cause = t;
                logger.debug(
                    "Failed to load netty-tcnative; " +
                        OpenSslEngine.class.getSimpleName() + " will be unavailable, unless the " +
                        "application has already loaded the symbols by some other means. " +
                        "See http://netty.io/wiki/forked-tomcat-native.html for more information.", t);
            }
            // Initialization is attempted even when loading failed: the symbols may have been
            // loaded by the application through some other means.
            try {
                initializeTcNative();
                // The library was initialized successfully. If loading the library failed above,
                // reset the cause now since it appears that the library was loaded by some other
                // means.
                cause = null;
            } catch (Throwable t) {
                if (cause == null) {
                    cause = t;
                }
                logger.debug(
                    "Failed to initialize netty-tcnative; " +
                        OpenSslEngine.class.getSimpleName() + " will be unavailable. " +
                        "See http://netty.io/wiki/forked-tomcat-native.html for more information.", t);
            }
        }
        UNAVAILABILITY_CAUSE = cause;
        if (cause == null) {
            // Probe the native library once for its cipher-suite list by building a throw-away
            // server context configured to accept "ALL" ciphers. Every native handle created
            // here (pool, context, ssl) is released in the matching finally block.
            final Set<String> availableCipherSuites = new LinkedHashSet<String>(128);
            final long aprPool = Pool.create(0);
            try {
                final long sslCtx = SSLContext.make(aprPool, SSL.SSL_PROTOCOL_ALL, SSL.SSL_MODE_SERVER);
                try {
                    SSLContext.setOptions(sslCtx, SSL.SSL_OP_ALL);
                    SSLContext.setCipherSuite(sslCtx, "ALL");
                    final long ssl = SSL.newSSL(sslCtx, true);
                    try {
                        for (String c: SSL.getCiphers(ssl)) {
                            // Filter out bad input.
                            if (c == null || c.length() == 0 || availableCipherSuites.contains(c)) {
                                continue;
                            }
                            availableCipherSuites.add(c);
                        }
                    } finally {
                        SSL.freeSSL(ssl);
                    }
                } finally {
                    SSLContext.free(sslCtx);
                }
            } catch (Exception e) {
                logger.warn("Failed to get the list of available OpenSSL cipher suites.", e);
            } finally {
                Pool.destroy(aprPool);
            }
            AVAILABLE_CIPHER_SUITES = Collections.unmodifiableSet(availableCipherSuites);
        } else {
            AVAILABLE_CIPHER_SUITES = Collections.emptySet();
        }
    }
    /**
     * Returns {@code true} if and only if
     * <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support
     * are available.
     */
    public static boolean isAvailable() {
        return UNAVAILABILITY_CAUSE == null;
    }
    /**
     * Returns {@code true} if the used version of openssl supports
     * <a href="https://tools.ietf.org/html/rfc7301">ALPN</a>.
     */
    public static boolean isAlpnSupported() {
        // 0x10002000 is the OPENSSL_VERSION_NUMBER encoding of OpenSSL 1.0.2,
        // the first release with ALPN support.
        return version() >= 0x10002000L;
    }
    /**
     * Returns the version of the used available OpenSSL library or {@code -1} if {@link #isAvailable()}
     * returns {@code false}.
     */
    public static int version() {
        if (isAvailable()) {
            return SSL.version();
        }
        return -1;
    }
    /**
     * Returns the version string of the used available OpenSSL library or {@code null} if {@link #isAvailable()}
     * returns {@code false}.
     */
    public static String versionString() {
        if (isAvailable()) {
            return SSL.versionString();
        }
        return null;
    }
    /**
     * Ensure that <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and
     * its OpenSSL support are available.
     *
     * @throws UnsatisfiedLinkError if unavailable
     */
    public static void ensureAvailability() {
        if (UNAVAILABILITY_CAUSE != null) {
            // initCause returns Throwable, hence the cast back to Error before throwing.
            throw (Error) new UnsatisfiedLinkError(
                    "failed to load the required native library").initCause(UNAVAILABILITY_CAUSE);
        }
    }
    /**
     * Returns the cause of unavailability of
     * <a href="http://netty.io/wiki/forked-tomcat-native.html">{@code netty-tcnative}</a> and its OpenSSL support.
     *
     * @return the cause if unavailable. {@code null} if available.
     */
    public static Throwable unavailabilityCause() {
        return UNAVAILABILITY_CAUSE;
    }
    /**
     * Returns all the available OpenSSL cipher suites.
     * Please note that the returned array may include the cipher suites that are insecure or non-functional.
     */
    public static Set<String> availableCipherSuites() {
        return AVAILABLE_CIPHER_SUITES;
    }
    /**
     * Returns {@code true} if and only if the specified cipher suite is available in OpenSSL.
     * Both Java-style cipher suite and OpenSSL-style cipher suite are accepted.
     */
    public static boolean isCipherSuiteAvailable(String cipherSuite) {
        // Java-style names are converted to OpenSSL-style first; a null conversion result
        // means the input was (presumably) already an OpenSSL-style name and is used as-is.
        String converted = CipherSuiteConverter.toOpenSsl(cipherSuite);
        if (converted != null) {
            cipherSuite = converted;
        }
        return AVAILABLE_CIPHER_SUITES.contains(cipherSuite);
    }
    /**
     * Returns {@code true} if the given native return code denotes an error,
     * i.e. is anything other than {@code SSL.SSL_ERROR_NONE}.
     */
    static boolean isError(long errorCode) {
        return errorCode != SSL.SSL_ERROR_NONE;
    }
    /**
     * Returns the native memory address of the given direct buffer.
     * Falls back to {@code Buffer.address(...)} for direct buffers that do not expose a
     * memory address via {@code ByteBuf.hasMemoryAddress()}.
     */
    static long memoryAddress(ByteBuf buf) {
        assert buf.isDirect();
        return buf.hasMemoryAddress() ? buf.memoryAddress() : Buffer.address(buf.nioBuffer());
    }
    // Utility class - no instances.
    private OpenSsl() { }
    /**
     * Loads the netty-tcnative JNI library, trying the platform/arch-specific library names
     * first (as shipped in the tcnative uber jar) and falling back to the generic name.
     */
    private static void loadTcNative() throws Exception {
        String os = normalizeOs(SystemPropertyUtil.get("os.name", ""));
        String arch = normalizeArch(SystemPropertyUtil.get("os.arch", ""));
        Set<String> libNames = new LinkedHashSet<String>(3);
        // First, try loading the platform-specific library. Platform-specific
        // libraries will be available if using a tcnative uber jar.
        libNames.add("netty-tcnative-" + os + '-' + arch);
        if (LINUX.equalsIgnoreCase(os)) {
            // Fedora SSL lib so naming (libssl.so.10 vs libssl.so.1.0.0)..
            libNames.add("netty-tcnative-" + os + '-' + arch + "-fedora");
        }
        // finally the default library.
        libNames.add("netty-tcnative");
        NativeLibraryLoader.loadFirstAvailable(SSL.class.getClassLoader(),
            libNames.toArray(new String[libNames.size()]));
    }
    /**
     * Initializes the already-loaded tcnative library.
     * NOTE(review): "provided" presumably signals that the native library was loaded
     * externally rather than by Library itself - confirm against the tcnative javadoc.
     */
    private static void initializeTcNative() throws Exception {
        Library.initialize("provided");
        SSL.initialize(null);
    }
    /**
     * Maps an {@code os.name} value onto a small set of canonical OS identifiers,
     * or {@link #UNKNOWN} if it is not recognized.
     */
    private static String normalizeOs(String value) {
        value = normalize(value);
        if (value.startsWith("aix")) {
            return "aix";
        }
        if (value.startsWith("hpux")) {
            return "hpux";
        }
        if (value.startsWith("os400")) {
            // Avoid the names such as os4000
            if (value.length() <= 5 || !Character.isDigit(value.charAt(5))) {
                return "os400";
            }
        }
        if (value.startsWith(LINUX)) {
            return LINUX;
        }
        if (value.startsWith("macosx") || value.startsWith("osx")) {
            return "osx";
        }
        if (value.startsWith("freebsd")) {
            return "freebsd";
        }
        if (value.startsWith("openbsd")) {
            return "openbsd";
        }
        if (value.startsWith("netbsd")) {
            return "netbsd";
        }
        if (value.startsWith("solaris") || value.startsWith("sunos")) {
            return "sunos";
        }
        if (value.startsWith("windows")) {
            return "windows";
        }
        return UNKNOWN;
    }
    /**
     * Maps an {@code os.arch} value onto a canonical architecture identifier,
     * or {@link #UNKNOWN} if it is not recognized. Input is pre-normalized, so
     * e.g. "x86_64" has already become "x8664" before matching.
     */
    private static String normalizeArch(String value) {
        value = normalize(value);
        if (value.matches("^(x8664|amd64|ia32e|em64t|x64)$")) {
            return "x86_64";
        }
        if (value.matches("^(x8632|x86|i[3-6]86|ia32|x32)$")) {
            return "x86_32";
        }
        if (value.matches("^(ia64|itanium64)$")) {
            return "itanium_64";
        }
        if (value.matches("^(sparc|sparc32)$")) {
            return "sparc_32";
        }
        if (value.matches("^(sparcv9|sparc64)$")) {
            return "sparc_64";
        }
        if (value.matches("^(arm|arm32)$")) {
            return "arm_32";
        }
        if ("aarch64".equals(value)) {
            return "aarch_64";
        }
        if (value.matches("^(ppc|ppc32)$")) {
            return "ppc_32";
        }
        if ("ppc64".equals(value)) {
            return "ppc_64";
        }
        if ("ppc64le".equals(value)) {
            return "ppcle_64";
        }
        if ("s390".equals(value)) {
            return "s390_32";
        }
        if ("s390x".equals(value)) {
            return "s390_64";
        }
        return UNKNOWN;
    }
    /**
     * Lower-cases the value (US locale) and strips every character outside {@code [a-z0-9]}.
     */
    private static String normalize(String value) {
        return value.toLowerCase(Locale.US).replaceAll("[^a-z0-9]+", "");
    }
}
| Java |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_51) on Fri Jun 28 10:55:02 CEST 2013 -->
<TITLE>
UpdateInfoListener
</TITLE>
<META NAME="date" CONTENT="2013-06-28">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../doclava-developer-docs.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="UpdateInfoListener";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/UpdateInfoListener.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../net/hockeyapp/android/UpdateFragment.html" title="class in net.hockeyapp.android"><B>PREV CLASS</B></A>
<A HREF="../../../net/hockeyapp/android/UpdateManager.html" title="class in net.hockeyapp.android"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../index.html?net/hockeyapp/android/UpdateInfoListener.html" target="_top"><B>FRAMES</B></A>
<A HREF="UpdateInfoListener.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | CONSTR | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | CONSTR | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<!-- ======== START OF CLASS DATA ======== -->
<H2>
<FONT SIZE="-1">
net.hockeyapp.android</FONT>
<BR>
Interface UpdateInfoListener</H2>
<DL>
<DT><B>All Known Implementing Classes:</B> <DD><A HREF="../../../net/hockeyapp/android/UpdateActivity.html" title="class in net.hockeyapp.android">UpdateActivity</A>, <A HREF="../../../net/hockeyapp/android/UpdateFragment.html" title="class in net.hockeyapp.android">UpdateFragment</A></DD>
</DL>
<HR>
<DL>
<DT><PRE>public interface <B>UpdateInfoListener</B></DL>
</PRE>
<P>
<h4>Description</h4>
Abstract class for callbacks to be invoked from UpdateActivity
and UpdateFragment.
<h4>License</h4>
<pre>
Copyright (c) 2011-2013 Bit Stadium GmbH
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
</pre>
<P>
<P>
<DL>
<DT><B>Author:</B></DT>
<DD>Thomas Dohmke</DD>
</DL>
<HR>
<P>
<!-- ========== METHOD SUMMARY =========== -->
<A NAME="method_summary"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="2"><FONT SIZE="+2">
<B>Method Summary</B></FONT></TH>
</TR>
<TR BGCOLOR="white" CLASS="TableRowColor">
<TD ALIGN="right" VALIGN="top" WIDTH="1%"><FONT SIZE="-1">
<CODE> int</CODE></FONT></TD>
<TD><CODE><B><A HREF="../../../net/hockeyapp/android/UpdateInfoListener.html#getCurrentVersionCode()">getCurrentVersionCode</A></B>()</CODE>
<BR>
Implement to return the app's current version code.</TD>
</TR>
</TABLE>
<P>
<!-- ============ METHOD DETAIL ========== -->
<A NAME="method_detail"><!-- --></A>
<TABLE BORDER="1" WIDTH="100%" CELLPADDING="3" CELLSPACING="0" SUMMARY="">
<TR BGCOLOR="#CCCCFF" CLASS="TableHeadingColor">
<TH ALIGN="left" COLSPAN="1"><FONT SIZE="+2">
<B>Method Detail</B></FONT></TH>
</TR>
</TABLE>
<A NAME="getCurrentVersionCode()"><!-- --></A><H3>
getCurrentVersionCode</H3>
<PRE>
int <B>getCurrentVersionCode</B>()</PRE>
<DL>
<DD>Implement to return the app's current version code.
<P>
<DD><DL>
</DL>
</DD>
</DL>
<!-- ========= END OF CLASS DATA ========= -->
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Class</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="class-use/UpdateInfoListener.html"><FONT CLASS="NavBarFont1"><B>Use</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../net/hockeyapp/android/UpdateFragment.html" title="class in net.hockeyapp.android"><B>PREV CLASS</B></A>
<A HREF="../../../net/hockeyapp/android/UpdateManager.html" title="class in net.hockeyapp.android"><B>NEXT CLASS</B></A></FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../index.html?net/hockeyapp/android/UpdateInfoListener.html" target="_top"><B>FRAMES</B></A>
<A HREF="UpdateInfoListener.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
<TR>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
SUMMARY: NESTED | FIELD | CONSTR | <A HREF="#method_summary">METHOD</A></FONT></TD>
<TD VALIGN="top" CLASS="NavBarCell3"><FONT SIZE="-2">
DETAIL: FIELD | CONSTR | <A HREF="#method_detail">METHOD</A></FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| Java |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/workmail/WorkMail_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/workmail/model/ResourceType.h>
#include <aws/workmail/model/EntityState.h>
#include <aws/core/utils/DateTime.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Json
{
class JsonValue;
class JsonView;
} // namespace Json
} // namespace Utils
namespace WorkMail
{
namespace Model
{
/**
* <p>The representation of a resource.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/workmail-2017-10-01/Resource">AWS
* API Reference</a></p>
*/
class AWS_WORKMAIL_API Resource
{
public:
Resource();
Resource(Aws::Utils::Json::JsonView jsonValue);
Resource& operator=(Aws::Utils::Json::JsonView jsonValue);
Aws::Utils::Json::JsonValue Jsonize() const;
/**
* <p>The identifier of the resource.</p>
*/
inline const Aws::String& GetId() const{ return m_id; }
/**
* <p>The identifier of the resource.</p>
*/
inline bool IdHasBeenSet() const { return m_idHasBeenSet; }
/**
* <p>The identifier of the resource.</p>
*/
inline void SetId(const Aws::String& value) { m_idHasBeenSet = true; m_id = value; }
/**
* <p>The identifier of the resource.</p>
*/
inline void SetId(Aws::String&& value) { m_idHasBeenSet = true; m_id = std::move(value); }
/**
* <p>The identifier of the resource.</p>
*/
inline void SetId(const char* value) { m_idHasBeenSet = true; m_id.assign(value); }
/**
* <p>The identifier of the resource.</p>
*/
inline Resource& WithId(const Aws::String& value) { SetId(value); return *this;}
/**
* <p>The identifier of the resource.</p>
*/
inline Resource& WithId(Aws::String&& value) { SetId(std::move(value)); return *this;}
/**
* <p>The identifier of the resource.</p>
*/
inline Resource& WithId(const char* value) { SetId(value); return *this;}
/**
* <p>The email of the resource.</p>
*/
inline const Aws::String& GetEmail() const{ return m_email; }
/**
* <p>The email of the resource.</p>
*/
inline bool EmailHasBeenSet() const { return m_emailHasBeenSet; }
/**
* <p>The email of the resource.</p>
*/
inline void SetEmail(const Aws::String& value) { m_emailHasBeenSet = true; m_email = value; }
/**
* <p>The email of the resource.</p>
*/
inline void SetEmail(Aws::String&& value) { m_emailHasBeenSet = true; m_email = std::move(value); }
/**
* <p>The email of the resource.</p>
*/
inline void SetEmail(const char* value) { m_emailHasBeenSet = true; m_email.assign(value); }
/**
* <p>The email of the resource.</p>
*/
inline Resource& WithEmail(const Aws::String& value) { SetEmail(value); return *this;}
/**
* <p>The email of the resource.</p>
*/
inline Resource& WithEmail(Aws::String&& value) { SetEmail(std::move(value)); return *this;}
/**
* <p>The email of the resource.</p>
*/
inline Resource& WithEmail(const char* value) { SetEmail(value); return *this;}
/**
* <p>The name of the resource.</p>
*/
inline const Aws::String& GetName() const{ return m_name; }
/**
* <p>The name of the resource.</p>
*/
inline bool NameHasBeenSet() const { return m_nameHasBeenSet; }
/**
* <p>The name of the resource.</p>
*/
inline void SetName(const Aws::String& value) { m_nameHasBeenSet = true; m_name = value; }
/**
* <p>The name of the resource.</p>
*/
inline void SetName(Aws::String&& value) { m_nameHasBeenSet = true; m_name = std::move(value); }
/**
* <p>The name of the resource.</p>
*/
inline void SetName(const char* value) { m_nameHasBeenSet = true; m_name.assign(value); }
/**
* <p>The name of the resource.</p>
*/
inline Resource& WithName(const Aws::String& value) { SetName(value); return *this;}
/**
* <p>The name of the resource.</p>
*/
inline Resource& WithName(Aws::String&& value) { SetName(std::move(value)); return *this;}
/**
* <p>The name of the resource.</p>
*/
inline Resource& WithName(const char* value) { SetName(value); return *this;}
/**
* <p>The type of the resource: equipment or room.</p>
*/
inline const ResourceType& GetType() const{ return m_type; }
/**
* <p>The type of the resource: equipment or room.</p>
*/
inline bool TypeHasBeenSet() const { return m_typeHasBeenSet; }
/**
* <p>The type of the resource: equipment or room.</p>
*/
inline void SetType(const ResourceType& value) { m_typeHasBeenSet = true; m_type = value; }
/**
* <p>The type of the resource: equipment or room.</p>
*/
inline void SetType(ResourceType&& value) { m_typeHasBeenSet = true; m_type = std::move(value); }
/**
* <p>The type of the resource: equipment or room.</p>
*/
inline Resource& WithType(const ResourceType& value) { SetType(value); return *this;}
/**
* <p>The type of the resource: equipment or room.</p>
*/
inline Resource& WithType(ResourceType&& value) { SetType(std::move(value)); return *this;}
/**
* <p>The state of the resource, which can be ENABLED, DISABLED, or DELETED.</p>
*/
inline const EntityState& GetState() const{ return m_state; }
/**
* <p>The state of the resource, which can be ENABLED, DISABLED, or DELETED.</p>
*/
inline bool StateHasBeenSet() const { return m_stateHasBeenSet; }
/**
* <p>The state of the resource, which can be ENABLED, DISABLED, or DELETED.</p>
*/
inline void SetState(const EntityState& value) { m_stateHasBeenSet = true; m_state = value; }
/**
* <p>The state of the resource, which can be ENABLED, DISABLED, or DELETED.</p>
*/
inline void SetState(EntityState&& value) { m_stateHasBeenSet = true; m_state = std::move(value); }
/**
* <p>The state of the resource, which can be ENABLED, DISABLED, or DELETED.</p>
*/
inline Resource& WithState(const EntityState& value) { SetState(value); return *this;}
/**
* <p>The state of the resource, which can be ENABLED, DISABLED, or DELETED.</p>
*/
inline Resource& WithState(EntityState&& value) { SetState(std::move(value)); return *this;}
/**
* <p>The date indicating when the resource was enabled for Amazon WorkMail
* use.</p>
*/
inline const Aws::Utils::DateTime& GetEnabledDate() const{ return m_enabledDate; }
/**
* <p>The date indicating when the resource was enabled for Amazon WorkMail
* use.</p>
*/
inline bool EnabledDateHasBeenSet() const { return m_enabledDateHasBeenSet; }
/**
* <p>The date indicating when the resource was enabled for Amazon WorkMail
* use.</p>
*/
inline void SetEnabledDate(const Aws::Utils::DateTime& value) { m_enabledDateHasBeenSet = true; m_enabledDate = value; }
/**
* <p>The date indicating when the resource was enabled for Amazon WorkMail
* use.</p>
*/
inline void SetEnabledDate(Aws::Utils::DateTime&& value) { m_enabledDateHasBeenSet = true; m_enabledDate = std::move(value); }
/**
* <p>The date indicating when the resource was enabled for Amazon WorkMail
* use.</p>
*/
inline Resource& WithEnabledDate(const Aws::Utils::DateTime& value) { SetEnabledDate(value); return *this;}
/**
* <p>The date indicating when the resource was enabled for Amazon WorkMail
* use.</p>
*/
inline Resource& WithEnabledDate(Aws::Utils::DateTime&& value) { SetEnabledDate(std::move(value)); return *this;}
/**
* <p>The date indicating when the resource was disabled from Amazon WorkMail
* use.</p>
*/
inline const Aws::Utils::DateTime& GetDisabledDate() const{ return m_disabledDate; }
/**
* <p>The date indicating when the resource was disabled from Amazon WorkMail
* use.</p>
*/
inline bool DisabledDateHasBeenSet() const { return m_disabledDateHasBeenSet; }
/**
* <p>The date indicating when the resource was disabled from Amazon WorkMail
* use.</p>
*/
inline void SetDisabledDate(const Aws::Utils::DateTime& value) { m_disabledDateHasBeenSet = true; m_disabledDate = value; }
/**
* <p>The date indicating when the resource was disabled from Amazon WorkMail
* use.</p>
*/
inline void SetDisabledDate(Aws::Utils::DateTime&& value) { m_disabledDateHasBeenSet = true; m_disabledDate = std::move(value); }
/**
* <p>The date indicating when the resource was disabled from Amazon WorkMail
* use.</p>
*/
inline Resource& WithDisabledDate(const Aws::Utils::DateTime& value) { SetDisabledDate(value); return *this;}
/**
* <p>The date indicating when the resource was disabled from Amazon WorkMail
* use.</p>
*/
inline Resource& WithDisabledDate(Aws::Utils::DateTime&& value) { SetDisabledDate(std::move(value)); return *this;}
private:
Aws::String m_id;
bool m_idHasBeenSet;
Aws::String m_email;
bool m_emailHasBeenSet;
Aws::String m_name;
bool m_nameHasBeenSet;
ResourceType m_type;
bool m_typeHasBeenSet;
EntityState m_state;
bool m_stateHasBeenSet;
Aws::Utils::DateTime m_enabledDate;
bool m_enabledDateHasBeenSet;
Aws::Utils::DateTime m_disabledDate;
bool m_disabledDateHasBeenSet;
};
} // namespace Model
} // namespace WorkMail
} // namespace Aws
| Java |
/*
* Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
* Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
* Created By:
* Maintained By:
*/
//= require can.jquery-all
//= require models/cacheable
(function(ns, can) {

// Cacheable client-side model for a Document record (title / description / link).
can.Model.Cacheable("CMS.Models.Document", {
  root_object : "document"
  , root_collection : "documents"
  , findAll : "GET /api/documents"
  // Custom create: whitelist only the attributes the API accepts before POSTing.
  , create : function(params) {
    var _params = {
      document : {
        title : params.document.title
        , description : params.document.description
        , link : params.document.link
      }
    };
    return $.ajax({
      type : "POST"
      , "url" : "/api/documents"
      , dataType : "json"
      , data : _params
    });
  }
  // Search shaped for an autocomplete widget: presumably jQuery UI style
  // (reads request.term, feeds {label, value} pairs to the response callback)
  // - TODO confirm against the widget that calls this.
  , search : function(request, response) {
    return $.ajax({
      type : "get"
      , url : "/api/documents"
      , dataType : "json"
      , data : {s : request.term}
      , success : function(data) {
        response($.map( data, function( item ) {
          return can.extend({}, item.document, {
            // Label is "title (link_url)" when both exist, otherwise whichever is present.
            label: item.document.title
              ? item.document.title
                + (item.document.link_url
                   ? " (" + item.document.link_url + ")"
                   : "")
              : item.document.link_url
            , value: item.document.id
          });
        }));
      }
    });
  }
}, {
  init : function () {
    this._super && this._super();
    // this.bind("change", function(ev, attr, how, newVal, oldVal) {
    //   var obj;
    //   if(obj = CMS.Models.ObjectDocument.findInCacheById(this.id) && attr !== "id") {
    //     obj.attr(attr, newVal);
    //   }
    // });
    var that = this;
    // Normalize server-sent nulls to undefined on every attribute.
    this.each(function(value, name) {
      if (value === null)
        that.attr(name, undefined);
    });
  }
});

// Join model linking a Document to an arbitrary "documentable" object.
can.Model.Cacheable("CMS.Models.ObjectDocument", {
  root_object : "object_document"
  , root_collection : "object_documents"
  , findAll: "GET /api/object_documents"
  , create: "POST /api/object_documents"
  , destroy : "DELETE /api/object_documents/{id}"
}, {
  init : function() {
    var _super = this._super;
    // Resolve the document/documentable references into model instances. Runs once
    // at construction and again on "created", when the server has filled in ids.
    function reinit() {
      var that = this;
      typeof _super === "function" && _super.call(this);
      // Ids may arrive either flattened (document_id) or nested (document.id).
      this.attr("document", CMS.Models.get_instance(
        "Document", this.document_id || (this.document && this.document.id)));
      this.attr("documentable", CMS.Models.get_instance(
        this.documentable_type || (this.documentable && this.documentable.type),
        this.documentable_id || (this.documentable && this.documentable.id)));
      /*this.attr(
        "document"
        , CMS.Models.Document.findInCacheById(this.document_id)
        || new CMS.Models.Document(this.document && this.document.serialize ? this.document.serialize() : this.document));
        */
      // Drop null attributes entirely (unlike Document.init, which keeps them as undefined).
      this.each(function(value, name) {
        if (value === null)
          that.removeAttr(name);
      });
    }
    this.bind("created", can.proxy(reinit, this));
    reinit.call(this);
  }
});

})(this, can);
| Java |
/**
*
* Copyright 2017 Paul Schaub, 2020 Florian Schmaus
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.omemo;
import static org.jivesoftware.smackx.omemo.util.OmemoConstants.OMEMO_NAMESPACE_V_AXOLOTL;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.WeakHashMap;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jivesoftware.smack.ConnectionListener;
import org.jivesoftware.smack.Manager;
import org.jivesoftware.smack.SmackException;
import org.jivesoftware.smack.SmackException.NotConnectedException;
import org.jivesoftware.smack.XMPPConnection;
import org.jivesoftware.smack.XMPPException;
import org.jivesoftware.smack.packet.Message;
import org.jivesoftware.smack.packet.MessageBuilder;
import org.jivesoftware.smack.packet.Stanza;
import org.jivesoftware.smack.util.Async;
import org.jivesoftware.smackx.carbons.CarbonManager;
import org.jivesoftware.smackx.carbons.packet.CarbonExtension;
import org.jivesoftware.smackx.disco.ServiceDiscoveryManager;
import org.jivesoftware.smackx.hints.element.StoreHint;
import org.jivesoftware.smackx.mam.MamManager;
import org.jivesoftware.smackx.muc.MultiUserChat;
import org.jivesoftware.smackx.muc.MultiUserChatManager;
import org.jivesoftware.smackx.muc.RoomInfo;
import org.jivesoftware.smackx.omemo.element.OmemoBundleElement;
import org.jivesoftware.smackx.omemo.element.OmemoDeviceListElement;
import org.jivesoftware.smackx.omemo.element.OmemoDeviceListElement_VAxolotl;
import org.jivesoftware.smackx.omemo.element.OmemoElement;
import org.jivesoftware.smackx.omemo.exceptions.CannotEstablishOmemoSessionException;
import org.jivesoftware.smackx.omemo.exceptions.CorruptedOmemoKeyException;
import org.jivesoftware.smackx.omemo.exceptions.CryptoFailedException;
import org.jivesoftware.smackx.omemo.exceptions.NoOmemoSupportException;
import org.jivesoftware.smackx.omemo.exceptions.NoRawSessionException;
import org.jivesoftware.smackx.omemo.exceptions.UndecidedOmemoIdentityException;
import org.jivesoftware.smackx.omemo.internal.OmemoCachedDeviceList;
import org.jivesoftware.smackx.omemo.internal.OmemoDevice;
import org.jivesoftware.smackx.omemo.listener.OmemoMessageListener;
import org.jivesoftware.smackx.omemo.listener.OmemoMucMessageListener;
import org.jivesoftware.smackx.omemo.trust.OmemoFingerprint;
import org.jivesoftware.smackx.omemo.trust.OmemoTrustCallback;
import org.jivesoftware.smackx.omemo.trust.TrustState;
import org.jivesoftware.smackx.omemo.util.MessageOrOmemoMessage;
import org.jivesoftware.smackx.omemo.util.OmemoConstants;
import org.jivesoftware.smackx.pep.PepEventListener;
import org.jivesoftware.smackx.pep.PepManager;
import org.jivesoftware.smackx.pubsub.PubSubException;
import org.jivesoftware.smackx.pubsub.PubSubManager;
import org.jivesoftware.smackx.pubsub.packet.PubSub;
import org.jxmpp.jid.BareJid;
import org.jxmpp.jid.DomainBareJid;
import org.jxmpp.jid.EntityBareJid;
import org.jxmpp.jid.EntityFullJid;
/**
* Manager that allows sending messages encrypted with OMEMO.
* This class also provides some methods useful for a client that implements OMEMO.
*
* @author Paul Schaub
*/
public final class OmemoManager extends Manager {
private static final Logger LOGGER = Logger.getLogger(OmemoManager.class.getName());
private static final Integer UNKNOWN_DEVICE_ID = -1;
private static final WeakHashMap<XMPPConnection, TreeMap<Integer, OmemoManager>> INSTANCES = new WeakHashMap<>();
private final OmemoService<?, ?, ?, ?, ?, ?, ?, ?, ?> service;
private final HashSet<OmemoMessageListener> omemoMessageListeners = new HashSet<>();
private final HashSet<OmemoMucMessageListener> omemoMucMessageListeners = new HashSet<>();
private final PepManager pepManager;
private OmemoTrustCallback trustCallback;
private BareJid ownJid;
private Integer deviceId;
/**
 * Private constructor. Use one of the {@code getInstanceFor} factory methods instead.
 *
 * @param connection connection
 * @param deviceId deviceId (may be UNKNOWN_DEVICE_ID until authentication determines a real one)
 */
private OmemoManager(XMPPConnection connection, Integer deviceId) {
super(connection);
service = OmemoService.getInstance();
pepManager = PepManager.getInstanceFor(connection);
this.deviceId = deviceId;
// Our bare JID (and a definite deviceId) can only be determined once the connection
// is authenticated. Do it now if possible, otherwise defer to the callback.
if (connection.isAuthenticated()) {
initBareJidAndDeviceId(this);
} else {
connection.addConnectionListener(new ConnectionListener() {
@Override
public void authenticated(XMPPConnection connection, boolean resumed) {
initBareJidAndDeviceId(OmemoManager.this);
}
});
}
// Register this manager's ratchet with the service before any stanza can arrive.
service.registerRatchetForManager(this);
// StanzaListeners
resumeStanzaAndPEPListeners();
}
/**
 * Return an OmemoManager instance for the given connection and deviceId.
 * An already-existing manager for this (connection, deviceId) pair is reused; otherwise a
 * new one is created and cached.
 *
 * @param connection XmppConnection.
 * @param deviceId MUST NOT be null and MUST be greater than 0.
 *
 * @return OmemoManager instance for the given connection and deviceId.
 */
public static synchronized OmemoManager getInstanceFor(XMPPConnection connection, Integer deviceId) {
    if (deviceId == null || deviceId < 1) {
        throw new IllegalArgumentException("DeviceId MUST NOT be null and MUST be greater than 0.");
    }
    // One map of deviceId -> manager per connection.
    TreeMap<Integer, OmemoManager> byDeviceId = INSTANCES.get(connection);
    if (byDeviceId == null) {
        byDeviceId = new TreeMap<>();
        INSTANCES.put(connection, byDeviceId);
    }
    OmemoManager existing = byDeviceId.get(deviceId);
    if (existing != null) {
        return existing;
    }
    OmemoManager fresh = new OmemoManager(connection, deviceId);
    byDeviceId.put(deviceId, fresh);
    return fresh;
}
/**
 * Returns an OmemoManager instance for the given connection. If there was one manager for the
 * connection before, return it. If there were multiple managers before, return the one with the
 * lowest deviceId. If there was no manager before, return a new one. As soon as the connection
 * gets authenticated, the manager will look for local deviceIDs and select the lowest one as
 * its id. If there are no local deviceIds, the manager will assign itself a random id.
 *
 * @param connection XmppConnection.
 *
 * @return OmemoManager instance for the given connection and a determined deviceId.
 */
public static synchronized OmemoManager getInstanceFor(XMPPConnection connection) {
    TreeMap<Integer, OmemoManager> byDeviceId = INSTANCES.get(connection);
    if (byDeviceId == null) {
        byDeviceId = new TreeMap<>();
        INSTANCES.put(connection, byDeviceId);
    }
    // Reuse the manager with the lowest deviceId, if any exists.
    if (!byDeviceId.isEmpty()) {
        return byDeviceId.firstEntry().getValue();
    }
    // No manager yet: create one with a placeholder id until authentication resolves it.
    OmemoManager manager = new OmemoManager(connection, UNKNOWN_DEVICE_ID);
    byDeviceId.put(UNKNOWN_DEVICE_ID, manager);
    return manager;
}
/**
 * Set a TrustCallback for this particular OmemoManager.
 * TrustCallbacks are used to query and modify trust decisions.
 * May only be called once per manager.
 *
 * @param callback trustCallback.
 */
public void setTrustCallback(OmemoTrustCallback callback) {
    // The callback is write-once; replacing it later is an error.
    if (trustCallback == null) {
        trustCallback = callback;
        return;
    }
    throw new IllegalStateException("TrustCallback can only be set once.");
}
/**
 * Return the TrustCallback of this manager.
 * Package-private, so it is only accessible from within the OMEMO implementation package.
 *
 * @return callback that is used for trust decisions, or null if none was set yet.
 */
OmemoTrustCallback getTrustCallback() {
return trustCallback;
}
/**
 * Initializes the OmemoManager. This method must be called before the manager can be used.
 * Requires an authenticated connection and a previously set TrustCallback
 * (see {@link #setTrustCallback(OmemoTrustCallback)}).
 *
 * @throws CorruptedOmemoKeyException if the OMEMO key is corrupted.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 * @throws IOException if an I/O error occurred.
 */
public synchronized void initialize()
throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, InterruptedException,
SmackException.NoResponseException, SmackException.NotConnectedException, XMPPException.XMPPErrorException,
PubSubException.NotALeafNodeException, IOException {
if (!connection().isAuthenticated()) {
throw new SmackException.NotLoggedInException();
}
// A trust callback must be in place before initialization; trust decisions
// cannot be made without it.
if (getTrustCallback() == null) {
throw new IllegalStateException("No TrustCallback set.");
}
getOmemoService().init(new LoggedInOmemoManager(this));
}
/**
 * Initialize the manager without blocking. Once the manager is successfully initialized, the
 * finishedCallback will be notified. It will also get notified, if an error occurs.
 *
 * @param finishedCallback callback that gets called once the manager is initialized.
 */
public void initializeAsync(final InitializationFinishedCallback finishedCallback) {
    Runnable initTask = new Runnable() {
        @Override
        public void run() {
            // Report either success or the causing exception to the callback.
            try {
                initialize();
                finishedCallback.initializationFinished(OmemoManager.this);
            } catch (Exception e) {
                finishedCallback.initializationFailed(e);
            }
        }
    };
    Async.go(initTask);
}
/**
 * Return a set of all OMEMO capable devices of a contact.
 * Note, that this method does not explicitly refresh the device list of the contact, so it
 * might be outdated.
 *
 * @see #requestDeviceListUpdateFor(BareJid)
 *
 * @param contact contact we want to get a set of device of.
 * @return set of known devices of that contact.
 *
 * @throws IOException if an I/O error occurred.
 */
public Set<OmemoDevice> getDevicesOf(BareJid contact) throws IOException {
    OmemoCachedDeviceList cached =
            getOmemoService().getOmemoStoreBackend().loadCachedDeviceList(getOwnDevice(), contact);
    // Wrap each active deviceId into an OmemoDevice for the contact.
    Set<OmemoDevice> knownDevices = new HashSet<>();
    for (int activeId : cached.getActiveDevices()) {
        knownDevices.add(new OmemoDevice(contact, activeId));
    }
    return knownDevices;
}
/**
 * OMEMO encrypt a cleartext message for a single recipient.
 * Note that this method does NOT set the 'to' attribute of the message.
 *
 * @param recipient recipients bareJid
 * @param message text to encrypt
 * @return encrypted message
 *
 * @throws CryptoFailedException when something crypto related fails
 * @throws UndecidedOmemoIdentityException When there are undecided devices
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws IOException if an I/O error occurred.
 */
public OmemoMessage.Sent encrypt(BareJid recipient, String message)
        throws CryptoFailedException, UndecidedOmemoIdentityException,
        InterruptedException, SmackException.NotConnectedException,
        SmackException.NoResponseException, SmackException.NotLoggedInException, IOException {
    // Delegate to the multi-recipient variant with a one-element set.
    return encrypt(new HashSet<>(Arrays.asList(recipient)), message);
}
/**
 * OMEMO encrypt a cleartext message for multiple recipients.
 * The message is also encrypted for all of our own known devices, so it can be read on
 * the user's other clients.
 *
 * @param recipients recipients barejids
 * @param message text to encrypt
 * @return encrypted message.
 *
 * @throws CryptoFailedException When something crypto related fails
 * @throws UndecidedOmemoIdentityException When there are undecided devices.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoMessage.Sent encrypt(Set<BareJid> recipients, String message)
throws CryptoFailedException, UndecidedOmemoIdentityException,
InterruptedException, SmackException.NotConnectedException,
SmackException.NoResponseException, SmackException.NotLoggedInException, IOException {
// Guard object witnessing that this manager belongs to an authenticated connection.
LoggedInOmemoManager guard = new LoggedInOmemoManager(this);
// Start with our own devices, then add every recipient's known devices.
Set<OmemoDevice> devices = getDevicesOf(getOwnJid());
for (BareJid recipient : recipients) {
devices.addAll(getDevicesOf(recipient));
}
return service.createOmemoMessage(guard, devices, message);
}
/**
 * Encrypt a message for all recipients in the MultiUserChat.
 *
 * @param muc multiUserChat
 * @param message message to send
 * @return encrypted message
 *
 * @throws UndecidedOmemoIdentityException when there are undecided devices.
 * @throws CryptoFailedException if the OMEMO cryptography failed.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws NoOmemoSupportException When the muc doesn't support OMEMO.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoMessage.Sent encrypt(MultiUserChat muc, String message)
        throws UndecidedOmemoIdentityException, CryptoFailedException,
        XMPPException.XMPPErrorException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException, NoOmemoSupportException,
        SmackException.NotLoggedInException, IOException {
    // OMEMO in MUCs requires a non-anonymous, members-only room.
    if (!multiUserChatSupportsOmemo(muc)) {
        throw new NoOmemoSupportException();
    }
    // Collect the real bare JID of every occupant and encrypt for all of them.
    Set<BareJid> occupantJids = new HashSet<>();
    for (EntityFullJid occupant : muc.getOccupants()) {
        BareJid realJid = muc.getOccupant(occupant).getJid().asBareJid();
        occupantJids.add(realJid);
    }
    return encrypt(occupantJids, message);
}
/**
 * Manually decrypt an OmemoElement.
 * This method should only be used for use-cases, where the internal listeners don't pick up
 * on an incoming message (for example MAM query results).
 *
 * @param sender bareJid of the message sender (must be the jid of the contact who sent the message)
 * @param omemoElement omemoElement
 * @return decrypted OmemoMessage
 *
 * @throws SmackException.NotLoggedInException if the Manager is not authenticated
 * @throws CorruptedOmemoKeyException if our or their key is corrupted
 * @throws NoRawSessionException if the message was not a preKeyMessage, but we had no session with the contact
 * @throws CryptoFailedException if decryption fails
 * @throws IOException if an I/O error occurred.
 */
public OmemoMessage.Received decrypt(BareJid sender, OmemoElement omemoElement)
        throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, NoRawSessionException,
        CryptoFailedException, IOException {
    // Delegate straight to the service, guarded by an authenticated-manager wrapper.
    return getOmemoService().decryptMessage(new LoggedInOmemoManager(this), sender, omemoElement);
}
/**
 * Decrypt messages from a MAM query.
 *
 * @param mamQuery The MAM query
 * @return list of decrypted OmemoMessages
 *
 * @throws SmackException.NotLoggedInException if the Manager is not authenticated.
 * @throws IOException if an I/O error occurred.
 */
public List<MessageOrOmemoMessage> decryptMamQueryResult(MamManager.MamQuery mamQuery)
throws SmackException.NotLoggedInException, IOException {
// Copy into a fresh ArrayList so callers receive an independent, mutable list.
return new ArrayList<>(getOmemoService().decryptMamQueryResult(new LoggedInOmemoManager(this), mamQuery));
}
/**
 * Trust that a fingerprint belongs to an OmemoDevice.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the
 * device and must be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 */
public void trustOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    OmemoTrustCallback callback = trustCallback;
    if (callback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    callback.setTrust(device, fingerprint, TrustState.trusted);
}
/**
 * Distrust the fingerprint/OmemoDevice tuple.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the
 * device and must be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 */
public void distrustOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    OmemoTrustCallback callback = trustCallback;
    if (callback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    callback.setTrust(device, fingerprint, TrustState.untrusted);
}
/**
 * Returns true, if the fingerprint/OmemoDevice tuple is trusted, otherwise false.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the
 * device and must be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 * @return <code>true</code> if this is a trusted OMEMO identity.
 */
public boolean isTrustedOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    OmemoTrustCallback callback = trustCallback;
    if (callback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    TrustState state = callback.getTrust(device, fingerprint);
    return state == TrustState.trusted;
}
/**
 * Returns true, if the fingerprint/OmemoDevice tuple is decided by the user.
 * The fingerprint must be the lowercase, hexadecimal fingerprint of the identityKey of the
 * device and must be of length 64.
 *
 * @param device device
 * @param fingerprint fingerprint
 * @return <code>true</code> if the trust is decided for the identity.
 */
public boolean isDecidedOmemoIdentity(OmemoDevice device, OmemoFingerprint fingerprint) {
    OmemoTrustCallback callback = trustCallback;
    if (callback == null) {
        throw new IllegalStateException("No TrustCallback set.");
    }
    TrustState state = callback.getTrust(device, fingerprint);
    return state != TrustState.undecided;
}
/**
 * Send a ratchet update message. This can be used to advance the ratchet of a session in order to maintain forward
 * secrecy.
 *
 * @param recipient recipient
 *
 * @throws CorruptedOmemoKeyException When the used identityKeys are corrupted
 * @throws CryptoFailedException When something fails with the crypto
 * @throws CannotEstablishOmemoSessionException When we can't establish a session with the recipient
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws NoSuchAlgorithmException if no such algorithm is available.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws IOException if an I/O error occurred.
 */
public synchronized void sendRatchetUpdateMessage(OmemoDevice recipient)
throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, InterruptedException,
SmackException.NoResponseException, NoSuchAlgorithmException, SmackException.NotConnectedException,
CryptoFailedException, CannotEstablishOmemoSessionException, IOException {
XMPPConnection connection = connection();
// Build a message addressed to the recipient device's JID carrying only the
// ratchet update OMEMO element (no user-visible body).
MessageBuilder message = connection.getStanzaFactory()
.buildMessageStanza()
.to(recipient.getJid());
OmemoElement element = getOmemoService().createRatchetUpdateElement(new LoggedInOmemoManager(this), recipient);
message.addExtension(element);
// Set MAM Storage hint so the update is archived and can advance offline ratchets.
StoreHint.set(message);
connection.sendStanza(message.build());
}
/**
 * Returns true, if the contact has any active devices published in a deviceList.
 * This refreshes the contact's device list from the server before checking.
 *
 * @param contact contact
 * @return true if contact has at least one OMEMO capable device.
 *
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws IOException if an I/O error occurred.
 */
public synchronized boolean contactSupportsOmemo(BareJid contact)
        throws InterruptedException, PubSubException.NotALeafNodeException, XMPPException.XMPPErrorException,
        SmackException.NotConnectedException, SmackException.NoResponseException, IOException {
    OmemoCachedDeviceList refreshed =
            getOmemoService().refreshDeviceList(connection(), getOwnDevice(), contact);
    return refreshed.getActiveDevices().size() > 0;
}
/**
 * Returns true, if the MUC with the EntityBareJid multiUserChat is non-anonymous and members
 * only (prerequisite for OMEMO encryption in MUC).
 *
 * @param multiUserChat MUC
 * @return true if chat supports OMEMO
 *
 * @throws XMPPException.XMPPErrorException if there was an XMPP protocol level error
 * @throws SmackException.NotConnectedException if the connection is not connected
 * @throws InterruptedException if the thread is interrupted
 * @throws SmackException.NoResponseException if the server does not respond
 */
public boolean multiUserChatSupportsOmemo(MultiUserChat multiUserChat)
        throws XMPPException.XMPPErrorException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException {
    // Query the room's disco info and require both properties.
    RoomInfo roomInfo = MultiUserChatManager.getInstanceFor(connection())
            .getRoomInfo(multiUserChat.getRoom());
    if (!roomInfo.isNonanonymous()) {
        return false;
    }
    return roomInfo.isMembersOnly();
}
/**
 * Returns true, if the Server supports PEP.
 *
 * NOTE(review): despite the method name, this only checks whether the server advertises the
 * generic PubSub feature namespace in its disco#info — confirm this is the intended proxy
 * for OMEMO/PEP support.
 *
 * @param connection XMPPConnection
 * @param server domainBareJid of the server to test
 * @return true if server supports pep
 *
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 */
public static boolean serverSupportsOmemo(XMPPConnection connection, DomainBareJid server)
throws XMPPException.XMPPErrorException, SmackException.NotConnectedException, InterruptedException,
SmackException.NoResponseException {
return ServiceDiscoveryManager.getInstanceFor(connection)
.discoverInfo(server).containsFeature(PubSub.NAMESPACE);
}
/**
 * Return the fingerprint of our identity key.
 *
 * @return our own OMEMO fingerprint
 *
 * @throws SmackException.NotLoggedInException if we don't know our bareJid yet.
 * @throws CorruptedOmemoKeyException if our identityKey is corrupted.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoFingerprint getOwnFingerprint()
        throws SmackException.NotLoggedInException, CorruptedOmemoKeyException, IOException {
    // Our own device is only known after authentication determined our bare JID.
    OmemoDevice us = getOwnDevice();
    if (us == null) {
        throw new SmackException.NotLoggedInException();
    }
    return getOmemoService().getOmemoStoreBackend().getFingerprint(us);
}
/**
 * Get the fingerprint of a contacts device.
 *
 * @param device contacts OmemoDevice
 * @return fingerprint of the given OMEMO device.
 *
 * @throws CannotEstablishOmemoSessionException if we have no session yet, and are unable to create one.
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws CorruptedOmemoKeyException if the copy of the fingerprint we have is corrupted.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 */
public synchronized OmemoFingerprint getFingerprint(OmemoDevice device)
        throws CannotEstablishOmemoSessionException, SmackException.NotLoggedInException,
        CorruptedOmemoKeyException, SmackException.NotConnectedException, InterruptedException,
        SmackException.NoResponseException, IOException {
    if (getOwnJid() == null) {
        throw new SmackException.NotLoggedInException();
    }
    // Our own fingerprint comes straight from the local store, no session needed.
    if (device.equals(getOwnDevice())) {
        return getOwnFingerprint();
    }
    LoggedInOmemoManager guard = new LoggedInOmemoManager(this);
    return getOmemoService().getOmemoStoreBackend()
            .getFingerprintAndMaybeBuildSession(guard, device);
}
/**
 * Return all OmemoFingerprints of active devices of a contact.
 * TODO: Make more fail-safe
 *
 * @param contact contact
 * @return Map of all active devices of the contact and their fingerprints.
 *
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws CorruptedOmemoKeyException if the OMEMO key is corrupted.
 * @throws CannotEstablishOmemoSessionException if no OMEMO session could be established.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 */
public synchronized HashMap<OmemoDevice, OmemoFingerprint> getActiveFingerprints(BareJid contact)
throws SmackException.NotLoggedInException, CorruptedOmemoKeyException,
CannotEstablishOmemoSessionException, SmackException.NotConnectedException, InterruptedException,
SmackException.NoResponseException, IOException {
if (getOwnJid() == null) {
throw new SmackException.NotLoggedInException();
}
HashMap<OmemoDevice, OmemoFingerprint> fingerprints = new HashMap<>();
// Uses the cached device list; see requestDeviceListUpdateFor to refresh it first.
OmemoCachedDeviceList deviceList = getOmemoService().getOmemoStoreBackend().loadCachedDeviceList(getOwnDevice(),
contact);
for (int id : deviceList.getActiveDevices()) {
OmemoDevice device = new OmemoDevice(contact, id);
// getFingerprint may build a session with the device on the fly.
OmemoFingerprint fingerprint = getFingerprint(device);
// Devices without an obtainable fingerprint are silently skipped.
if (fingerprint != null) {
fingerprints.put(device, fingerprint);
}
}
return fingerprints;
}
/**
 * Add an OmemoMessageListener. This listener will be informed about incoming OMEMO messages
 * (as well as KeyTransportMessages) and OMEMO encrypted message carbons.
 * Listeners are kept in a set, so adding the same listener twice has no effect.
 *
 * @param listener OmemoMessageListener
 */
public void addOmemoMessageListener(OmemoMessageListener listener) {
omemoMessageListeners.add(listener);
}
/**
 * Remove an OmemoMessageListener.
 * Removing a listener that was never added is a no-op.
 *
 * @param listener OmemoMessageListener
 */
public void removeOmemoMessageListener(OmemoMessageListener listener) {
omemoMessageListeners.remove(listener);
}
/**
 * Add an OmemoMucMessageListener. This listener will be informed about incoming OMEMO encrypted MUC messages.
 * Listeners are kept in a set, so adding the same listener twice has no effect.
 *
 * @param listener OmemoMucMessageListener.
 */
public void addOmemoMucMessageListener(OmemoMucMessageListener listener) {
omemoMucMessageListeners.add(listener);
}
/**
 * Remove an OmemoMucMessageListener.
 * Removing a listener that was never added is a no-op.
 *
 * @param listener OmemoMucMessageListener
 */
public void removeOmemoMucMessageListener(OmemoMucMessageListener listener) {
omemoMucMessageListeners.remove(listener);
}
/**
 * Request a deviceList update from contact contact.
 * The refreshed list ends up in the local cache; this method does not return it directly.
 *
 * @param contact contact we want to obtain the deviceList from.
 *
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 */
public synchronized void requestDeviceListUpdateFor(BareJid contact)
throws InterruptedException, PubSubException.NotALeafNodeException, XMPPException.XMPPErrorException,
SmackException.NotConnectedException, SmackException.NoResponseException, IOException {
// The returned list is deliberately discarded; callers read it from the cache.
getOmemoService().refreshDeviceList(connection(), getOwnDevice(), contact);
}
/**
 * Publish a new device list with just our own deviceId in it.
 *
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws XMPPException.XMPPErrorException if there was an XMPP error returned.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws IOException if an I/O error occurred.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 */
public void purgeDeviceList()
throws SmackException.NotLoggedInException, InterruptedException, XMPPException.XMPPErrorException,
SmackException.NotConnectedException, SmackException.NoResponseException, IOException, PubSubException.NotALeafNodeException {
// Delegates to the service; requires an authenticated manager.
getOmemoService().purgeDeviceList(new LoggedInOmemoManager(this));
}
/**
 * Remove all of this user's OMEMO data from the server: empty and delete the bundle node of
 * every cached own device, then empty and delete the device list node itself.
 * Errors encountered along the way are collected and returned instead of aborting the purge.
 *
 * @return list of exceptions that occurred while purging (empty on full success).
 *
 * @throws NotConnectedException if the XMPP connection is not connected.
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws IOException if an I/O error occurred.
 */
public List<Exception> purgeEverything() throws NotConnectedException, InterruptedException, IOException {
List<Exception> exceptions = new ArrayList<>(5);
PubSubManager pm = PubSubManager.getInstanceFor(getConnection(), getOwnJid());
// Refresh our own device list first, so the cache read below is as complete as possible.
try {
requestDeviceListUpdateFor(getOwnJid());
} catch (SmackException.NoResponseException | PubSubException.NotALeafNodeException
| XMPPException.XMPPErrorException e) {
exceptions.add(e);
}
OmemoCachedDeviceList deviceList = OmemoService.getInstance().getOmemoStoreBackend()
.loadCachedDeviceList(getOwnDevice(), getOwnJid());
// Empty and delete the bundle node of every known device (active and inactive).
for (int id : deviceList.getAllDevices()) {
try {
pm.getLeafNode(OmemoConstants.PEP_NODE_BUNDLE_FROM_DEVICE_ID(id)).deleteAllItems();
} catch (SmackException.NoResponseException | PubSubException.NotALeafNodeException
| XMPPException.XMPPErrorException | PubSubException.NotAPubSubNodeException e) {
exceptions.add(e);
}
try {
pm.deleteNode(OmemoConstants.PEP_NODE_BUNDLE_FROM_DEVICE_ID(id));
} catch (SmackException.NoResponseException | XMPPException.XMPPErrorException e) {
exceptions.add(e);
}
}
// Finally empty and delete the device list node itself.
try {
pm.getLeafNode(OmemoConstants.PEP_NODE_DEVICE_LIST).deleteAllItems();
} catch (SmackException.NoResponseException | PubSubException.NotALeafNodeException
| XMPPException.XMPPErrorException | PubSubException.NotAPubSubNodeException e) {
exceptions.add(e);
}
try {
pm.deleteNode(OmemoConstants.PEP_NODE_DEVICE_LIST);
} catch (SmackException.NoResponseException | XMPPException.XMPPErrorException e) {
exceptions.add(e);
}
return exceptions;
}
/**
 * Rotate the signedPreKey published in our OmemoBundle and republish it. This should be done every now and
 * then (7-14 days). The old signedPreKey should be kept for some more time (a month or so) to enable decryption
 * of messages that have been sent since the key was changed.
 *
 * @throws CorruptedOmemoKeyException When the IdentityKeyPair is damaged.
 * @throws InterruptedException XMPP error
 * @throws XMPPException.XMPPErrorException XMPP error
 * @throws SmackException.NotConnectedException XMPP error
 * @throws SmackException.NoResponseException XMPP error
 * @throws SmackException.NotLoggedInException if the XMPP connection is not authenticated.
 * @throws IOException if an I/O error occurred.
 * @throws PubSubException.NotALeafNodeException if a PubSub leaf node operation was attempted on a non-leaf node.
 */
public synchronized void rotateSignedPreKey()
throws CorruptedOmemoKeyException, SmackException.NotLoggedInException, XMPPException.XMPPErrorException,
SmackException.NotConnectedException, InterruptedException, SmackException.NoResponseException,
IOException, PubSubException.NotALeafNodeException {
if (!connection().isAuthenticated()) {
throw new SmackException.NotLoggedInException();
}
// generate a fresh signedPreKey in the local store
getOmemoService().getOmemoStoreBackend().changeSignedPreKey(getOwnDevice());
// repack our bundle with the new key and publish it to our bundle PEP node
OmemoBundleElement bundle = getOmemoService().getOmemoStoreBackend().packOmemoBundle(getOwnDevice());
OmemoService.publishBundle(connection(), getOwnDevice(), bundle);
}
/**
 * Check whether the given stanza carries the OMEMO 'encrypted' extension element.
 *
 * @param stanza stanza to inspect
 * @return true if the stanza has the 'encrypted' extension
 */
static boolean stanzaContainsOmemoElement(Stanza stanza) {
    boolean hasEncryptedExtension = stanza.hasExtension(OmemoElement.NAME_ENCRYPTED, OMEMO_NAMESPACE_V_AXOLOTL);
    return hasEncryptedExtension;
}
/**
 * Throw an IllegalStateException if no OmemoService is set.
 * Guards every access to the {@code service} field.
 */
private void throwIfNoServiceSet() {
    if (service == null) {
        throw new IllegalStateException("No OmemoService set in OmemoManager.");
    }
}
/**
 * Returns a pseudo random number from the interval [1, Integer.MAX_VALUE].
 *
 * @return a random deviceId.
 */
public static int randomDeviceId() {
    // nextInt(bound) yields [0, bound - 1]; adding 1 shifts the range to [1, Integer.MAX_VALUE].
    // The previous bound of (Integer.MAX_VALUE - 1) could never produce Integer.MAX_VALUE,
    // contradicting the documented interval.
    return new Random().nextInt(Integer.MAX_VALUE) + 1;
}
/**
 * Return the BareJid of the user, lazily resolving it from the connection once authenticated.
 *
 * @return our own bare JID, or null if it cannot be determined yet.
 */
public BareJid getOwnJid() {
    if (ownJid != null) {
        return ownJid;
    }
    if (connection().isAuthenticated()) {
        ownJid = connection().getUser().asBareJid();
    }
    return ownJid;
}
/**
 * Return the deviceId of this OmemoManager.
 * May still be the UNKNOWN_DEVICE_ID placeholder until a real id is assigned via
 * {@link #setDeviceId(int)}.
 *
 * @return this OmemoManagers deviceId.
 */
public synchronized Integer getDeviceId() {
    return deviceId;
}
/**
 * Return the OmemoDevice of the user, combining our bare JID and deviceId.
 *
 * @return our own OmemoDevice, or null while the JID is unknown.
 */
public synchronized OmemoDevice getOwnDevice() {
    BareJid ourJid = getOwnJid();
    return ourJid == null ? null : new OmemoDevice(ourJid, getDeviceId());
}
/**
 * Set the deviceId of the manager to nDeviceId.
 *
 * @param nDeviceId new deviceId
 */
synchronized void setDeviceId(int nDeviceId) {
    // Move this instance inside the HashMaps: the per-connection map is keyed by deviceId,
    // so the old entry must be removed before re-registering under the new id.
    INSTANCES.get(connection()).remove(getDeviceId());
    INSTANCES.get(connection()).put(nDeviceId, this);
    this.deviceId = nDeviceId;
}
/**
 * Dispatch a received and decrypted OmemoMessage to all registered OmemoMessageListeners.
 *
 * @param stanza original stanza
 * @param decryptedMessage decrypted OmemoMessage.
 */
void notifyOmemoMessageReceived(Stanza stanza, OmemoMessage.Received decryptedMessage) {
    for (OmemoMessageListener listener : omemoMessageListeners) {
        listener.onOmemoMessageReceived(stanza, decryptedMessage);
    }
}
/**
 * Dispatch an incoming decrypted MUC message to all registered OmemoMucMessageListeners.
 *
 * @param muc MultiUserChat the message was received in.
 * @param stanza Original Stanza.
 * @param decryptedMessage Decrypted OmemoMessage.
 */
void notifyOmemoMucMessageReceived(MultiUserChat muc,
                                   Stanza stanza,
                                   OmemoMessage.Received decryptedMessage) {
    for (OmemoMucMessageListener listener : omemoMucMessageListeners) {
        listener.onOmemoMucMessageReceived(muc, stanza, decryptedMessage);
    }
}
/**
 * Dispatch an incoming OMEMO encrypted Carbon Copy to all registered OmemoMessageListeners.
 * Remember: If you want to receive OMEMO encrypted carbon copies, you have to enable carbons using
 * {@link CarbonManager#enableCarbons()}.
 *
 * @param direction direction of the carbon copy
 * @param carbonCopy carbon copy itself
 * @param wrappingMessage wrapping message
 * @param decryptedCarbonCopy decrypted carbon copy OMEMO element
 */
void notifyOmemoCarbonCopyReceived(CarbonExtension.Direction direction,
                                   Message carbonCopy,
                                   Message wrappingMessage,
                                   OmemoMessage.Received decryptedCarbonCopy) {
    for (OmemoMessageListener listener : omemoMessageListeners) {
        listener.onOmemoCarbonCopyReceived(direction, carbonCopy, wrappingMessage, decryptedCarbonCopy);
    }
}
/**
 * Register stanza listeners needed for OMEMO.
 * This method is called automatically in the constructor and should only be used to restore the previous state
 * after {@link #stopStanzaAndPEPListeners()} was called.
 */
public void resumeStanzaAndPEPListeners() {
    CarbonManager carbonManager = CarbonManager.getInstanceFor(connection());

    // Remove listeners to avoid them getting added twice (makes this method idempotent)
    connection().removeAsyncStanzaListener(this::internalOmemoMessageStanzaListener);
    carbonManager.removeCarbonCopyReceivedListener(this::internalOmemoCarbonCopyListener);

    // Add listeners
    pepManager.addPepEventListener(OmemoConstants.PEP_NODE_DEVICE_LIST, OmemoDeviceListElement.class, pepOmemoDeviceListEventListener);
    connection().addAsyncStanzaListener(this::internalOmemoMessageStanzaListener, OmemoManager::isOmemoMessage);
    carbonManager.addCarbonCopyReceivedListener(this::internalOmemoCarbonCopyListener);
}
/**
 * Remove the active stanza and PEP listeners needed for OMEMO.
 * Use {@code resumeStanzaAndPEPListeners()} to re-register them.
 */
public void stopStanzaAndPEPListeners() {
    CarbonManager carbonManager = CarbonManager.getInstanceFor(connection());
    pepManager.removePepEventListener(pepOmemoDeviceListEventListener);
    connection().removeAsyncStanzaListener(this::internalOmemoMessageStanzaListener);
    carbonManager.removeCarbonCopyReceivedListener(this::internalOmemoCarbonCopyListener);
}
/**
 * Build a fresh session with a contacts device.
 * This might come in handy if a session is broken.
 *
 * @param contactsDevice OmemoDevice of a contact.
 *
 * @throws InterruptedException if the calling thread was interrupted.
 * @throws SmackException.NoResponseException if there was no response from the remote entity.
 * @throws CorruptedOmemoKeyException if our or their identityKey is corrupted.
 * @throws SmackException.NotConnectedException if the XMPP connection is not connected.
 * @throws CannotEstablishOmemoSessionException if no new session can be established.
 * @throws SmackException.NotLoggedInException if the connection is not authenticated.
 */
public void rebuildSessionWith(OmemoDevice contactsDevice)
        throws InterruptedException, SmackException.NoResponseException, CorruptedOmemoKeyException,
        SmackException.NotConnectedException, CannotEstablishOmemoSessionException,
        SmackException.NotLoggedInException {
    // Session negotiation requires an authenticated connection; fail fast otherwise.
    if (!connection().isAuthenticated()) {
        throw new SmackException.NotLoggedInException();
    }
    getOmemoService().buildFreshSessionWithDevice(connection(), getOwnDevice(), contactsDevice);
}
/**
 * Get our connection.
 * Package-private accessor used by collaborators inside this package.
 *
 * @return the connection of this manager
 */
XMPPConnection getConnection() {
    return connection();
}
/**
 * Return the OMEMO service object.
 *
 * @return the OmemoService object related to this OmemoManager.
 * @throws IllegalStateException if no OmemoService has been set yet.
 */
OmemoService<?, ?, ?, ?, ?, ?, ?, ?, ?> getOmemoService() {
    throwIfNoServiceSet();
    return service;
}
/**
 * StanzaListener that hands incoming stanzas containing OMEMO elements to the service,
 * processing them asynchronously so the connection's reader thread is not blocked.
 */
private void internalOmemoMessageStanzaListener(final Stanza packet) {
    Async.go(() -> {
        try {
            getOmemoService().onOmemoMessageStanzaReceived(packet,
                    new LoggedInOmemoManager(OmemoManager.this));
        } catch (SmackException.NotLoggedInException | IOException e) {
            LOGGER.log(Level.SEVERE, "Exception while processing OMEMO stanza", e);
        }
    });
}
/**
 * CarbonCopyListener that hands incoming carbon copies containing OMEMO elements to the
 * service, processing them asynchronously.
 */
private void internalOmemoCarbonCopyListener(final CarbonExtension.Direction direction,
                                             final Message carbonCopy,
                                             final Message wrappingMessage) {
    Async.go(() -> {
        // Carbon copies without an OMEMO element are not our concern.
        if (!isOmemoMessage(carbonCopy)) {
            return;
        }
        try {
            getOmemoService().onOmemoCarbonCopyReceived(direction, carbonCopy, wrappingMessage,
                    new LoggedInOmemoManager(OmemoManager.this));
        } catch (SmackException.NotLoggedInException | IOException e) {
            LOGGER.log(Level.SEVERE, "Exception while processing OMEMO stanza", e);
        }
    });
}
// PEP listener for OMEMO device-list updates: merges every received list into the cache,
// and for our own list additionally cleans it up and republishes it if cleanup changed it.
@SuppressWarnings("UnnecessaryLambda")
private final PepEventListener<OmemoDeviceListElement> pepOmemoDeviceListEventListener =
        (from, receivedDeviceList, id, message) -> {
            // Device List <list>
            OmemoCachedDeviceList deviceList;
            try {
                // Always merge the received list into our cached view, regardless of sender.
                getOmemoService().getOmemoStoreBackend().mergeCachedDeviceList(getOwnDevice(), from,
                        receivedDeviceList);

                // Only our own device list is subject to cleanup and republishing below.
                if (!from.asBareJid().equals(getOwnJid())) {
                    return;
                }

                deviceList = getOmemoService().cleanUpDeviceList(getOwnDevice());
            } catch (IOException e) {
                LOGGER.log(Level.SEVERE,
                        "IOException while processing OMEMO PEP device updates. Message: " + message,
                        e);
                return;
            }
            final OmemoDeviceListElement_VAxolotl newDeviceList = new OmemoDeviceListElement_VAxolotl(deviceList);

            // Republish only when cleanup actually changed the set of device ids,
            // to avoid a publish/notify loop.
            if (!newDeviceList.copyDeviceIds().equals(receivedDeviceList.copyDeviceIds())) {
                LOGGER.log(Level.FINE, "Republish deviceList due to changes:" +
                        " Received: " + Arrays.toString(receivedDeviceList.copyDeviceIds().toArray()) +
                        " Published: " + Arrays.toString(newDeviceList.copyDeviceIds().toArray()));
                Async.go(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            OmemoService.publishDeviceList(connection(), newDeviceList);
                        } catch (InterruptedException | XMPPException.XMPPErrorException |
                                SmackException.NotConnectedException | SmackException.NoResponseException | PubSubException.NotALeafNodeException e) {
                            LOGGER.log(Level.WARNING, "Could not publish our deviceList upon an received update.", e);
                        }
                    }
                });
            }
        };
/**
 * StanzaFilter predicate matching message stanzas that contain an OMEMO element.
 */
private static boolean isOmemoMessage(Stanza stanza) {
    if (!(stanza instanceof Message)) {
        return false;
    }
    return OmemoManager.stanzaContainsOmemoElement(stanza);
}
/**
 * Guard class which ensures that the wrapped OmemoManager knows its BareJid.
 * Construction fails unless the JID is known or can be resolved from an authenticated connection.
 */
public static class LoggedInOmemoManager {

    private final OmemoManager manager;

    public LoggedInOmemoManager(OmemoManager manager)
            throws SmackException.NotLoggedInException {
        if (manager == null) {
            throw new IllegalArgumentException("OmemoManager cannot be null.");
        }

        if (manager.getOwnJid() == null) {
            // JID still unknown: resolve it now, or refuse if we are not authenticated.
            if (!manager.getConnection().isAuthenticated()) {
                throw new SmackException.NotLoggedInException();
            }
            manager.ownJid = manager.getConnection().getUser().asBareJid();
        }

        this.manager = manager;
    }

    public OmemoManager get() {
        return manager;
    }
}
/**
 * Callback which can be used to get notified, when the OmemoManager finished initializing.
 */
public interface InitializationFinishedCallback {

    /**
     * Called when the manager finished initializing successfully.
     *
     * @param manager the initialized manager
     */
    void initializationFinished(OmemoManager manager);

    /**
     * Called when initialization failed.
     *
     * @param cause the exception that caused the failure
     */
    void initializationFailed(Exception cause);
}
/**
 * Get the bareJid of the user from the authenticated XMPP connection.
 * If our deviceId is unknown, use the bareJid to look up deviceIds available in the omemoStore.
 * If there are ids available, choose the smallest one. Otherwise generate a random deviceId.
 *
 * @param manager OmemoManager
 */
private static void initBareJidAndDeviceId(OmemoManager manager) {
    if (!manager.getConnection().isAuthenticated()) {
        throw new IllegalStateException("Connection MUST be authenticated.");
    }

    if (manager.ownJid == null) {
        manager.ownJid = manager.getConnection().getUser().asBareJid();
    }

    if (UNKNOWN_DEVICE_ID.equals(manager.deviceId)) {
        SortedSet<Integer> storedDeviceIds =
                manager.getOmemoService().getOmemoStoreBackend().localDeviceIdsOf(manager.ownJid);
        // Prefer the smallest previously stored id; fall back to a freshly generated one.
        int chosenId = storedDeviceIds.isEmpty() ? randomDeviceId() : storedDeviceIds.first();
        manager.setDeviceId(chosenId);
    }
}
}
| Java |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.support.vectordrawable.app;
import android.os.Bundle;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SeekBar;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.appcompat.app.AppCompatActivity;
import androidx.vectordrawable.graphics.drawable.SeekableAnimatedVectorDrawable;
import com.example.android.support.vectordrawable.R;
/**
 * Demonstrates usage of {@link SeekableAnimatedVectorDrawable}.
 */
public class SeekableDemo extends AppCompatActivity {

    @Override
    protected void onCreate(@Nullable Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.seekable_demo);

        final ImageView image = findViewById(R.id.image);
        final Button start = findViewById(R.id.start);
        final Button stop = findViewById(R.id.stop);
        final SeekBar seekBar = findViewById(R.id.seek);

        final SeekableAnimatedVectorDrawable avd =
                SeekableAnimatedVectorDrawable.create(this, R.drawable.ic_hourglass_animation);

        // Without the drawable there is nothing to demonstrate; bail out of the activity.
        if (avd == null) {
            finish();
            return;
        }

        // Keep the buttons and the seek bar in sync with the animation state.
        avd.registerAnimationCallback(new SeekableAnimatedVectorDrawable.AnimationCallback() {
            @Override
            public void onAnimationStart(@NonNull SeekableAnimatedVectorDrawable drawable) {
                onAnimationRunning();
            }

            @Override
            public void onAnimationEnd(@NonNull SeekableAnimatedVectorDrawable drawable) {
                // Reset the UI to its initial state.
                start.setEnabled(true);
                start.setText(R.string.start);
                stop.setEnabled(false);
                seekBar.setProgress(0);
            }

            @Override
            public void onAnimationPause(@NonNull SeekableAnimatedVectorDrawable drawable) {
                start.setEnabled(true);
                start.setText(R.string.resume);
                stop.setEnabled(true);
            }

            @Override
            public void onAnimationResume(@NonNull SeekableAnimatedVectorDrawable drawable) {
                onAnimationRunning();
            }

            // Shared UI state for both "started" and "resumed".
            private void onAnimationRunning() {
                start.setEnabled(true);
                start.setText(R.string.pause);
                stop.setEnabled(true);
            }

            @Override
            public void onAnimationUpdate(@NonNull SeekableAnimatedVectorDrawable drawable) {
                // Reflect animation progress on the seek bar (play time in milliseconds).
                seekBar.setProgress((int) drawable.getCurrentPlayTime());
            }
        });

        image.setImageDrawable(avd);
        seekBar.setMax((int) avd.getTotalDuration());

        // The start button cycles through start -> pause -> resume.
        start.setOnClickListener((v) -> {
            if (!avd.isRunning()) {
                avd.start();
            } else if (!avd.isPaused()) {
                avd.pause();
            } else {
                avd.resume();
            }
        });
        stop.setOnClickListener((v) -> avd.stop());

        // Dragging the seek bar scrubs through the animation.
        seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                if (fromUser) {
                    avd.setCurrentPlayTime(progress);
                }
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
            }
        });
    }
}
| Java |
// (C) Copyright 2015 Moodle Pty Ltd.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
import { NgModule } from '@angular/core';
import { IonicPageModule } from 'ionic-angular';
import { TranslateModule } from '@ngx-translate/core';
import { CoreComponentsModule } from '@components/components.module';
import { CoreDirectivesModule } from '@directives/directives.module';
import { CorePipesModule } from '@pipes/pipes.module';
import { AddonModChatComponentsModule } from '../../components/components.module';
import { AddonModChatSessionsPage } from './sessions';
/**
 * Module for the chat sessions page.
 *
 * Declares the page component and imports the core component/directive/pipe
 * modules plus the chat components module the page depends on.
 */
@NgModule({
    declarations: [
        AddonModChatSessionsPage,
    ],
    imports: [
        CoreComponentsModule,
        CoreDirectivesModule,
        CorePipesModule,
        AddonModChatComponentsModule,
        IonicPageModule.forChild(AddonModChatSessionsPage),
        TranslateModule.forChild()
    ],
})
export class AddonModChatSessionsPageModule {}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.harmony.sql.tests.javax.sql.rowset.serial;
import javax.sql.rowset.serial.SerialException;
import junit.framework.TestCase;
import org.apache.harmony.testframework.serialization.SerializationTest;
/**
 * Verifies that {@link SerialException} remains serialization-compatible, both with
 * itself and with the reference implementation's recorded (golden) serialized form.
 */
public class SerialExceptionTest extends TestCase {

    /**
     * @tests serialization/deserialization compatibility.
     */
    public void testSerializationSelf() throws Exception {
        SerializationTest.verifySelf(new SerialException());
    }

    /**
     * @tests serialization/deserialization compatibility with RI.
     */
    public void testSerializationCompatibility() throws Exception {
        // Compares against a golden serialized form recorded from the reference implementation.
        SerializationTest.verifyGolden(this, new SerialException());
    }
}
| Java |
/*
* Copyright 2020 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.thoughtworks.go.domain.materials.scm;
import com.thoughtworks.go.config.materials.PluggableSCMMaterial;
import com.thoughtworks.go.domain.MaterialRevision;
import com.thoughtworks.go.domain.config.Configuration;
import com.thoughtworks.go.domain.config.ConfigurationProperty;
import com.thoughtworks.go.domain.materials.MaterialAgent;
import com.thoughtworks.go.domain.materials.Modification;
import com.thoughtworks.go.domain.scm.SCM;
import com.thoughtworks.go.plugin.access.scm.SCMExtension;
import com.thoughtworks.go.plugin.access.scm.SCMProperty;
import com.thoughtworks.go.plugin.access.scm.SCMPropertyConfiguration;
import com.thoughtworks.go.plugin.access.scm.revision.SCMRevision;
import com.thoughtworks.go.plugin.api.response.Result;
import com.thoughtworks.go.util.command.ConsoleOutputStreamConsumer;
import org.apache.commons.lang3.StringUtils;
import java.io.File;
import static com.thoughtworks.go.util.command.TaggedStreamConsumer.PREP_ERR;
/**
 * MaterialAgent that delegates checkout of a pluggable SCM material to the SCM plugin
 * extension, reporting progress and failures to the build console.
 */
public class PluggableSCMMaterialAgent implements MaterialAgent {
    // All collaborators are set once in the constructor; declared final for consistency
    // (previously only 'consumer' was final although none of them are ever reassigned).
    private final SCMExtension scmExtension;
    private final MaterialRevision revision;
    private final File workingDirectory;
    private final ConsoleOutputStreamConsumer consumer;

    public PluggableSCMMaterialAgent(SCMExtension scmExtension,
                                     MaterialRevision revision,
                                     File workingDirectory,
                                     ConsoleOutputStreamConsumer consumer) {
        this.scmExtension = scmExtension;
        this.revision = revision;
        this.workingDirectory = workingDirectory;
        this.consumer = consumer;
    }

    /**
     * Check out the latest modification of the material via the plugin.
     * On any failure the cause is written to the console before being propagated,
     * so the job log shows why preparation failed.
     */
    @Override
    public void prepare() {
        try {
            PluggableSCMMaterial material = (PluggableSCMMaterial) revision.getMaterial();
            Modification latestModification = revision.getLatestModification();
            SCMRevision scmRevision = new SCMRevision(latestModification.getRevision(), latestModification.getModifiedTime(), null, null, latestModification.getAdditionalDataMap(), null);
            File destinationFolder = material.workingDirectory(workingDirectory);
            Result result = scmExtension.checkout(material.getScmConfig().getPluginConfiguration().getId(), buildSCMPropertyConfigurations(material.getScmConfig()), destinationFolder.getAbsolutePath(), scmRevision);
            handleCheckoutResult(material, result);
        } catch (Exception e) {
            // Surface the failure on the console before rethrowing.
            consumer.taggedErrOutput(PREP_ERR, String.format("Material %s checkout failed: %s", revision.getMaterial().getDisplayName(), e.getMessage()));
            throw e;
        }
    }

    // Echo plugin messages on success; log to the console and raise on failure.
    private void handleCheckoutResult(PluggableSCMMaterial material, Result result) {
        if (result.isSuccessful()) {
            if (StringUtils.isNotBlank(result.getMessagesForDisplay())) {
                consumer.stdOutput(result.getMessagesForDisplay());
            }
        } else {
            consumer.taggedErrOutput(PREP_ERR, String.format("Material %s checkout failed: %s", material.getDisplayName(), result.getMessagesForDisplay()));
            throw new RuntimeException(String.format("Material %s checkout failed: %s", material.getDisplayName(), result.getMessagesForDisplay()));
        }
    }

    // Translate the SCM config into the property-configuration shape the plugin API expects.
    private SCMPropertyConfiguration buildSCMPropertyConfigurations(SCM scmConfig) {
        SCMPropertyConfiguration scmPropertyConfiguration = new SCMPropertyConfiguration();
        populateConfiguration(scmConfig.getConfiguration(), scmPropertyConfiguration);
        return scmPropertyConfiguration;
    }

    private void populateConfiguration(Configuration configuration,
                                       com.thoughtworks.go.plugin.api.config.Configuration pluginConfiguration) {
        for (ConfigurationProperty configurationProperty : configuration) {
            pluginConfiguration.add(new SCMProperty(configurationProperty.getConfigurationKey().getName(), configurationProperty.getValue()));
        }
    }
}
| Java |
/**
* Copyright (c) 2010 Yahoo! Inc. All rights reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.oozie.service;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.oozie.CoordinatorActionBean;
import org.apache.oozie.ErrorCode;
import org.apache.oozie.WorkflowActionBean;
import org.apache.oozie.command.CommandException;
import org.apache.oozie.command.coord.CoordActionCheckCommand;
import org.apache.oozie.command.coord.CoordActionCheckXCommand;
import org.apache.oozie.command.wf.ActionCheckCommand;
import org.apache.oozie.command.wf.ActionCheckXCommand;
import org.apache.oozie.executor.jpa.CoordActionsRunningGetJPAExecutor;
import org.apache.oozie.executor.jpa.JPAExecutorException;
import org.apache.oozie.executor.jpa.WorkflowActionsRunningGetJPAExecutor;
import org.apache.oozie.util.XCallable;
import org.apache.oozie.util.XLog;
/**
 * The Action Checker Service queue ActionCheckCommands to check the status of
 * running actions and CoordActionCheckCommands to check the status of
 * coordinator actions. The delay between checks on the same action can be
 * configured.
 */
public class ActionCheckerService implements Service {

    public static final String CONF_PREFIX = Service.CONF_PREFIX + "ActionCheckerService.";
    /**
     * The frequency at which the ActionCheckService will run.
     */
    public static final String CONF_ACTION_CHECK_INTERVAL = CONF_PREFIX + "action.check.interval";
    /**
     * The time, in seconds, between an ActionCheck for the same action.
     */
    public static final String CONF_ACTION_CHECK_DELAY = CONF_PREFIX + "action.check.delay";
    /**
     * The number of callables to be queued in a batch.
     */
    public static final String CONF_CALLABLE_BATCH_SIZE = CONF_PREFIX + "callable.batch.size";

    protected static final String INSTRUMENTATION_GROUP = "actionchecker";
    protected static final String INSTR_CHECK_ACTIONS_COUNTER = "checks_wf_actions";
    protected static final String INSTR_CHECK_COORD_ACTIONS_COUNTER = "checks_coord_actions";

    // Selects between the newer XCommand implementations and the legacy commands;
    // flipped once at init() from configuration and shared by all runnables.
    private static boolean useXCommand = true;

    /**
     * {@link ActionCheckRunnable} is the runnable which is scheduled to run and
     * queue Action checks.
     */
    static class ActionCheckRunnable implements Runnable {
        private int actionCheckDelay;
        // Pending batch of check commands; flushed when it reaches the configured batch size.
        private List<XCallable<Void>> callables;
        // Human-readable summary of what was queued in the current run, for logging only.
        private StringBuilder msg = null;

        public ActionCheckRunnable(int actionCheckDelay) {
            this.actionCheckDelay = actionCheckDelay;
        }

        public void run() {
            XLog.Info.get().clear();
            XLog LOG = XLog.getLog(getClass());
            msg = new StringBuilder();

            try {
                runWFActionCheck();
                runCoordActionCheck();
            }
            catch (CommandException ce) {
                LOG.error("Unable to run action checks, ", ce);
            }

            LOG.debug("QUEUING [{0}] for potential checking", msg.toString());
            // Flush any partial batch left over after the checks above.
            if (null != callables) {
                boolean ret = Services.get().get(CallableQueueService.class).queueSerial(callables);
                if (ret == false) {
                    LOG.warn("Unable to queue the callables commands for CheckerService. "
                            + "Most possibly command queue is full. Queue size is :"
                            + Services.get().get(CallableQueueService.class).queueSize());
                }
                callables = null;
            }
        }

        /**
         * check workflow actions
         *
         * @throws CommandException
         */
        private void runWFActionCheck() throws CommandException {
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService == null) {
                throw new CommandException(ErrorCode.E0610);
            }

            List<WorkflowActionBean> actions;
            try {
                actions = jpaService
                        .execute(new WorkflowActionsRunningGetJPAExecutor(actionCheckDelay));
            }
            catch (JPAExecutorException je) {
                throw new CommandException(je);
            }

            if (actions == null || actions.size() == 0) {
                return;
            }
            msg.append(" WF_ACTIONS : " + actions.size());

            for (WorkflowActionBean action : actions) {
                Services.get().get(InstrumentationService.class).get().incr(INSTRUMENTATION_GROUP,
                        INSTR_CHECK_ACTIONS_COUNTER, 1);
                if (useXCommand) {
                    queueCallable(new ActionCheckXCommand(action.getId()));
                }
                else {
                    queueCallable(new ActionCheckCommand(action.getId()));
                }
            }
        }

        /**
         * check coordinator actions
         *
         * @throws CommandException
         */
        private void runCoordActionCheck() throws CommandException {
            JPAService jpaService = Services.get().get(JPAService.class);
            if (jpaService == null) {
                throw new CommandException(ErrorCode.E0610);
            }

            List<CoordinatorActionBean> cactions;
            try {
                cactions = jpaService.execute(new CoordActionsRunningGetJPAExecutor(
                        actionCheckDelay));
            }
            catch (JPAExecutorException je) {
                throw new CommandException(je);
            }

            if (cactions == null || cactions.size() == 0) {
                return;
            }
            msg.append(" COORD_ACTIONS : " + cactions.size());

            for (CoordinatorActionBean caction : cactions) {
                Services.get().get(InstrumentationService.class).get().incr(INSTRUMENTATION_GROUP,
                        INSTR_CHECK_COORD_ACTIONS_COUNTER, 1);
                if (useXCommand) {
                    queueCallable(new CoordActionCheckXCommand(caction.getId(), actionCheckDelay));
                }
                else {
                    queueCallable(new CoordActionCheckCommand(caction.getId(), actionCheckDelay));
                }
            }
        }

        /**
         * Adds callables to a list. If the number of callables in the list
         * reaches {@link ActionCheckerService#CONF_CALLABLE_BATCH_SIZE}, the
         * entire batch is queued and the callables list is reset.
         *
         * @param callable the callable to queue.
         */
        private void queueCallable(XCallable<Void> callable) {
            if (callables == null) {
                callables = new ArrayList<XCallable<Void>>();
            }
            callables.add(callable);
            if (callables.size() == Services.get().getConf().getInt(CONF_CALLABLE_BATCH_SIZE, 10)) {
                boolean ret = Services.get().get(CallableQueueService.class).queueSerial(callables);
                if (ret == false) {
                    XLog.getLog(getClass()).warn(
                            "Unable to queue the callables commands for CheckerService. "
                                    + "Most possibly command queue is full. Queue size is :"
                                    + Services.get().get(CallableQueueService.class).queueSize());
                }
                callables = new ArrayList<XCallable<Void>>();
            }
        }
    }

    /**
     * Initializes the Action Check service.
     * Schedules the check runnable with an initial 10 second delay and the configured
     * interval, and reads the XCommand toggle from configuration.
     *
     * @param services services instance.
     */
    @Override
    public void init(Services services) {
        Configuration conf = services.getConf();
        Runnable actionCheckRunnable = new ActionCheckRunnable(conf.getInt(CONF_ACTION_CHECK_DELAY, 600));
        services.get(SchedulerService.class).schedule(actionCheckRunnable, 10,
                conf.getInt(CONF_ACTION_CHECK_INTERVAL, 60), SchedulerService.Unit.SEC);
        if (Services.get().getConf().getBoolean(USE_XCOMMAND, true) == false) {
            useXCommand = false;
        }
    }

    /**
     * Destroy the Action Checker Services.
     */
    @Override
    public void destroy() {
    }

    /**
     * Return the public interface for the action checker service.
     *
     * @return {@link ActionCheckerService}.
     */
    @Override
    public Class<? extends Service> getInterface() {
        return ActionCheckerService.class;
    }
}
| Java |
package com.google.api.ads.dfp.jaxws.v201508;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlSchemaType;
import javax.xml.bind.annotation.XmlType;
/**
 *
 * The content partner related validation errors.
 *
 *
 * <p>Java class for ContentPartnerError complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="ContentPartnerError"&gt;
 * &lt;complexContent&gt;
 * &lt;extension base="{https://www.google.com/apis/ads/publisher/v201508}ApiError"&gt;
 * &lt;sequence&gt;
 * &lt;element name="reason" type="{https://www.google.com/apis/ads/publisher/v201508}ContentPartnerError.Reason" minOccurs="0"/&gt;
 * &lt;/sequence&gt;
 * &lt;/extension&gt;
 * &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 *
 * NOTE(review): this class appears to be JAXB-generated from the v201508 schema —
 * prefer regenerating over hand-editing; confirm with the code-generation setup.
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "ContentPartnerError", propOrder = {
    "reason"
})
public class ContentPartnerError
    extends ApiError
{

    // Optional in the schema (minOccurs="0"), so this may legitimately be null.
    @XmlSchemaType(name = "string")
    protected ContentPartnerErrorReason reason;

    /**
     * Gets the value of the reason property.
     *
     * @return
     * possible object is
     * {@link ContentPartnerErrorReason }
     *
     */
    public ContentPartnerErrorReason getReason() {
        return reason;
    }

    /**
     * Sets the value of the reason property.
     *
     * @param value
     * allowed object is
     * {@link ContentPartnerErrorReason }
     *
     */
    public void setReason(ContentPartnerErrorReason value) {
        this.reason = value;
    }

}
| Java |
# Copyright 2015, Google, Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from getting_started import main
def test_main(cloud_config, capsys):
    """Run the getting-started sample and verify the printed query results."""
    main(cloud_config.project)

    captured, _ = capsys.readouterr()

    # DOTALL lets '.' span the newline between the header and the result row.
    pattern = re.compile(r'Query Results:.hamlet', re.DOTALL)
    assert pattern.search(captured)
| Java |
<?php namespace Neomerx\JsonApi\Parameters\Headers;
/**
* Copyright 2015 [email protected] (www.neomerx.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
use \InvalidArgumentException;
use \Neomerx\JsonApi\Contracts\Parameters\Headers\MediaTypeInterface;
/**
* @package Neomerx\JsonApi
*/
class MediaType implements MediaTypeInterface
{
/**
* @var string
*/
private $type;
/**
* @var string
*/
private $subType;
/**
* @var string
*/
private $mediaType;
/**
* @var array<string,string>|null
*/
private $parameters;
/**
 * Build a media type from its type and subtype parts.
 *
 * @param string $type Primary type (e.g. 'application'); trimmed, must be non-empty.
 * @param string $subType Subtype (e.g. 'json'); trimmed, must be non-empty.
 * @param array<string,string>|null $parameters Optional media type parameters.
 */
public function __construct($type, $subType, $parameters = null)
{
    $type = trim($type);
    if (true === empty($type)) {
        throw new InvalidArgumentException('type');
    }

    $subType = trim($subType);
    if (true === empty($subType)) {
        throw new InvalidArgumentException('subType');
    }

    if (null !== $parameters && false === is_array($parameters)) {
        throw new InvalidArgumentException('parameters');
    }

    $this->type = $type;
    $this->subType = $subType;
    $this->mediaType = $type . '/' . $subType;
    $this->parameters = $parameters;
}
/**
 * Get the primary type part (e.g. 'text' in 'text/html').
 *
 * @inheritdoc
 */
public function getType()
{
    return $this->type;
}
/**
* @inheritdoc
*/
public function getSubType()
{
return $this->subType;
}
/**
* @inheritdoc
*/
public function getMediaType()
{
return $this->mediaType;
}
/**
* @inheritdoc
*/
public function getParameters()
{
return $this->parameters;
}
/**
* @inheritdoc
*/
public function matchesTo(MediaTypeInterface $mediaType)
{
return
$this->isTypeMatches($mediaType) &&
$this->isSubTypeMatches($mediaType) &&
$this->isMediaParametersEqual($mediaType);
}
/**
* @inheritdoc
*/
public function equalsTo(MediaTypeInterface $mediaType)
{
return
$this->isTypeEquals($mediaType) &&
$this->isSubTypeEquals($mediaType) &&
$this->isMediaParametersEqual($mediaType);
}
/**
* Parse media type.
*
* @param int $position
* @param string $mediaType
*
* @return MediaType
*/
public static function parse($position, $mediaType)
{
$position ?: null;
$fields = explode(';', $mediaType);
if (strpos($fields[0], '/') === false) {
throw new InvalidArgumentException('mediaType');
}
list($type, $subType) = explode('/', $fields[0], 2);
$parameters = null;
$count = count($fields);
for ($idx = 1; $idx < $count; ++$idx) {
if (strpos($fields[$idx], '=') === false) {
throw new InvalidArgumentException('mediaType');
}
list($key, $value) = explode('=', $fields[$idx], 2);
$parameters[trim($key)] = trim($value, ' "');
}
return new MediaType($type, $subType, $parameters);
}
/**
* @param MediaTypeInterface $mediaType
*
* @return bool
*/
private function isTypeMatches(MediaTypeInterface $mediaType)
{
return $this->getType() === $mediaType->getType() || $mediaType->getType() === '*';
}
/**
* @param MediaTypeInterface $mediaType
*
* @return bool
*/
private function isTypeEquals(MediaTypeInterface $mediaType)
{
return $this->getType() === $mediaType->getType();
}
/**
* @param MediaTypeInterface $mediaType
*
* @return bool
*/
private function isSubTypeMatches(MediaTypeInterface $mediaType)
{
return $this->getSubType() === $mediaType->getSubType() || $mediaType->getSubType() === '*';
}
/**
* @param MediaTypeInterface $mediaType
*
* @return bool
*/
private function isSubTypeEquals(MediaTypeInterface $mediaType)
{
return $this->getSubType() === $mediaType->getSubType();
}
/**
* @param MediaTypeInterface $mediaType
*
* @return bool
*/
private function isMediaParametersEqual(MediaTypeInterface $mediaType)
{
if ($this->getParameters() === null && $mediaType->getParameters() === null) {
return true;
} elseif ($this->getParameters() !== null && $mediaType->getParameters() !== null) {
$intersect = array_intersect($this->getParameters(), $mediaType->getParameters());
return (count($this->getParameters()) === count($intersect));
}
return false;
}
}
| Java |
---
layout: default
description: Sequential Access and Cursors
---
Sequential Access and Cursors
=============================
If a query returns a cursor (for example by calling `db._query(...)`), then you can use *hasNext* and *next* to
iterate over the result set or *toArray* to convert it to an array.
If the number of query results is expected to be big, it is possible to
limit the amount of documents transferred between the server and the client
to a specific value. This value is called *batchSize*. The *batchSize*
can optionally be set before or when a simple query is executed.
If the server has more documents than should be returned in a single batch,
the server will set the *hasMore* attribute in the result. It will also
return the id of the server-side cursor in the *id* attribute in the result.
This id can be used with the cursor API to fetch any outstanding results from
the server and dispose the server-side cursor afterwards.
The initial *batchSize* value can be set using the *setBatchSize*
method that is available for each type of simple query, or when the simple
query is executed using its *execute* method. If no *batchSize* value
is specified, the server will pick a reasonable default value.
Has Next
--------
<!-- js/common/modules/@arangodb/simple-query-common.js -->
checks if the cursor is exhausted
`cursor.hasNext()`
If the *hasNext* operator returns *true*, then the cursor still has
documents. In this case the next document can be accessed using the
*next* operator, which will advance the cursor.
**Examples**
{% arangoshexample examplevar="examplevar" script="script" result="result" %}
@startDocuBlockInline cursorHasNext
@EXAMPLE_ARANGOSH_OUTPUT{cursorHasNext}
~ db._create("five");
~ db.five.save({ name : "one" });
~ db.five.save({ name : "two" });
~ db.five.save({ name : "three" });
~ db.five.save({ name : "four" });
~ db.five.save({ name : "five" });
var a = db._query("FOR x IN five RETURN x");
while (a.hasNext()) print(a.next());
~ db._drop("five")
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock cursorHasNext
{% endarangoshexample %}
{% include arangoshexample.html id=examplevar script=script result=result %}
Next
----
<!-- js/common/modules/@arangodb/simple-query-common.js -->
returns the next result document
`cursor.next()`
If the *hasNext* operator returns *true*, then the underlying
cursor of the simple query still has documents. In this case the
next document can be accessed using the *next* operator, which
will advance the underlying cursor. If you use *next* on an
exhausted cursor, then *undefined* is returned.
**Examples**
{% arangoshexample examplevar="examplevar" script="script" result="result" %}
@startDocuBlockInline cursorNext
@EXAMPLE_ARANGOSH_OUTPUT{cursorNext}
~ db._create("five");
~ db.five.save({ name : "one" });
~ db.five.save({ name : "two" });
~ db.five.save({ name : "three" });
~ db.five.save({ name : "four" });
~ db.five.save({ name : "five" });
db._query("FOR x IN five RETURN x").next();
~ db._drop("five")
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock cursorNext
{% endarangoshexample %}
{% include arangoshexample.html id=examplevar script=script result=result %}
Set Batch size
--------------
<!-- js/common/modules/@arangodb/simple-query-common.js -->
sets the batch size for any following requests
`cursor.setBatchSize(number)`
Sets the batch size for queries. The batch size determines how many results
are at most transferred from the server to the client in one chunk.
Get Batch size
--------------
<!-- js/common/modules/@arangodb/simple-query-common.js -->
returns the batch size
`cursor.getBatchSize()`
Returns the batch size for queries. If the returned value is undefined, the
server will determine a sensible batch size for any following requests.
Execute Query
-------------
<!-- js/common/modules/@arangodb/simple-query-common.js -->
executes a query
`query.execute(batchSize)`
Executes a simple query. If the optional batchSize value is specified,
the server will return at most batchSize values in one roundtrip.
The batchSize cannot be adjusted after the query is first executed.
**Note**: There is no need to explicitly call the execute method if another
means of fetching the query results is chosen. The following two approaches
lead to the same result:
{% arangoshexample examplevar="examplevar" script="script" result="result" %}
@startDocuBlockInline executeQueryNoBatchSize
@EXAMPLE_ARANGOSH_OUTPUT{executeQueryNoBatchSize}
~ db._create("users");
~ db.users.save({ name: "Gerhard" });
~ db.users.save({ name: "Helmut" });
~ db.users.save({ name: "Angela" });
result = db.users.all().toArray();
| var q = db._query("FOR x IN users RETURN x");
| result = [ ];
| while (q.hasNext()) {
| result.push(q.next());
}
~ db._drop("users")
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock executeQueryNoBatchSize
{% endarangoshexample %}
{% include arangoshexample.html id=examplevar script=script result=result %}
The following two alternatives both use a batchSize and return the same
result:
{% arangoshexample examplevar="examplevar" script="script" result="result" %}
@startDocuBlockInline executeQueryBatchSize
@EXAMPLE_ARANGOSH_OUTPUT{executeQueryBatchSize}
~ db._create("users");
~ db.users.save({ name: "Gerhard" });
~ db.users.save({ name: "Helmut" });
~ db.users.save({ name: "Angela" });
q = db.users.all(); q.setBatchSize(20); q.execute(); while (q.hasNext()) { print(q.next()); }
q = db.users.all(); q.execute(20); while (q.hasNext()) { print(q.next()); }
~ db._drop("users")
@END_EXAMPLE_ARANGOSH_OUTPUT
@endDocuBlock executeQueryBatchSize
{% endarangoshexample %}
{% include arangoshexample.html id=examplevar script=script result=result %}
Dispose
-------
<!-- js/common/modules/@arangodb/simple-query-common.js -->
disposes the result
`cursor.dispose()`
If you are no longer interested in any further results, you should call
*dispose* in order to free any resources associated with the cursor.
After calling *dispose* you can no longer access the cursor.
Count
-----
<!-- js/common/modules/@arangodb/simple-query-common.js -->
counts the number of documents
`cursor.count()`
The *count* operator counts the number of documents in the result set and
returns that number. The *count* operator ignores any limits and returns
the total number of documents found.
**Note**: Not all simple queries support counting. In this case *null* is
returned (Simple queries are deprecated).
`cursor.count(true)`
If the result set was limited by the *limit* operator or documents were
skipped using the *skip* operator, the *count* operator with argument
*true* will use the number of elements in the final result set - after
applying *limit* and *skip*.
**Note**: Not all simple queries support counting. In this case *null* is
returned (Simple queries are deprecated).
| Java |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"Example code to perform int8 GEMM"
import logging
import sys
import numpy as np
import tvm
from tvm import te
from tvm import autotvm
from tvm.topi.cuda.tensor_intrin import dp4a
# Set to False to skip autotuning and use the pretuned config index below instead.
DO_TUNING = True
# Index of a known-good configuration in the search space (used when DO_TUNING is False).
PRETUNED_INDEX = 75333
# dp4a: CUDA 4-way int8 dot-product tensor intrinsic; all three buffers in "local" scope.
intrin_dp4a = dp4a("local", "local", "local")
@autotvm.template
def gemm_int8(n, m, l):
    """Tunable CUDA schedule template for int8 GEMM.

    Computes C[i, j] = sum_k A[i, k] * B[j, k] (B is consumed transposed) with
    int8 inputs and int32 accumulation; the innermost reduction is tensorized
    with the dp4a intrinsic. Returns (schedule, [A, B, C]) for tvm.build/autotvm.
    """
    A = te.placeholder((n, l), name="A", dtype="int8")
    B = te.placeholder((m, l), name="B", dtype="int8")
    k = te.reduce_axis((0, l), name="k")
    C = te.compute(
        (n, m),
        lambda i, j: te.sum(A[i, k].astype("int32") * B[j, k].astype("int32"), axis=k),
        name="C",
    )
    cfg = autotvm.get_config()
    s = te.create_schedule(C.op)
    y, x = C.op.axis
    # Stage operands through shared memory (AA/BB) then registers (AL/BL);
    # accumulate C in registers (CC) before writing back.
    AA = s.cache_read(A, "shared", [C])
    BB = s.cache_read(B, "shared", [C])
    AL = s.cache_read(AA, "local", [C])
    BL = s.cache_read(BB, "local", [C])
    CC = s.cache_write(C, "local")
    k = CC.op.reduce_axis[0]
    # The innermost k factor must be exactly 4 so it can be tensorized with dp4a.
    cfg.define_split(
        "tile_k",
        cfg.axis(k),
        num_outputs=3,
        filter=lambda entity: entity.size[2] == 4 and entity.size[0] * 2 >= entity.size[1],
    )
    ko, kt, ki = cfg["tile_k"].apply(s, CC, k)
    s[CC].tensorize(ki, intrin_dp4a)
    block_x = te.thread_axis("blockIdx.x")
    block_y = te.thread_axis("blockIdx.y")
    thread_x = te.thread_axis("threadIdx.x")
    thread_y = te.thread_axis("threadIdx.y")
    def block_size_filter(entity):
        # Restrict the search space to plausible thread-block shapes.
        # NOTE(review): `entity.size[0] * 2 >= entity.size[1] * 2` reduces to
        # `entity.size[0] >= entity.size[1]`; the `* 2` on both sides is redundant —
        # confirm whether `>= entity.size[1]` without one factor was intended.
        return (
            entity.size[0] * 2 >= entity.size[1] * 2
            and entity.size[1] <= 16
            and entity.size[3] <= 4
        )
    cfg.define_split("tile_y", cfg.axis(y), num_outputs=4, filter=block_size_filter)
    cfg.define_split("tile_x", cfg.axis(x), num_outputs=4, filter=block_size_filter)
    by, tyz, ty, yi = cfg["tile_y"].apply(s, C, y)
    bx, txz, tx, xi = cfg["tile_x"].apply(s, C, x)
    # Bind the outer tiles to CUDA blocks/threads; tyz/txz become virtual threads.
    s[C].bind(by, block_y)
    s[C].bind(bx, block_x)
    s[C].bind(tyz, te.thread_axis("vthread"))
    s[C].bind(txz, te.thread_axis("vthread"))
    s[C].bind(ty, thread_y)
    s[C].bind(tx, thread_x)
    s[C].reorder(by, bx, tyz, txz, ty, tx, yi, xi)
    s[CC].compute_at(s[C], tx)
    yo, xo = CC.op.axis
    s[CC].reorder(ko, kt, yo, xo, ki)
    s[CC].unroll(kt)
    # Register-level stages: vectorized 4-wide loads, double-buffered across kt.
    for stage in [AL, BL]:
        s[stage].compute_at(s[CC], kt)
        _, xi = s[stage].split(stage.op.axis[1], factor=4)
        s[stage].vectorize(xi)
        s[stage].double_buffer()
    # Shared-memory stages: cooperative fetch by the whole thread block.
    # storage_align pads rows (16 or 48) — presumably to reduce shared-memory
    # bank conflicts; confirm against the target architecture.
    cfg.define_knob("storage_align", [16, 48])
    for stage in [AA, BB]:
        s[stage].storage_align(s[stage].op.axis[0], cfg["storage_align"].val, 0)
        s[stage].compute_at(s[CC], ko)
        fused = s[stage].fuse(*s[stage].op.axis)
        ty, tx = s[stage].split(fused, nparts=cfg["tile_y"].size[2])
        tx, xi = s[stage].split(tx, nparts=cfg["tile_x"].size[2])
        _, xi = s[stage].split(xi, factor=16)
        s[stage].bind(ty, thread_y)
        s[stage].bind(tx, thread_x)
        s[stage].vectorize(xi)
    cfg.define_knob("auto_unroll_max_step", [512, 1500])
    s[C].pragma(by, "auto_unroll_max_step", cfg["auto_unroll_max_step"].val)
    s[C].pragma(by, "unroll_explicit", False)
    # 2 ops (multiply + add) per element of the reduction.
    cfg.add_flop(n * m * l * 2)
    return s, [A, B, C]
if __name__ == "__main__":
    # Square problem: C is N x N with reduction length N.
    N = 2048
    n = m = l = N
    logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
    task = autotvm.task.create(gemm_int8, args=(n, m, l), target="cuda")
    print(task.config_space)
    measure_option = autotvm.measure_option(
        builder=autotvm.LocalBuilder(),
        runner=autotvm.LocalRunner(repeat=3, min_repeat_ms=100, timeout=4),
    )
    log_name = "gemm_int8.log"
    if DO_TUNING:
        # Search the schedule space and keep the best config found in the log.
        tuner = autotvm.tuner.XGBTuner(task)
        tuner.tune(
            n_trial=1000,
            measure_option=measure_option,
            callbacks=[autotvm.callback.log_to_file(log_name)],
        )
        dispatch_context = autotvm.apply_history_best(log_name)
        best_config = dispatch_context.query(task.target, task.workload)
        print("\nBest config:")
        print(best_config)
    else:
        # Skip tuning: apply a previously found config by its index in the space.
        config = task.config_space.get(PRETUNED_INDEX)
        dispatch_context = autotvm.task.ApplyConfig(config)
        print("Using pretuned config:")
        print(config)
    with dispatch_context:
        with tvm.target.Target("cuda"):
            s, arg_bufs = gemm_int8(n, m, l)
            f = tvm.build(s, arg_bufs, "cuda", name="gemm_int8")
    dev = tvm.device("cuda", 0)
    a_np = np.random.randint(size=(n, l), low=-128, high=127, dtype="int8")
    b_np = np.random.randint(size=(m, l), low=-128, high=127, dtype="int8")
    a = tvm.nd.array(a_np, dev)
    b = tvm.nd.array(b_np, dev)
    c = tvm.nd.array(np.zeros((n, m), dtype="int32"), dev)
    f(a, b, c)
    # Correctness check against numpy; note B is transposed (C = A @ B^T).
    # NOTE(review): relies on tvm.testing being reachable as an attribute of tvm;
    # some versions require an explicit `import tvm.testing` — confirm.
    tvm.testing.assert_allclose(
        c.numpy(), np.dot(a_np.astype("int32"), b_np.T.astype("int32")), rtol=1e-5
    )
    num_ops = 2 * l * m * n
    num_runs = 1000
    timer_f = f.time_evaluator(f.entry_name, dev, number=num_runs)
    t = timer_f(a, b, c).mean
    # t is in seconds: num_ops / (t * 1e9) giga-ops/s, written as two divisions.
    GOPS = num_ops / (t * 1e3) / 1e6
    print("average time cost of %d runs = %g ms, %g GOPS." % (num_runs, t * 1e3, GOPS))
| Java |
/**
* Copyright 2017 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
import com.github.ambry.utils.Pair;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
/**
* Hold the data structures needed by {@link BlobStoreStats} to serve requests. The class also exposes helper methods
* used to modify and access the stored data structures.
*/
class ScanResults {
  // A NavigableMap that stores buckets for container valid data size. The key of the map is the end time of each
  // bucket and the value is the corresponding valid data size map. For example, there are two buckets with end time
  // t1 and t2. Bucket with end time t2 includes all events whose operation time is greater than or equal to t1 but
  // strictly less than t2.
  // Each bucket except for the very first one contains the delta in valid data size that occurred prior to the bucket
  // end time. The very first bucket's end time is the forecast start time for containers and it contains the valid data
  // size map at the forecast start time. The very first bucket is used as a base value, requested valid data size is
  // computed by applying the deltas from appropriate buckets on the base value.
  private final NavigableMap<Long, Map<String, Map<String, Long>>> containerBuckets = new TreeMap<>();
  // A NavigableMap that stores buckets for log segment valid data size. The rest of the structure is similar
  // to containerBuckets.
  private final NavigableMap<Long, NavigableMap<String, Long>> logSegmentBuckets = new TreeMap<>();
  final long containerForecastStartTimeMs;
  final long containerLastBucketTimeMs;
  final long containerForecastEndTimeMs;
  final long logSegmentForecastStartTimeMs;
  final long logSegmentLastBucketTimeMs;
  final long logSegmentForecastEndTimeMs;
  // End offset of the log covered by this scan; null until set by the scanner.
  Offset scannedEndOffset = null;

  /**
   * Create the bucket data structures in advance based on the given scanStartTime and segmentScanTimeOffset.
   * @param startTimeInMs start time of the scan; also the end time of the first container bucket
   * @param logSegmentForecastOffsetMs how far behind {@code startTimeInMs} the log segment forecast begins
   * @param bucketCount the number of buckets to create for each of the two bucket maps
   * @param bucketSpanInMs the time span covered by each bucket
   */
  ScanResults(long startTimeInMs, long logSegmentForecastOffsetMs, int bucketCount, long bucketSpanInMs) {
    long containerBucketTimeMs = startTimeInMs;
    long logSegmentBucketTimeMs = startTimeInMs - logSegmentForecastOffsetMs;
    for (int i = 0; i < bucketCount; i++) {
      containerBuckets.put(containerBucketTimeMs, new HashMap<>());
      logSegmentBuckets.put(logSegmentBucketTimeMs, new TreeMap<>(LogSegmentNameHelper.COMPARATOR));
      containerBucketTimeMs += bucketSpanInMs;
      logSegmentBucketTimeMs += bucketSpanInMs;
    }
    containerForecastStartTimeMs = containerBuckets.firstKey();
    containerLastBucketTimeMs = containerBuckets.lastKey();
    containerForecastEndTimeMs = containerLastBucketTimeMs + bucketSpanInMs;
    logSegmentForecastStartTimeMs = logSegmentBuckets.firstKey();
    logSegmentLastBucketTimeMs = logSegmentBuckets.lastKey();
    logSegmentForecastEndTimeMs = logSegmentLastBucketTimeMs + bucketSpanInMs;
  }

  /**
   * Given a reference time, return the key of the appropriate container bucket whose end time is strictly greater than
   * the reference time.
   * @param referenceTimeInMs the reference time or operation time of an event.
   * @return the appropriate bucket key (bucket end time) to indicate which bucket will an event with
   * the given reference time as operation time belong to.
   */
  Long getContainerBucketKey(long referenceTimeInMs) {
    return containerBuckets.higherKey(referenceTimeInMs);
  }

  /**
   * Given a reference time, return the key of the appropriate log segment bucket whose end time is strictly greater
   * than the reference time.
   * @param referenceTimeInMs the reference time or operation time of an event.
   * @return the appropriate bucket key (bucket end time) to indicate which bucket will an event with
   * the given reference time as operation time belong to.
   */
  Long getLogSegmentBucketKey(long referenceTimeInMs) {
    return logSegmentBuckets.higherKey(referenceTimeInMs);
  }

  /**
   * Helper function to update the container base value bucket with the given value.
   * @param serviceId the serviceId of the map entry to be updated
   * @param containerId the containerId of the map entry to be updated
   * @param value the value to be added
   */
  void updateContainerBaseBucket(String serviceId, String containerId, long value) {
    updateContainerBucket(containerBuckets.firstKey(), serviceId, containerId, value);
  }

  /**
   * Helper function to update the log segment base value bucket with the given value.
   * @param logSegmentName the log segment name of the map entry to be updated
   * @param value the value to be added
   */
  void updateLogSegmentBaseBucket(String logSegmentName, long value) {
    updateLogSegmentBucket(logSegmentBuckets.firstKey(), logSegmentName, value);
  }

  /**
   * Helper function to update a container bucket with the given value.
   * @param bucketKey the bucket key to specify which bucket will be updated
   * @param serviceId the serviceId of the map entry to be updated
   * @param containerId the containerId of the map entry to be updated
   * @param value the value to be added
   */
  void updateContainerBucket(Long bucketKey, String serviceId, String containerId, long value) {
    if (bucketKey != null && containerBuckets.containsKey(bucketKey)) {
      Map<String, Map<String, Long>> existingBucketEntry = containerBuckets.get(bucketKey);
      updateNestedMapHelper(existingBucketEntry, serviceId, containerId, value);
    }
  }

  /**
   * Helper function to update a log segment bucket with a given value.
   * @param bucketKey the bucket key to specify which bucket will be updated
   * @param logSegmentName the log segment name of the map entry to be updated
   * @param value the value to be added
   */
  void updateLogSegmentBucket(Long bucketKey, String logSegmentName, long value) {
    if (bucketKey != null && logSegmentBuckets.containsKey(bucketKey)) {
      Map<String, Long> existingBucketEntry = logSegmentBuckets.get(bucketKey);
      updateMapHelper(existingBucketEntry, logSegmentName, value);
    }
  }

  /**
   * Given a reference time in milliseconds return the corresponding valid data size per log segment map by aggregating
   * all buckets whose end time is less than or equal to the reference time.
   * @param referenceTimeInMS the reference time in ms until which deletes and expiration are relevant
   * @return a {@link Pair} whose first element is the end time of the last bucket that was aggregated and whose second
   * element is the requested valid data size per log segment {@link NavigableMap}.
   */
  Pair<Long, NavigableMap<String, Long>> getValidSizePerLogSegment(Long referenceTimeInMS) {
    // Start from a copy of the base bucket and apply deltas from subsequent buckets.
    NavigableMap<String, Long> validSizePerLogSegment = new TreeMap<>(logSegmentBuckets.firstEntry().getValue());
    NavigableMap<Long, NavigableMap<String, Long>> subMap =
        logSegmentBuckets.subMap(logSegmentBuckets.firstKey(), false, referenceTimeInMS, true);
    for (Map.Entry<Long, NavigableMap<String, Long>> bucket : subMap.entrySet()) {
      for (Map.Entry<String, Long> bucketEntry : bucket.getValue().entrySet()) {
        updateMapHelper(validSizePerLogSegment, bucketEntry.getKey(), bucketEntry.getValue());
      }
    }
    Long lastReferenceBucketTimeInMs = subMap.isEmpty() ? logSegmentBuckets.firstKey() : subMap.lastKey();
    return new Pair<>(lastReferenceBucketTimeInMs, validSizePerLogSegment);
  }

  /**
   * Given a reference time in ms return the corresponding valid data size per container map by aggregating all buckets
   * whose end time is less than or equal to the reference time.
   * @param referenceTimeInMs the reference time in ms until which deletes and expiration are relevant.
   * @return a {@link Pair} whose first element is the end time of the last bucket that was aggregated and whose second
   * element is the requested valid data size per container {@link Map}.
   */
  Map<String, Map<String, Long>> getValidSizePerContainer(Long referenceTimeInMs) {
    // Deep-copy the base bucket (the inner maps are mutated while aggregating deltas).
    Map<String, Map<String, Long>> validSizePerContainer = new HashMap<>();
    for (Map.Entry<String, Map<String, Long>> accountEntry : containerBuckets.firstEntry().getValue().entrySet()) {
      validSizePerContainer.put(accountEntry.getKey(), new HashMap<>(accountEntry.getValue()));
    }
    NavigableMap<Long, Map<String, Map<String, Long>>> subMap =
        containerBuckets.subMap(containerBuckets.firstKey(), false, referenceTimeInMs, true);
    for (Map.Entry<Long, Map<String, Map<String, Long>>> bucket : subMap.entrySet()) {
      for (Map.Entry<String, Map<String, Long>> accountEntry : bucket.getValue().entrySet()) {
        for (Map.Entry<String, Long> containerEntry : accountEntry.getValue().entrySet()) {
          updateNestedMapHelper(validSizePerContainer, accountEntry.getKey(), containerEntry.getKey(),
              containerEntry.getValue());
        }
      }
    }
    return validSizePerContainer;
  }

  /**
   * Helper function to update nested map data structure.
   * @param nestedMap nested {@link Map} to be updated
   * @param firstKey of the nested map
   * @param secondKey of the nested map
   * @param value the value to be added at the corresponding entry
   */
  private void updateNestedMapHelper(Map<String, Map<String, Long>> nestedMap, String firstKey, String secondKey,
      Long value) {
    // computeIfAbsent replaces the containsKey/put dance and avoids a double lookup.
    updateMapHelper(nestedMap.computeIfAbsent(firstKey, key -> new HashMap<>()), secondKey, value);
  }

  /**
   * Helper function to update map data structure.
   * @param map {@link Map} to be updated
   * @param key of the map
   * @param value the value to be added at the corresponding entry
   */
  private void updateMapHelper(Map<String, Long> map, String key, Long value) {
    // merge() adds value to the existing entry or inserts it when absent, in a single lookup.
    map.merge(key, value, Long::sum);
  }
}
| Java |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package mungeopts
import (
"time"
"k8s.io/kubernetes/pkg/util/sets"
"k8s.io/test-infra/mungegithub/options"
)
var (
	// Server holds the values of options used by mungers that serve web services.
	Server struct {
		// Address is the host:port the HTTP status server listens on.
		Address string
		// WWWRoot is the path to the static web files served by the webserver.
		WWWRoot string
	}
	// GCS holds the values of GCS options.
	GCS struct {
		// BucketName is the name of the GCS bucket.
		BucketName string
		// LogDir is the directory containing test logs.
		LogDir string
		// PullLogDir is the directory of the pr builder jenkins
		PullLogDir string
		// PullKey is a string to look for in a job name to figure out if it's
		// a pull (presubmit) job.
		PullKey string
	}
	// RequiredContexts holds options that specify which status contexts are required for various
	// actions.
	RequiredContexts struct {
		// Merge lists status contexts required before a PR is considered ok to merge.
		Merge []string
		// Retest lists status contexts that are retested and must come back green.
		Retest []string
	}
	// PRMaxWaitTime is the maximum time to wait for tests in a PR to start or finish.
	// This should be >2x as long as it normally takes for a PR
	// to complete, to avoid congestion collapse in the queue.
	PRMaxWaitTime time.Duration
)
// RegisterOptions registers options that may be used by any munger, feature, or report. It returns
// the set of option names that require a restart when changed.
func RegisterOptions(opts *options.Options) sets.String {
	// Options for mungers that run web servers.
	opts.RegisterString(&Server.Address, "address", ":8080", "The address to listen on for HTTP Status")
	opts.RegisterString(&Server.WWWRoot, "www", "www", "Path to static web files to serve from the webserver")
	// GCS options:
	opts.RegisterString(&GCS.BucketName, "gcs-bucket", "", "Name of GCS bucket.")
	opts.RegisterString(&GCS.LogDir, "gcs-logs-dir", "", "Directory containing test logs.")
	opts.RegisterString(&GCS.PullLogDir, "pull-logs-dir", "", "Directory of the PR builder.")
	opts.RegisterString(&GCS.PullKey, "pull-key", "", "String to look for in job name for it to be a pull (presubmit) job.")
	// Status context options:
	opts.RegisterStringSlice(&RequiredContexts.Retest, "required-retest-contexts", []string{}, "Comma separate list of statuses which will be retested and which must come back green after the `retest-body` comment is posted to a PR")
	opts.RegisterStringSlice(&RequiredContexts.Merge, "required-contexts", []string{}, "Comma separate list of status contexts required for a PR to be considered ok to merge")
	opts.RegisterDuration(&PRMaxWaitTime, "pr-max-wait-time", 2*time.Hour, "Maximum time to wait for tests in a PR to start or finish")
	// Only the web server options cannot be changed at runtime.
	return sets.NewString("address", "www")
}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.wicket.application;
import java.io.IOException;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import java.util.TreeSet;
import org.apache.wicket.util.collections.UrlExternalFormComparator;
import org.apache.wicket.util.file.File;
import org.apache.wicket.util.listener.IChangeListener;
import org.apache.wicket.util.time.Duration;
import org.apache.wicket.util.watch.IModificationWatcher;
import org.apache.wicket.util.watch.ModificationWatcher;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Custom ClassLoader that reverses the classloader lookups, and that is able to notify a listener
* when a class file is changed.
*
* @author <a href="mailto:[email protected]">Jean-Baptiste Quenot</a>
*/
public class ReloadingClassLoader extends URLClassLoader
{
	private static final Logger log = LoggerFactory.getLogger(ReloadingClassLoader.class);
	/** Locations scanned for class files, ordered/deduplicated by URL external form. */
	private static final Set<URL> urls = new TreeSet<URL>(new UrlExternalFormComparator());
	/** Inclusion ("+prefix") and exclusion ("-prefix") entries consulted by tryClassHere(). */
	private static final List<String> patterns = new ArrayList<String>();
	// Listener notified on class file changes; presumably set via a setter outside this chunk — confirm.
	private IChangeListener listener;
	/** How often the modification watcher polls watched class files. */
	private final Duration pollFrequency = Duration.seconds(3);
	/** Watches loaded class files for modifications (created in the constructor). */
	private final IModificationWatcher watcher;
	static
	{
		// Watch everything reachable from this class's own loader, but only reload
		// example classes: wicket core packages are explicitly excluded.
		addClassLoaderUrls(ReloadingClassLoader.class.getClassLoader());
		excludePattern("org.apache.wicket.*");
		includePattern("org.apache.wicket.examples.*");
	}
	/**
	 * Decide whether the named class should be loaded by this (reloading) class loader
	 * rather than delegated straight to the parent, based on the registered
	 * include/exclude patterns.
	 *
	 * @param name
	 *            fully qualified class name
	 * @return true if class if found, false otherwise
	 */
	protected boolean tryClassHere(String name)
	{
		// don't include classes in the java or javax.servlet package
		if (name != null && (name.startsWith("java.") || name.startsWith("javax.servlet")))
		{
			return false;
		}
		// Scan includes, then excludes
		boolean tryHere;
		// If no explicit includes, try here
		// (patterns is a static final list, so the null check is purely defensive)
		if (patterns == null || patterns.size() == 0)
		{
			tryHere = true;
		}
		else
		{
			// See if it matches include patterns
			tryHere = false;
			for (String rawpattern : patterns)
			{
				if (rawpattern.length() <= 1)
				{
					continue;
				}
				// Entries are stored as "+pattern" (include) or "-pattern" (exclude).
				// NOTE(review): the LAST matching pattern wins. Since tryHere starts false,
				// an exclude match only has an effect when it follows an include match for
				// the same name (the original FIXME claimed excludes are ignored entirely;
				// they are applied, but only as an override). Confirm intended precedence.
				boolean isInclude = rawpattern.substring(0, 1).equals("+");
				String pattern = rawpattern.substring(1);
				if (WildcardMatcherHelper.match(pattern, name) != null)
				{
					tryHere = isInclude;
				}
			}
		}
		return tryHere;
	}
/**
* Include a pattern
*
* @param pattern
* the pattern to include
*/
public static void includePattern(String pattern)
{
patterns.add("+" + pattern);
}
/**
* Exclude a pattern
*
* @param pattern
* the pattern to exclude
*/
public static void excludePattern(String pattern)
{
patterns.add("-" + pattern);
}
	/**
	 * Returns the list of all configured inclusion or exclusion patterns
	 *
	 * @return list of patterns as String; this is the live internal list, not a copy
	 */
	public static List<String> getPatterns()
	{
		return patterns;
	}
	/**
	 * Add the location of a directory containing class files. Duplicates (by URL
	 * external form) are silently ignored because the backing collection is a set.
	 *
	 * @param url
	 *            the URL for the directory
	 */
	public static void addLocation(URL url)
	{
		urls.add(url);
	}
	/**
	 * Returns the list of all configured locations of directories containing class files
	 *
	 * @return set of locations as URL; this is the live internal set, not a copy
	 */
	public static Set<URL> getLocations()
	{
		return urls;
	}
/**
* Add all the url locations we can find for the provided class loader
*
* @param loader
* class loader
*/
private static void addClassLoaderUrls(ClassLoader loader)
{
if (loader != null)
{
final Enumeration<URL> resources;
try
{
resources = loader.getResources("");
}
catch (IOException e)
{
throw new RuntimeException(e);
}
while (resources.hasMoreElements())
{
URL location = resources.nextElement();
ReloadingClassLoader.addLocation(location);
}
}
}
/**
* Create a new reloading ClassLoader from a list of URLs, and initialize the
* ModificationWatcher to detect class file modifications
*
* @param parent
* the parent classloader in case the class file cannot be loaded from the above
* locations
*/
 public ReloadingClassLoader(ClassLoader parent)
 {
  // Start with no URLs of our own; they are registered below from the static set
  super(new URL[] { }, parent);
  // probably doubles from this class, but just in case
  addClassLoaderUrls(parent);
  for (URL url : urls)
  {
   addURL(url);
  }
  // Watches registered class files for changes at the configured poll frequency
  watcher = new ModificationWatcher(pollFrequency);
 }
/**
 * Gets a resource from this <code>ClassLoader</code>. If the
* resource does not exist in this one, we check the parent.
* Please note that this is the exact opposite of the
* <code>ClassLoader</code> spec. We use it to work around inconsistent class loaders from third
* party vendors.
*
* @param name
* of resource
*/
@Override
public final URL getResource(final String name)
{
URL resource = findResource(name);
ClassLoader parent = getParent();
if (resource == null && parent != null)
{
resource = parent.getResource(name);
}
return resource;
}
/**
 * Loads the class from this <code>ClassLoader</code>. If the
* class does not exist in this one, we check the parent. Please
* note that this is the exact opposite of the
* <code>ClassLoader</code> spec. We use it to load the class from the same classloader as
* WicketFilter or WicketServlet. When found, the class file is watched for modifications.
*
* @param name
* the name of the class
* @param resolve
* if <code>true</code> then resolve the class
* @return the resulting <code>Class</code> object
* @exception ClassNotFoundException
* if the class could not be found
*/
 @Override
 public final Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundException
 {
  // First check if it's already loaded
  Class<?> clazz = findLoadedClass(name);
  if (clazz == null)
  {
   final ClassLoader parent = getParent();
   // Child-first: attempt to define the class ourselves when the name
   // passes the include/exclude pattern filter
   if (tryClassHere(name))
   {
    try
    {
     clazz = findClass(name);
     // Register the class file with the watcher so edits trigger a reload
     watchForModifications(clazz);
    }
    catch (ClassNotFoundException cnfe)
    {
     if (parent == null)
     {
      // Propagate exception
      throw cnfe;
     }
     // else: fall through and delegate to the parent below
    }
   }
   if (clazz == null)
   {
    if (parent == null)
    {
     throw new ClassNotFoundException(name);
    }
    else
    {
     // Will throw a CNFE if not found in parent
     // see http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6500212
     // clazz = parent.loadClass(name);
     clazz = Class.forName(name, false, parent);
    }
   }
  }
  if (resolve)
  {
   resolveClass(clazz);
  }
  return clazz;
 }
/**
* Sets the listener that will be notified when a class changes
*
* @param listener
* the listener to notify upon class change
*/
 public void setListener(IChangeListener listener)
 {
  // Single listener only; a later call replaces the previous one
  this.listener = listener;
 }
/**
* Watch changes of a class file by locating it in the list of location URLs and adding the
* corresponding file to the ModificationWatcher
*
* @param clz
* the class to watch
*/
 private void watchForModifications(Class<?> clz)
 {
  // Watch class in the future
  Iterator<URL> locationsIterator = urls.iterator();
  File clzFile = null;
  while (locationsIterator.hasNext())
  {
   // FIXME only works for directories, but JARs etc could be checked
   // as well
   URL location = locationsIterator.next();
   // Map com.example.Foo -> <location>/com/example/Foo.class
   String clzLocation = location.getFile() + clz.getName().replaceAll("\\.", "/") +
    ".class";
   log.debug("clzLocation=" + clzLocation);
   clzFile = new File(clzLocation);
   // effectively-final copy captured by the anonymous listener below
   final File finalClzFile = clzFile;
   if (clzFile.exists())
   {
    log.info("Watching changes of class " + clzFile);
    watcher.add(clzFile, new IChangeListener()
    {
     @Override
     public void onChange()
     {
      log.info("Class file " + finalClzFile + " has changed, reloading");
      try
      {
       listener.onChange();
      }
      catch (Exception e)
      {
       log.error("Could not notify listener", e);
       // If an error occurs when the listener is notified,
       // remove the watched object to avoid rethrowing the
       // exception at next check
       // FIXME check if class file has been deleted
       watcher.remove(finalClzFile);
      }
     }
    });
    // stop at the first location containing the class file
    break;
   }
   else
   {
    log.debug("Class file does not exist: " + clzFile);
   }
  }
  // NOTE(review): this only reports when the *last* candidate was missing;
  // earlier misses were already logged per-location above
  if (clzFile != null && !clzFile.exists())
  {
   log.debug("Could not locate class " + clz.getName());
  }
 }
/**
* Remove the ModificationWatcher from the current reloading class loader
*/
 public void destroy()
 {
  // Shuts down the modification watcher; presumably stops its polling
  // thread, so this loader should not be used for watching afterwards
  watcher.destroy();
 }
}
| Java |
package jadx.tests.integration.conditions;
import org.junit.jupiter.api.Test;
import jadx.tests.api.SmaliTest;
import static jadx.tests.api.utils.assertj.JadxAssertions.assertThat;
@SuppressWarnings("CommentedOutCode")
public class TestTernary4 extends SmaliTest {
 // @formatter:off
 /*
  private Set test(HashMap<String, Object> hashMap) {
   boolean z;
   HashSet hashSet = new HashSet();
   synchronized (this.defaultValuesByPath) {
    for (String next : this.defaultValuesByPath.keySet()) {
     Object obj = hashMap.get(next);
     if (obj != null) {
      z = !getValueObject(next).equals(obj);
     } else {
      z = this.valuesByPath.get(next) != null;;
     }
     if (z) {
      hashSet.add(next);
     }
    }
   }
   return hashSet;
  }
 */
 // @formatter:on
 @Test
 public void test() {
  // Checks that jadx reconstructs the ternary cleanly.
  // NOTE(review): "5" presumably guards against a leaked literal/register
  // artifact and "try" against a spurious try/catch emitted for the
  // synchronized region — confirm against the smali fixture.
  assertThat(getClassNodeFromSmali())
    .code()
    .removeBlockComments()
    .doesNotContain("5")
    .doesNotContain("try");
 }
}
| Java |
package jadx.tests.integration.debuginfo;
import org.junit.jupiter.api.Test;
import jadx.core.dex.nodes.ClassNode;
import jadx.tests.api.SmaliTest;
import static jadx.tests.api.utils.JadxMatchers.containsOne;
import static org.hamcrest.MatcherAssert.assertThat;
public class TestVariablesNames extends SmaliTest {
 // @formatter:off
 /*
  public static class TestCls {
   public void test(String s, int k) {
    f1(s);
    int i = k + 3;
    String s2 = "i" + i;
    f2(i, s2);
    double d = i * 5;
    String s3 = "d" + d;
    f3(d, s3);
   }
   private void f1(String s) {
   }
   private void f2(int i, String i2) {
   }
   private void f3(double d, String d2) {
   }
  }
 */
 // @formatter:on
 /**
  * Parameter register reused in variables assign with different types and names
  * No variables names in debug info
  */
 @Test
 public void test() {
  ClassNode cls = getClassNodeFromSmaliWithPath("debuginfo", "TestVariablesNames");
  String code = cls.getCode().toString();
  // TODO: don't use current variables naming in tests
  // Each reuse of the register must get its own, type-appropriate name.
  assertThat(code, containsOne("f1(str);"));
  assertThat(code, containsOne("f2(i2, \"i\" + i2);"));
  assertThat(code, containsOne("f3(d, \"d\" + d);"));
 }
}
| Java |
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 1997-2009. All Rights Reserved.
*
* The contents of this file are subject to the Erlang Public License,
* Version 1.1, (the "License"); you may not use this file except in
* compliance with the License. You should have received a copy of the
* Erlang Public License along with this software. If not, it can be
* retrieved online at http://www.erlang.org/.
*
* Software distributed under the License is distributed on an "AS IS"
* basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
* the License for the specific language governing rights and limitations
* under the License.
*
* %CopyrightEnd%
*/
/*----------------------------------------------------------------------
** Purpose : System dependent driver declarations
**---------------------------------------------------------------------- */
#ifndef __DRIVER_INT_H__
#define __DRIVER_INT_H__
#include <ioLib.h> /* VxWorks I/O library; presumably declares struct iovec — confirm */
typedef struct iovec SysIOVec; /* system-dependent scatter/gather I/O vector type */
#endif
| Java |
/*!
* commander
* Copyright(c) 2011 TJ Holowaychuk <[email protected]>
* MIT Licensed
*/
/**
* Module dependencies.
*/
var EventEmitter = require('events').EventEmitter
, spawn = require('child_process').spawn
, keypress = require('keypress')
, fs = require('fs')
, exists = fs.existsSync
, path = require('path')
, tty = require('tty')
, dirname = path.dirname
, basename = path.basename;
/**
* Expose the root command.
*/
exports = module.exports = new Command;
/**
* Expose `Command`.
*/
exports.Command = Command;
/**
* Expose `Option`.
*/
exports.Option = Option;
/**
* Initialize a new `Option` with the given `flags` and `description`.
*
* @param {String} flags
* @param {String} description
* @api public
*/
function Option(flags, description) {
this.flags = flags;
this.required = ~flags.indexOf('<');
this.optional = ~flags.indexOf('[');
this.bool = !~flags.indexOf('-no-');
flags = flags.split(/[ ,|]+/);
if (flags.length > 1 && !/^[[<]/.test(flags[1])) this.short = flags.shift();
this.long = flags.shift();
this.description = description || '';
}
/**
* Return option name.
*
* @return {String}
* @api private
*/
Option.prototype.name = function(){
  // Strip the leading dashes, then any "no-" negation prefix.
  var stripped = this.long.replace('--', '');
  return stripped.replace('no-', '');
};
/**
* Check if `arg` matches the short or long flag.
*
* @param {String} arg
* @return {Boolean}
* @api private
*/
Option.prototype.is = function(arg){
  // Match either spelling of this option's flag.
  if (arg == this.short) return true;
  return arg == this.long;
};
/**
* Initialize a new `Command`.
*
* @param {String} name
* @api public
*/
function Command(name) {
  // Registered subcommands (Command instances).
  this.commands = [];
  // Registered Option instances.
  this.options = [];
  // Expected positional arguments, as { required, name } records.
  this._args = [];
  this._name = name;
}
/**
* Inherit from `EventEmitter.prototype`.
*/
Command.prototype.__proto__ = EventEmitter.prototype;
/**
* Add command `name`.
*
* The `.action()` callback is invoked when the
* command `name` is specified via __ARGV__,
* and the remaining arguments are applied to the
* function for access.
*
* When the `name` is "*" an un-matched command
* will be passed as the first arg, followed by
* the rest of __ARGV__ remaining.
*
* Examples:
*
* program
* .version('0.0.1')
* .option('-C, --chdir <path>', 'change the working directory')
* .option('-c, --config <path>', 'set config path. defaults to ./deploy.conf')
* .option('-T, --no-tests', 'ignore test hook')
*
* program
* .command('setup')
* .description('run remote setup commands')
* .action(function(){
* console.log('setup');
* });
*
* program
* .command('exec <cmd>')
* .description('run the given remote command')
* .action(function(cmd){
* console.log('exec "%s"', cmd);
* });
*
* program
* .command('*')
* .description('deploy the given env')
* .action(function(env){
* console.log('deploying "%s"', env);
* });
*
* program.parse(process.argv);
*
* @param {String} name
* @param {String} [desc]
* @return {Command} the new command
* @api public
*/
Command.prototype.command = function(name, desc){
  // "name" may embed expected args, e.g. "exec <cmd>".
  var args = name.split(/ +/);
  var cmd = new Command(args.shift());
  if (desc) cmd.description(desc);
  // A description implies a git-style external-executable subcommand.
  if (desc) this.executables = true;
  this.commands.push(cmd);
  cmd.parseExpectedArgs(args);
  cmd.parent = this;
  // Executable subcommands chain on the parent; others return the new command
  // so it can be configured further.
  if (desc) return this;
  return cmd;
};
/**
* Add an implicit `help [cmd]` subcommand
* which invokes `--help` for the given command.
*
* @api private
*/
Command.prototype.addImplicitHelpCommand = function() {
  // Registered with a description, so it is dispatched as an executable
  // subcommand (handled specially in executeSubCommand).
  this.command('help [cmd]', 'display help for [cmd]');
};
/**
* Parse expected `args`.
*
* For example `["[type]"]` becomes `[{ required: false, name: 'type' }]`.
*
* @param {Array} args
* @return {Command} for chaining
* @api public
*/
Command.prototype.parseExpectedArgs = function(args){
  // BUGFIX: always return `this` so the call chains as documented;
  // the original returned undefined when `args` was empty.
  if (!args.length) return this;
  var self = this;
  args.forEach(function(arg){
    switch (arg[0]) {
      case '<':
        // <name>: required positional argument
        self._args.push({ required: true, name: arg.slice(1, -1) });
        break;
      case '[':
        // [name]: optional positional argument
        self._args.push({ required: false, name: arg.slice(1, -1) });
        break;
    }
  });
  return this;
};
/**
* Register callback `fn` for the command.
*
* Examples:
*
* program
* .command('help')
* .description('display verbose help')
* .action(function(){
* // output help here
* });
*
* @param {Function} fn
* @return {Command} for chaining
* @api public
*/
Command.prototype.action = function(fn){
  var self = this;
  // Fired by the parent's parseArgs() when this command's name is matched.
  this.parent.on(this._name, function(args, unknown){
    // Parse any so-far unknown options
    unknown = unknown || [];
    var parsed = self.parseOptions(unknown);
    // Output help if necessary
    outputHelpIfNecessary(self, parsed.unknown);
    // If there are still any unknown options, then we simply
    // die, unless someone asked for help, in which case we give it
    // to them, and then we die.
    if (parsed.unknown.length > 0) {
      self.unknownOption(parsed.unknown[0]);
    }
    // Leftover arguments need to be pushed back. Fixes issue #56
    if (parsed.args.length) args = parsed.args.concat(args);
    // Enforce required positional args before invoking the callback.
    self._args.forEach(function(arg, i){
      if (arg.required && null == args[i]) {
        self.missingArgument(arg.name);
      }
    });
    // Always append ourselves to the end of the arguments,
    // to make sure we match the number of arguments the user
    // expects
    if (self._args.length) {
      args[self._args.length] = self;
    } else {
      args.push(self);
    }
    fn.apply(this, args);
  });
  return this;
};
/**
* Define option with `flags`, `description` and optional
* coercion `fn`.
*
* The `flags` string should contain both the short and long flags,
* separated by comma, a pipe or space. The following are all valid
* all will output this way when `--help` is used.
*
* "-p, --pepper"
* "-p|--pepper"
* "-p --pepper"
*
* Examples:
*
* // simple boolean defaulting to false
* program.option('-p, --pepper', 'add pepper');
*
* --pepper
* program.pepper
* // => Boolean
*
 *    // negatable boolean defaulting to true
 *    program.option('-C, --no-cheese', 'remove cheese');
 *
 *    program.cheese
 *    // => true
 *
 *    --no-cheese
 *    program.cheese
 *    // => false
*
* // required argument
* program.option('-C, --chdir <path>', 'change the working directory');
*
* --chdir /tmp
* program.chdir
* // => "/tmp"
*
* // optional argument
* program.option('-c, --cheese [type]', 'add cheese [marble]');
*
* @param {String} flags
* @param {String} description
* @param {Function|Mixed} fn or default
* @param {Mixed} defaultValue
* @return {Command} for chaining
* @api public
*/
Command.prototype.option = function(flags, description, fn, defaultValue){
  var self = this
    , option = new Option(flags, description)
    , oname = option.name()
    , name = camelcase(oname);
  // default as 3rd arg (when no coercion function is supplied)
  if ('function' != typeof fn) defaultValue = fn, fn = null;
  // preassign default value only for --no-*, [optional], or <required>
  if (false == option.bool || option.optional || option.required) {
    // when --no-* we make sure default is true
    if (false == option.bool) defaultValue = true;
    // preassign only if we have a default
    if (undefined !== defaultValue) self[name] = defaultValue;
  }
  // register the option
  this.options.push(option);
  // when it's passed assign the value
  // and conditionally invoke the callback
  this.on(oname, function(val){
    // coercion
    if (null != val && fn) val = fn(val);
    // unassigned or bool
    if ('boolean' == typeof self[name] || 'undefined' == typeof self[name]) {
      // if no value, bool true, and we have a default, then use it!
      if (null == val) {
        self[name] = option.bool
          ? defaultValue || true
          : false;
      } else {
        self[name] = val;
      }
    } else if (null !== val) {
      // reassign
      self[name] = val;
    }
  });
  return this;
};
/**
* Parse `argv`, settings options and invoking commands when defined.
*
* @param {Array} argv
* @return {Command} for chaining
* @api public
*/
Command.prototype.parse = function(argv){
  // implicit help
  if (this.executables) this.addImplicitHelpCommand();
  // store raw args
  this.rawArgs = argv;
  // guess name from the script path when not set explicitly
  this._name = this._name || basename(argv[1]);
  // process argv (skip the interpreter and script path)
  var parsed = this.parseOptions(this.normalize(argv.slice(2)));
  var args = this.args = parsed.args;
  var result = this.parseArgs(this.args, parsed.unknown);
  // executable sub-commands, skip .parseArgs()
  if (this.executables) return this.executeSubCommand(argv, args, parsed.unknown);
  return result;
};
/**
* Execute a sub-command executable.
*
* @param {Array} argv
* @param {Array} args
* @param {Array} unknown
* @api private
*/
Command.prototype.executeSubCommand = function(argv, args, unknown) {
  args = args.concat(unknown);
  // No subcommand given, or bare "help": show our own usage and exit.
  if (!args.length) this.help();
  if ('help' == args[0] && 1 == args.length) this.help();
  // <cmd> --help
  if ('help' == args[0]) {
    args[0] = args[1];
    args[1] = '--help';
  }
  // executable: git-style "<bin>-<subcommand>" sibling of the main script
  var dir = dirname(argv[1]);
  var bin = basename(argv[1]) + '-' + args[0];
  // check for ./<bin> first
  var local = path.join(dir, bin);
  // run it, inheriting stdio (customFds is the legacy pre-"stdio" spelling)
  args = args.slice(1);
  var proc = spawn(local, args, { stdio: 'inherit', customFds: [0, 1, 2] });
  proc.on('error', function(err){
    if (err.code == "ENOENT") {
      console.error('\n %s(1) does not exist, try --help\n', bin);
    } else if (err.code == "EACCES") {
      console.error('\n %s(1) not executable. try chmod or run with root\n', bin);
    }
  });
  this.runningCommand = proc;
};
/**
* Normalize `args`, splitting joined short flags. For example
* the arg "-abc" is equivalent to "-a -b -c".
* This also normalizes equal sign and splits "--abc=def" into "--abc def".
*
* @param {Array} args
* @return {Array}
* @api private
*/
Command.prototype.normalize = function(args){
  // Expand "-abc" -> "-a -b -c" and "--abc=def" -> "--abc def".
  var out = [];
  for (var idx = 0; idx < args.length; ++idx) {
    var token = args[idx];
    var eq;
    if (token.length > 1 && '-' == token[0] && '-' != token[1]) {
      // Joined short flags: one dash followed by several letters.
      var letters = token.slice(1).split('');
      for (var j = 0; j < letters.length; ++j) {
        out.push('-' + letters[j]);
      }
    } else if (/^--/.test(token) && ~(eq = token.indexOf('='))) {
      // Long flag with an inline "=value".
      out.push(token.slice(0, eq), token.slice(eq + 1));
    } else {
      out.push(token);
    }
  }
  return out;
};
/**
* Parse command `args`.
*
* When listener(s) are available those
* callbacks are invoked, otherwise the "*"
* event is emitted and those actions are invoked.
*
* @param {Array} args
* @return {Command} for chaining
* @api private
*/
Command.prototype.parseArgs = function(args, unknown){
  // Dispatch the first arg to a registered command listener, or fall back
  // to the "*" catch-all. (Removed the unused `cmds`/`len` locals.)
  var name;
  if (args.length) {
    name = args[0];
    if (this.listeners(name).length) {
      // Known command: shift it off and emit with the remaining args.
      this.emit(args.shift(), args, unknown);
    } else {
      this.emit('*', args);
    }
  } else {
    outputHelpIfNecessary(this, unknown);
    // If there were no args and we have unknown options,
    // then they are extraneous and we need to error.
    if (unknown.length > 0) {
      this.unknownOption(unknown[0]);
    }
  }
  return this;
};
/**
* Return an option matching `arg` if any.
*
* @param {String} arg
* @return {Option}
* @api private
*/
Command.prototype.optionFor = function(arg){
  // Linear scan; implicitly returns undefined when no flag matches.
  var opts = this.options;
  for (var idx = 0; idx < opts.length; ++idx) {
    var candidate = opts[idx];
    if (candidate.is(arg)) return candidate;
  }
};
/**
* Parse options from `argv` returning `argv`
* void of these options.
*
* @param {Array} argv
* @return {Array}
* @api public
*/
Command.prototype.parseOptions = function(argv){
  var args = []
    , len = argv.length
    , literal
    , option
    , arg;
  // options we don't recognise (passed through for subcommands)
  var unknownOptions = [];
  // parse options
  for (var i = 0; i < len; ++i) {
    arg = argv[i];
    // literal args after --
    if ('--' == arg) {
      literal = true;
      continue;
    }
    if (literal) {
      args.push(arg);
      continue;
    }
    // find matching Option
    option = this.optionFor(arg);
    // option is defined
    if (option) {
      // requires arg; note a lone "-" is accepted as a value
      if (option.required) {
        arg = argv[++i];
        if (null == arg) return this.optionMissingArgument(option);
        if ('-' == arg[0] && '-' != arg) return this.optionMissingArgument(option, arg);
        this.emit(option.name(), arg);
      // optional arg: consume the next token only if it doesn't look like a flag
      } else if (option.optional) {
        arg = argv[i+1];
        if (null == arg || ('-' == arg[0] && '-' != arg)) {
          arg = null;
        } else {
          ++i;
        }
        this.emit(option.name(), arg);
      // bool
      } else {
        this.emit(option.name());
      }
      continue;
    }
    // looks like an option
    if (arg.length > 1 && '-' == arg[0]) {
      unknownOptions.push(arg);
      // If the next argument looks like it might be
      // an argument for this option, we pass it on.
      // If it isn't, then it'll simply be ignored
      if (argv[i+1] && '-' != argv[i+1][0]) {
        unknownOptions.push(argv[++i]);
      }
      continue;
    }
    // arg
    args.push(arg);
  }
  return { args: args, unknown: unknownOptions };
};
/**
* Argument `name` is missing.
*
* @param {String} name
* @api private
*/
Command.prototype.missingArgument = function(argName){
  // Report the missing required argument, blank-line padded, then abort.
  console.error();
  console.error(" error: missing required argument `%s'", argName);
  console.error();
  process.exit(1);
};
/**
* `Option` is missing an argument, but received `flag` or nothing.
*
* @param {String} option
* @param {String} flag
* @api private
*/
Command.prototype.optionMissingArgument = function(option, flag){
  console.error();
  if (flag) {
    // the value slot was consumed by what looks like another flag
    console.error(" error: option `%s' argument missing, got `%s'", option.flags, flag);
  } else {
    console.error(" error: option `%s' argument missing", option.flags);
  }
  console.error();
  process.exit(1);
};
/**
* Unknown option `flag`.
*
* @param {String} flag
* @api private
*/
Command.prototype.unknownOption = function(unknownFlag){
  // Report the unrecognised flag, blank-line padded, then abort.
  console.error();
  console.error(" error: unknown option `%s'", unknownFlag);
  console.error();
  process.exit(1);
};
/**
* Set the program version to `str`.
*
* This method auto-registers the "-V, --version" flag
* which will print the version number when passed.
*
* @param {String} str
* @param {String} flags
* @return {Command} for chaining
* @api public
*/
Command.prototype.version = function(str, flags){
  // Getter when called without arguments.
  if (0 == arguments.length) return this._version;
  this._version = str;
  flags = flags || '-V, --version';
  this.option(flags, 'output the version number');
  // Print the version and exit as soon as the flag is parsed.
  this.on('version', function(){
    console.log(str);
    process.exit(0);
  });
  return this;
};
/**
* Set the description `str`.
*
* @param {String} str
* @return {String|Command}
* @api public
*/
Command.prototype.description = function(str){
  // Getter when called without arguments, chainable setter otherwise.
  if (arguments.length === 0) return this._description;
  this._description = str;
  return this;
};
/**
* Set / get the command usage `str`.
*
* @param {String} str
* @return {String|Command}
* @api public
*/
Command.prototype.usage = function(str){
  var names = this._args.map(function(arg){
    return arg.required
      ? '<' + arg.name + '>'
      : '[' + arg.name + ']';
  });
  // Default usage string, e.g. "[options] [command] <src> <dest>".
  // BUGFIX: join the arg names with spaces; the original concatenated the
  // array directly, which rendered multiple args comma-separated
  // ("<src>,<dest>").
  var usage = '[options'
    + (this.commands.length ? '] [command' : '')
    + ']'
    + (this._args.length ? ' ' + names.join(' ') : '');
  // Getter when called without arguments, chainable setter otherwise.
  if (0 == arguments.length) return this._usage || usage;
  this._usage = str;
  return this;
};
/**
* Return the largest option length.
*
* @return {Number}
* @api private
*/
Command.prototype.largestOptionLength = function(){
  // Width of the widest flags string, used to align the help columns.
  var widest = 0;
  this.options.forEach(function(option){
    if (option.flags.length > widest) widest = option.flags.length;
  });
  return widest;
};
/**
* Return help for options.
*
* @return {String}
* @api private
*/
Command.prototype.optionHelp = function(){
  var width = this.largestOptionLength();
  // Built-in help flag first, then every registered option, one per line,
  // with the flags column padded to a common width.
  var rows = [pad('-h, --help', width) + ' ' + 'output usage information'];
  this.options.forEach(function(option){
    rows.push(pad(option.flags, width) + ' ' + option.description);
  });
  return rows.join('\n');
};
/**
* Return command help documentation.
*
* @return {String}
* @api private
*/
Command.prototype.commandHelp = function(){
  if (!this.commands.length) return '';
  // Render the indented "Commands:" section, one line per subcommand,
  // with the name column padded to 22 characters.
  return [
    ''
    , ' Commands:'
    , ''
    , this.commands.map(function(cmd){
      var args = cmd._args.map(function(arg){
        return arg.required
          ? '<' + arg.name + '>'
          : '[' + arg.name + ']';
      }).join(' ');
      return pad(cmd._name
        + (cmd.options.length
          ? ' [options]'
          : '') + ' ' + args, 22)
        + (cmd.description()
          ? ' ' + cmd.description()
          : '');
    }).join('\n').replace(/^/gm, ' ')
    , ''
  ].join('\n');
};
/**
* Return program help documentation.
*
* @return {String}
* @api private
*/
Command.prototype.helpInformation = function(){
  // Assemble the full help screen: usage line, command list, option list.
  return [
    ''
    , ' Usage: ' + this._name + ' ' + this.usage()
    , '' + this.commandHelp()
    , ' Options:'
    , ''
    , '' + this.optionHelp().replace(/^/gm, ' ')
    , ''
    , ''
  ].join('\n');
};
/**
* Prompt for a `Number`.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptForNumber = function(str, fn){
  var self = this;
  // Re-prompt until the input parses as a number.
  this.promptSingleLine(str, function parseNumber(val){
    var num = Number(val);
    if (isNaN(num)) return self.promptSingleLine(str + '(must be a number) ', parseNumber);
    fn(num);
  });
};
/**
* Prompt for a `Date`.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptForDate = function(str, fn){
  var self = this;
  // Re-prompt until the input parses as a valid Date.
  this.promptSingleLine(str, function parseDate(val){
    var when = new Date(val);
    if (isNaN(when.getTime())) return self.promptSingleLine(str + '(must be a date) ', parseDate);
    fn(when);
  });
};
/**
* Prompt for a `Regular Expression`.
*
* @param {String} str
* @param {Object} pattern regular expression object to test
* @param {Function} fn
* @api private
*/
Command.prototype.promptForRegexp = function(str, pattern, fn){
  var self = this;
  // Re-prompt until the input matches the supplied pattern.
  this.promptSingleLine(str, function parseRegexp(val){
    if (pattern.test(val)) return fn(val);
    self.promptSingleLine(str + '(regular expression mismatch) ', parseRegexp);
  });
};
/**
* Single-line prompt.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptSingleLine = function(str, fn){
  // determine if the 2nd argument is a regular expression
  // (duck-typed: RegExp instances carry both .global and .multiline)
  if (arguments[1].global !== undefined && arguments[1].multiline !== undefined) {
    return this.promptForRegexp(str, arguments[1], arguments[2]);
  } else if ('function' == typeof arguments[2]) {
    // Dispatch by the coercion function's name, e.g. Number -> promptForNumber
    return this['promptFor' + (fn.name || fn)](str, arguments[2]);
  }
  process.stdout.write(str);
  process.stdin.setEncoding('utf8');
  // Deliver the first line typed, trimmed, then stop listening.
  process.stdin.once('data', function(val){
    fn(val.trim());
  }).resume();
};
/**
* Multi-line prompt.
*
* @param {String} str
* @param {Function} fn
* @api private
*/
Command.prototype.promptMultiLine = function(str, fn){
  var buf = [];
  console.log(str);
  process.stdin.setEncoding('utf8');
  process.stdin.on('data', function(val){
    // A blank line terminates input and flushes the buffer to the callback.
    if ('\n' == val || '\r\n' == val) {
      process.stdin.removeAllListeners('data');
      fn(buf.join('\n'));
    } else {
      buf.push(val.trimRight());
    }
  }).resume();
};
/**
* Prompt `str` and callback `fn(val)`
*
* Commander supports single-line and multi-line prompts.
* To issue a single-line prompt simply add white-space
* to the end of `str`, something like "name: ", whereas
* for a multi-line prompt omit this "description:".
*
*
* Examples:
*
* program.prompt('Username: ', function(name){
* console.log('hi %s', name);
* });
*
* program.prompt('Description:', function(desc){
* console.log('description was "%s"', desc.trim());
* });
*
* @param {String|Object} str
* @param {Function} fn
* @api public
*/
Command.prototype.prompt = function(str, fn){
  var self = this;
  if ('string' == typeof str) {
    // Trailing whitespace selects the single-line prompt; otherwise multi-line.
    if (/ $/.test(str)) return this.promptSingleLine.apply(this, arguments);
    this.promptMultiLine(str, fn);
  } else {
    // Object form: prompt for each key's label in turn, collecting answers.
    var keys = Object.keys(str)
      , obj = {};
    function next() {
      var key = keys.shift()
        , label = str[key];
      // All keys consumed: deliver the collected answers.
      if (!key) return fn(obj);
      self.prompt(label, function(val){
        obj[key] = val;
        next();
      });
    }
    next();
  }
};
/**
* Prompt for password with `str`, `mask` char and callback `fn(val)`.
*
* The mask string defaults to '', aka no output is
* written while typing, you may want to use "*" etc.
*
* Examples:
*
* program.password('Password: ', function(pass){
* console.log('got "%s"', pass);
* process.stdin.destroy();
* });
*
* program.password('Password: ', '*', function(pass){
* console.log('got "%s"', pass);
* process.stdin.destroy();
* });
*
* @param {String} str
* @param {String} mask
* @param {Function} fn
* @api public
*/
Command.prototype.password = function(str, mask, fn){
  var self = this
    , buf = '';
  // default mask
  if ('function' == typeof mask) {
    fn = mask;
    mask = '';
  }
  keypress(process.stdin);
  // raw mode so keystrokes arrive unbuffered and unechoed
  function setRawMode(mode) {
    if (process.stdin.setRawMode) {
      process.stdin.setRawMode(mode);
    } else {
      tty.setRawMode(mode);
    }
  };
  setRawMode(true);
  process.stdout.write(str);
  // keypress
  process.stdin.on('keypress', function(c, key){
    if (key && 'enter' == key.name) {
      console.log();
      process.stdin.pause();
      process.stdin.removeAllListeners('keypress');
      setRawMode(false);
      // blank password: start the whole prompt over
      if (!buf.trim().length) return self.password(str, mask, fn);
      fn(buf);
      return;
    }
    // ctrl-c: echo what was collected so far and abort
    if (key && key.ctrl && 'c' == key.name) {
      console.log('%s', buf);
      process.exit();
    }
    // echo the mask char (possibly '') instead of the real keystroke
    process.stdout.write(mask);
    buf += c;
  }).resume();
};
/**
* Confirmation prompt with `str` and callback `fn(bool)`
*
* Examples:
*
* program.confirm('continue? ', function(ok){
* console.log(' got %j', ok);
* process.stdin.destroy();
* });
*
* @param {String} str
* @param {Function} fn
* @api public
*/
Command.prototype.confirm = function(str, fn, verbose){
  var self = this;
  this.prompt(str, function(ok){
    // Empty answer: re-prompt, appending the hint only on the first retry
    // (verbose flags that the hint is already present).
    if (!ok.trim()) {
      if (!verbose) str += '(yes or no) ';
      return self.confirm(str, fn, true);
    }
    fn(parseBool(ok));
  });
};
/**
* Choice prompt with `list` of items and callback `fn(index, item)`
*
* Examples:
*
* var list = ['tobi', 'loki', 'jane', 'manny', 'luna'];
*
* console.log('Choose the coolest pet:');
* program.choose(list, function(i){
* console.log('you chose %d "%s"', i, list[i]);
* process.stdin.destroy();
* });
*
* @param {Array} list
* @param {Number|Function} index or fn
* @param {Function} fn
* @api public
*/
Command.prototype.choose = function(list, index, fn){
  var self = this
    , hasDefault = 'number' == typeof index;
  // optional default index: choose(list, fn) or choose(list, i, fn)
  if (!hasDefault) {
    fn = index;
    index = null;
  }
  // print the menu, marking the default entry with '*'
  list.forEach(function(item, i){
    if (hasDefault && i == index) {
      console.log('* %d) %s', i + 1, item);
    } else {
      console.log(' %d) %s', i + 1, item);
    }
  });
  // re-prompt until a valid 1-based selection is entered
  function again() {
    self.prompt(' : ', function(val){
      val = parseInt(val, 10) - 1;
      // non-numeric input falls back to the default, if any
      if (hasDefault && isNaN(val)) val = index;
      if (null == list[val]) {
        again();
      } else {
        fn(val, list[val]);
      }
    });
  }
  again();
};
/**
* Output help information for this command
*
* @api public
*/
Command.prototype.outputHelp = function(){
  process.stdout.write(this.helpInformation());
  // Let listeners append extra help text via the "--help" event.
  this.emit('--help');
};
/**
* Output help information and exit.
*
* @api public
*/
Command.prototype.help = function(){
  // Print the full usage screen and terminate the process.
  this.outputHelp();
  process.exit();
};
/**
* Camel-case the given `flag`
*
* @param {String} flag
* @return {String}
* @api private
*/
function camelcase(flag) {
return flag.split('-').reduce(function(str, word){
return str + word[0].toUpperCase() + word.slice(1);
});
}
/**
* Parse a boolean `str`.
*
* @param {String} str
* @return {Boolean}
* @api private
*/
function parseBool(str) {
return /^y|yes|ok|true$/i.test(str);
}
/**
* Pad `str` to `width`.
*
* @param {String} str
* @param {Number} width
* @return {String}
* @api private
*/
function pad(str, width) {
var len = Math.max(0, width - str.length);
return str + Array(len + 1).join(' ');
}
/**
* Output help information if necessary
*
* @param {Command} command to output help for
* @param {Array} array of options to search for -h or --help
* @api private
*/
function outputHelpIfNecessary(cmd, options) {
options = options || [];
for (var i = 0; i < options.length; i++) {
if (options[i] == '--help' || options[i] == '-h') {
cmd.outputHelp();
process.exit(0);
}
}
}
| Java |
package com.cardshifter.gdx.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.math.Rectangle;
import com.badlogic.gdx.math.Vector2;
import com.badlogic.gdx.scenes.scene2d.Actor;
import com.badlogic.gdx.scenes.scene2d.InputEvent;
import com.badlogic.gdx.scenes.scene2d.ui.*;
import com.badlogic.gdx.scenes.scene2d.utils.ClickListener;
import com.cardshifter.api.incoming.UseAbilityMessage;
import com.cardshifter.api.messages.Message;
import com.cardshifter.api.outgoing.*;
import com.cardshifter.gdx.*;
import com.cardshifter.gdx.ui.CardshifterClientContext;
import com.cardshifter.gdx.ui.EntityView;
import com.cardshifter.gdx.ui.PlayerView;
import com.cardshifter.gdx.ui.cards.CardView;
import com.cardshifter.gdx.ui.cards.CardViewSmall;
import com.cardshifter.gdx.ui.zones.CompactHiddenZoneView;
import com.cardshifter.gdx.ui.zones.DefaultZoneView;
import com.cardshifter.gdx.ui.zones.ZoneView;
import java.util.*;
import java.util.List;
/**
* Created by Simon on 1/31/2015.
*/
public class GameScreen implements Screen, TargetableCallback {
private final CardshifterGame game;
private final CardshifterClient client;
private final int playerIndex;
private final int gameId;
private final Table table;
private final Map<Integer, ZoneView> zoneViews = new HashMap<Integer, ZoneView>();
private final Map<Integer, EntityView> entityViews = new HashMap<Integer, EntityView>();
private final Map<String, Container<Actor>> holders = new HashMap<String, Container<Actor>>();
private final List<EntityView> targetsSelected = new ArrayList<EntityView>();
private final Screen parentScreen;
private AvailableTargetsMessage targetsAvailable;
    private final TargetableCallback onTarget = new TargetableCallback() {
        @Override
        public boolean addEntity(EntityView view) {
            // Clicking an already-selected target deselects it.
            if (targetsSelected.contains(view)) {
                targetsSelected.remove(view);
                Gdx.app.log("GameScreen", "Removing selection " + view.getId());
                view.setTargetable(TargetStatus.TARGETABLE, this);
                return false;
            }
            // Abilities needing exactly one target fire immediately, no "Done" press.
            if (targetsAvailable != null && targetsAvailable.getMax() == 1 && targetsAvailable.getMin() == 1) {
                Gdx.app.log("GameScreen", "Sending selection " + view.getId());
                client.send(new UseAbilityMessage(gameId, targetsAvailable.getEntity(), targetsAvailable.getAction(), new int[]{ view.getId() }));
                return false;
            }
            // Otherwise accumulate the target until the player confirms with "Done".
            Gdx.app.log("GameScreen", "Adding selection " + view.getId());
            view.setTargetable(TargetStatus.TARGETED, this);
            return targetsSelected.add(view);
        }
    };
private final CardshifterClientContext context;
//private final float screenWidth;
private final float screenHeight;
public GameScreen(final CardshifterGame game, final CardshifterClient client, NewGameMessage message, final Screen parentScreen) {
this.parentScreen = parentScreen;
this.game = game;
this.client = client;
this.playerIndex = message.getPlayerIndex();
this.gameId = message.getGameId();
this.context = new CardshifterClientContext(game.skin, message.getGameId(), client, game.stage);
//this.screenWidth = CardshifterGame.STAGE_WIDTH;
this.screenHeight = CardshifterGame.STAGE_HEIGHT;
this.table = new Table(game.skin);
Table leftTable = new Table(game.skin);
Table topTable = new Table(game.skin);
//Table rightTable = new Table(game.skin);
Table centerTable = new Table(game.skin);
TextButton backToMenu = new TextButton("Back to menu", game.skin);
backToMenu.addListener(new ClickListener() {
@Override
public void clicked(InputEvent event, float x, float y) {
game.setScreen(parentScreen);
}
});
leftTable.add(backToMenu).expandX().fill().row();
addZoneHolder(leftTable, 1 - this.playerIndex, "").expandY().fillY();
addZoneHolder(leftTable, this.playerIndex, "").expandY().fillY();
leftTable.add("controls").row();
TextButton actionDone = new TextButton("Done", game.skin);
actionDone.addListener(new ClickListener() {
@Override
public void clicked(InputEvent event, float x, float y) {
if (targetsAvailable != null) {
int selected = targetsSelected.size();
if (selected >= targetsAvailable.getMin() && selected <= targetsAvailable.getMax()) {
int[] targets = new int[targetsSelected.size()];
for (int i = 0; i < targets.length; i++) {
targets[i] = targetsSelected.get(i).getId();
}
UseAbilityMessage message = new UseAbilityMessage(gameId, targetsAvailable.getEntity(), targetsAvailable.getAction(), targets);
client.send(message);
}
}
}
});
leftTable.add(actionDone);
topTable.add(leftTable).left().expandY().fillY();
topTable.add(centerTable).center().expandX().expandY().fill();
//topTable.add(rightTable).right().width(150).expandY().fillY();
addZoneHolder(centerTable, 1 - this.playerIndex, "Hand").top().height(this.screenHeight/4);
addZoneHolder(centerTable, 1 - this.playerIndex, "Battlefield").height(this.screenHeight/4);
addZoneHolder(centerTable, this.playerIndex, "Battlefield").height(this.screenHeight/4);
this.table.add(topTable).expand().fill().row();
addZoneHolder(this.table, this.playerIndex, "Hand").height(140).expandX().fill();
this.table.setFillParent(true);
}
private Cell<Container<Actor>> addZoneHolder(Table table, int i, String name) {
Container<Actor> container = new Container<Actor>();
container.setName(name);
// container.fill();
Cell<Container<Actor>> cell = table.add(container).expandX().fillX();
table.row();
holders.put(i + name, container);
return cell;
}
@Override
public void render(float delta) {
}
@Override
public void resize(int width, int height) {
}
@Override
public void show() {
game.stage.addActor(table);
}
@Override
public void hide() {
table.remove();
}
@Override
public void pause() {
}
@Override
public void resume() {
}
@Override
public void dispose() {
}
public Map<Class<? extends Message>, SpecificHandler<?>> getHandlers() {
Map<Class<? extends Message>, SpecificHandler<?>> handlers =
new HashMap<Class<? extends Message>, SpecificHandler<?>>();
handlers.put(AvailableTargetsMessage.class, new SpecificHandler<AvailableTargetsMessage>() {
@Override
public void handle(AvailableTargetsMessage message) {
targetsAvailable = message;
targetsSelected.clear();
for (EntityView view : entityViews.values()) {
view.setTargetable(TargetStatus.NOT_TARGETABLE, onTarget);
}
for (int id : message.getTargets()) {
EntityView view = entityViews.get(id);
if (view != null) {
view.setTargetable(TargetStatus.TARGETABLE, onTarget);
}
}
}
});
handlers.put(UsableActionMessage.class, new SpecificHandler<UsableActionMessage>() {
@Override
public void handle(UsableActionMessage message) {
int id = message.getId();
EntityView view = entityViews.get(id);
if (view != null) {
view.usableAction(message);
if (view instanceof CardViewSmall) {
((CardViewSmall)view).setUsable(GameScreen.this);
}
}
}
});
handlers.put(CardInfoMessage.class, new SpecificHandler<CardInfoMessage>() {
@Override
public void handle(CardInfoMessage message) {
ZoneView zone = getZoneView(message.getZone());
if (zone != null) {
zone.removeCard(message.getId());
}
EntityView entityView = entityViews.remove(message.getId());
if (entityView != null) {
entityView.remove();
}
if (zone != null) {
entityViews.put(message.getId(), zone.addCard(message));
}
}
});
handlers.put(EntityRemoveMessage.class, new SpecificHandler<EntityRemoveMessage>() {
@Override
public void handle(EntityRemoveMessage message) {
EntityView view = entityViews.get(message.getEntity());
for (ZoneView zone : zoneViews.values()) {
if (zone.hasCard(message.getEntity())) {
zone.removeCard(message.getEntity());
}
}
if (view != null) {
view.entityRemoved();
entityViews.remove(message.getEntity());
}
}
});
handlers.put(GameOverMessage.class, new SpecificHandler<GameOverMessage>() {
@Override
public void handle(GameOverMessage message) {
Dialog dialog = new Dialog("Game Over!", context.getSkin()) {
@Override
protected void result(Object object) {
game.setScreen(parentScreen);
}
};
dialog.button("OK");
dialog.show(context.getStage());
}
});
handlers.put(PlayerMessage.class, new SpecificHandler<PlayerMessage>() {
@Override
public void handle(PlayerMessage message) {
PlayerView playerView = new PlayerView(context, message);
entityViews.put(message.getId(), playerView);
Container<Actor> holder = holders.get(String.valueOf(message.getIndex()));
if (holder != null) {
holder.setActor(playerView.getActor());
}
}
});
handlers.put(ResetAvailableActionsMessage.class, new SpecificHandler<ResetAvailableActionsMessage>() {
@Override
public void handle(ResetAvailableActionsMessage message) {
for (EntityView view : entityViews.values()) {
view.setTargetable(TargetStatus.NOT_TARGETABLE, null);
view.clearUsableActions();
}
}
});
handlers.put(UpdateMessage.class, new SpecificHandler<UpdateMessage>() {
@Override
public void handle(UpdateMessage message) {
EntityView entityView = entityViews.get(message.getId());
if (entityView != null) {
entityView.set(message.getKey(), message.getValue());
}
}
});
handlers.put(ZoneChangeMessage.class, new SpecificHandler<ZoneChangeMessage>() {
@Override
public void handle(ZoneChangeMessage message) {
ZoneView oldZone = getZoneView(message.getSourceZone()); // can be null
ZoneView destinationZone = getZoneView(message.getDestinationZone());
int id = message.getEntity();
CardView entityView = (CardView) entityViews.remove(id); // can be null
if (oldZone != null) {
oldZone.removeCard(id);
}
if (destinationZone != null) {
CardView newCardView = destinationZone.addCard(new CardInfoMessage(message.getDestinationZone(), id,
entityView == null ? null : entityView.getInfo()));
if (entityView != null) {
entityView.zoneMove(message, destinationZone, newCardView);
}
entityViews.put(id, newCardView);
}
else {
if (entityView != null) {
entityView.zoneMove(message, destinationZone, null);
}
}
/*
Send to AI Medium: ZoneChangeMessage [entity=95, sourceZone=72, destinationZone=73]
Send to AI Medium: CardInfo: 95 in zone 73 - {SCRAP=1, TAUNT=1, MAX_HEALTH=1, SICKNESS=1, MANA_COST=2, name=The Chopper, ATTACK=2, creatureType=Mech, HEALTH=1, ATTACK_AVAILABLE=1}
Send to Zomis: ZoneChangeMessage [entity=95, sourceZone=72, destinationZone=73]
if card is already known, send ZoneChange only
if card is not known, send ZoneChange first and then CardInfo
when cards are created from nowhere, ZoneChange with source -1 is sent and then CardInfo
*/
}
});
handlers.put(ZoneMessage.class, new SpecificHandler<ZoneMessage>() {
@Override
public void handle(ZoneMessage message) {
Gdx.app.log("GameScreen", "Zone " + message);
ZoneView zoneView = createZoneView(message);
if (zoneView != null) {
PlayerView view = (PlayerView) entityViews.get(message.getOwner());
if (view == null) {
Gdx.app.log("GameScreen", "no playerView for " + message.getOwner());
return;
}
String key = view.getIndex() + message.getName();
Container<Actor> container = holders.get(key);
if (container == null) {
Gdx.app.log("GameScreen", "no container for " + key);
return;
}
Gdx.app.log("GameScreen", "putting zoneview for " + key);
container.setActor(zoneView.getActor());
zoneViews.put(message.getId(), zoneView);
}
}
});
return handlers;
}
private ZoneView createZoneView(ZoneMessage message) {
String type = message.getName();
if (type.equals("Battlefield")) {
return new DefaultZoneView(context, message, this.entityViews);
}
if (type.equals("Hand")) {
return new DefaultZoneView(context, message, this.entityViews);
}
if (type.equals("Deck")) {
return new CompactHiddenZoneView(game, message);
}
if (type.equals("Cards")) {
return null; // Card models only
}
throw new RuntimeException("Unknown ZoneView type: " + message.getName());
}
private ZoneView getZoneView(int id) {
return this.zoneViews.get(id);
}
public boolean checkCardDrop(CardViewSmall cardView) {
Table table = (Table)cardView.getActor();
Vector2 stageLoc = table.localToStageCoordinates(new Vector2());
Rectangle tableRect = new Rectangle(stageLoc.x, stageLoc.y, table.getWidth(), table.getHeight());
for (Container<Actor> actor : this.holders.values()) {
if (actor.getName() == "Battlefield") {
Vector2 stageBattlefieldLoc = actor.localToStageCoordinates(new Vector2(actor.getActor().getX(), actor.getActor().getY()));
Vector2 modifiedSBL = new Vector2(stageBattlefieldLoc.x - actor.getWidth()/2, stageBattlefieldLoc.y - actor.getHeight()/2);
Rectangle deckRect = new Rectangle(modifiedSBL.x, modifiedSBL.y, actor.getWidth() * 0.8f, actor.getHeight());
//uncomment this to see the bug where battlefields pop up in strange places
/*
Image squareImage = new Image(new Texture(Gdx.files.internal("cardbg.png")));
squareImage.setPosition(modifiedSBL.x, modifiedSBL.y);
squareImage.setSize(deckRect.width, deckRect.height);
this.game.stage.addActor(squareImage);
*/
if (tableRect.overlaps(deckRect)) {
//this.addEntity(cardView);
System.out.println("target found!");
return true;
}
}
}
return false;
//these can be used to double check the location of the rectangles
/*
Image squareImage = new Image(new Texture(Gdx.files.internal("cardbg.png")));
squareImage.setPosition(modifiedSBL.x, modifiedSBL.y);
squareImage.setSize(deckRect.width, deckRect.height);
this.game.stage.addActor(squareImage);
*/
/*
Image squareImage = new Image(new Texture(Gdx.files.internal("cardbg.png")));
squareImage.setPosition(stageLoc.x, stageLoc.y);
squareImage.setSize(tableRect.width, tableRect.height);
this.game.stage.addActor(squareImage);
*/
}
@Override
public boolean addEntity(EntityView view) {
//called by the CardViewSmall when not in mulligan mode, nothing will happen
return false;
}
}
| Java |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// <auto-generated/>
#nullable disable
using System.Text.Json;
using Azure.Core;
namespace Azure.Management.Network.Models
{
public partial class Availability
{
internal static Availability DeserializeAvailability(JsonElement element)
{
string timeGrain = default;
string retention = default;
string blobDuration = default;
foreach (var property in element.EnumerateObject())
{
if (property.NameEquals("timeGrain"))
{
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
timeGrain = property.Value.GetString();
continue;
}
if (property.NameEquals("retention"))
{
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
retention = property.Value.GetString();
continue;
}
if (property.NameEquals("blobDuration"))
{
if (property.Value.ValueKind == JsonValueKind.Null)
{
continue;
}
blobDuration = property.Value.GetString();
continue;
}
}
return new Availability(timeGrain, retention, blobDuration);
}
}
}
| Java |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.inspections.quickfix;
import com.intellij.codeInsight.CodeInsightUtilCore;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.template.TemplateBuilder;
import com.intellij.codeInsight.template.TemplateBuilderFactory;
import com.intellij.codeInspection.LocalQuickFix;
import com.intellij.codeInspection.ProblemDescriptor;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.FileEditorManager;
import com.intellij.openapi.fileEditor.OpenFileDescriptor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.MessageType;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.util.Function;
import com.jetbrains.python.PyBundle;
import com.jetbrains.python.PyNames;
import com.jetbrains.python.psi.*;
import com.jetbrains.python.psi.impl.PyPsiUtils;
import com.jetbrains.python.psi.types.PyClassType;
import com.jetbrains.python.psi.types.PyClassTypeImpl;
import com.jetbrains.python.psi.types.PyType;
import com.jetbrains.python.psi.types.TypeEvalContext;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
/**
* Available on self.my_something when my_something is unresolved.
* User: dcheryasov
*/
public class AddFieldQuickFix implements LocalQuickFix {
  // Initializer expression text for the new field (e.g. "None").
  private final String myInitializer;
  // Class name; used only to build the user-visible quick-fix name.
  private final String myClassName;
  // Name of the field to create.
  private final String myIdentifier;
  // When true the live template highlights the initializer for editing,
  // otherwise it highlights the field name.
  private boolean replaceInitializer = false;
  public AddFieldQuickFix(@NotNull final String identifier, @NotNull final String initializer, final String className, boolean replace) {
    myIdentifier = identifier;
    myInitializer = initializer;
    myClassName = className;
    replaceInitializer = replace;
  }
  @NotNull
  public String getName() {
    return PyBundle.message("QFIX.NAME.add.field.$0.to.class.$1", myIdentifier, myClassName);
  }
  @NotNull
  public String getFamilyName() {
    return "Add field to class";
  }
  /**
   * Appends the statement produced by {@code callback} as the last statement of
   * {@code init}, passing the method's actual "self" parameter name (which may
   * differ from the canonical "self"). Also removes a now-redundant "pass".
   */
  @NotNull
  public static PsiElement appendToMethod(PyFunction init, Function<String, PyStatement> callback) {
    // add this field as the last stmt of the constructor
    final PyStatementList statementList = init.getStatementList();
    // name of 'self' may be different for fancier styles
    String selfName = PyNames.CANONICAL_SELF;
    final PyParameter[] params = init.getParameterList().getParameters();
    if (params.length > 0) {
      selfName = params[0].getName();
    }
    final PyStatement newStmt = callback.fun(selfName);
    final PsiElement result = PyUtil.addElementToStatementList(newStmt, statementList, true);
    PyPsiUtils.removeRedundantPass(statementList);
    return result;
  }
  /**
   * Entry point of the quick fix: adds an instance field to __init__ (creating
   * the constructor when absent) or a class-level attribute for class-type
   * references, then opens a live template so the user can tweak the result.
   */
  public void applyFix(@NotNull Project project, @NotNull ProblemDescriptor descriptor) {
    // expect the descriptor to point to the unresolved identifier.
    final PsiElement element = descriptor.getPsiElement();
    final PyClassType type = getClassType(element);
    if (type == null) return;
    final PyClass cls = type.getPyClass();
    if (!FileModificationService.getInstance().preparePsiElementForWrite(cls)) return;
    WriteAction.run(() -> {
      PsiElement initStatement;
      if (!type.isDefinition()) {
        // instance attribute: goes into (a possibly newly created) __init__
        initStatement = addFieldToInit(project, cls, myIdentifier, new CreateFieldCallback(project, myIdentifier, myInitializer));
      }
      else {
        // class attribute: a plain assignment in the class body
        PyStatement field = PyElementGenerator.getInstance(project)
          .createFromText(LanguageLevel.getDefault(), PyStatement.class, myIdentifier + " = " + myInitializer);
        initStatement = PyUtil.addElementToStatementList(field, cls.getStatementList(), true);
      }
      if (initStatement != null) {
        showTemplateBuilder(initStatement, cls.getContainingFile());
        return;
      }
      // somehow we failed. tell about this
      PyUtil.showBalloon(project, PyBundle.message("QFIX.failed.to.add.field"), MessageType.ERROR);
    });
  }
  @Override
  public boolean startInWriteAction() {
    // the fix manages its own WriteAction (see applyFix)
    return false;
  }
  /**
   * Resolves the class the fix should modify: the qualifier's type for
   * qualified references (self.foo), or the enclosing class otherwise.
   */
  private static PyClassType getClassType(@NotNull final PsiElement element) {
    if (element instanceof PyQualifiedExpression) {
      final PyExpression qualifier = ((PyQualifiedExpression)element).getQualifier();
      if (qualifier == null) return null;
      final PyType type = TypeEvalContext.userInitiated(element.getProject(), element.getContainingFile()).getType(qualifier);
      return type instanceof PyClassType ? (PyClassType)type : null;
    }
    final PyClass aClass = PsiTreeUtil.getParentOfType(element, PyClass.class);
    return aClass != null ? new PyClassTypeImpl(aClass, false) : null;
  }
  /**
   * Opens the created assignment in the editor as a live template, letting the
   * user edit either the initializer or the field name (see replaceInitializer).
   */
  private void showTemplateBuilder(PsiElement initStatement, @NotNull final PsiFile file) {
    initStatement = CodeInsightUtilCore.forcePsiPostprocessAndRestoreElement(initStatement);
    if (initStatement instanceof PyAssignmentStatement) {
      final TemplateBuilder builder = TemplateBuilderFactory.getInstance().createTemplateBuilder(initStatement);
      final PyExpression assignedValue = ((PyAssignmentStatement)initStatement).getAssignedValue();
      final PyExpression leftExpression = ((PyAssignmentStatement)initStatement).getLeftHandSideExpression();
      if (assignedValue != null && leftExpression != null) {
        if (replaceInitializer)
          builder.replaceElement(assignedValue, myInitializer);
        else
          builder.replaceElement(leftExpression.getLastChild(), myIdentifier);
        final VirtualFile virtualFile = file.getVirtualFile();
        if (virtualFile == null) return;
        final Editor editor = FileEditorManager.getInstance(file.getProject()).openTextEditor(
          new OpenFileDescriptor(file.getProject(), virtualFile), true);
        if (editor == null) return;
        builder.run(editor, false);
      }
    }
  }
  /**
   * Adds a field assignment to the class's __init__, creating the constructor
   * (modeled on the nearest ancestor's, if any) when the class has none.
   * Returns the created statement, or null on failure.
   */
  @Nullable
  public static PsiElement addFieldToInit(Project project, PyClass cls, String itemName, Function<String, PyStatement> callback) {
    if (cls != null && itemName != null) {
      PyFunction init = cls.findMethodByName(PyNames.INIT, false, null);
      if (init != null) {
        return appendToMethod(init, callback);
      }
      else { // no init! boldly copy ancestor's.
        for (PyClass ancestor : cls.getAncestorClasses(null)) {
          init = ancestor.findMethodByName(PyNames.INIT, false, null);
          if (init != null) break;
        }
        PyFunction newInit = createInitMethod(project, cls, init);
        appendToMethod(newInit, callback);
        // insert the new __init__ before the first existing method, if any
        PsiElement addAnchor = null;
        PyFunction[] meths = cls.getMethods();
        if (meths.length > 0) addAnchor = meths[0].getPrevSibling();
        PyStatementList clsContent = cls.getStatementList();
        newInit = (PyFunction) clsContent.addAfter(newInit, addAnchor);
        PyUtil.showBalloon(project, PyBundle.message("QFIX.added.constructor.$0.for.field.$1", cls.getName(), itemName), MessageType.INFO);
        final PyStatementList statementList = newInit.getStatementList();
        final PyStatement[] statements = statementList.getStatements();
        return statements.length != 0 ? statements[0] : null;
      }
    }
    return null;
  }
  /**
   * Builds a new __init__ from text. With an ancestor __init__ available, the
   * new one copies its parameter list and delegates to it (via super() for
   * new-style classes, or an explicit Ancestor.__init__(self, ...) call);
   * otherwise the body is just "pass".
   */
  @NotNull
  private static PyFunction createInitMethod(Project project, PyClass cls, @Nullable PyFunction ancestorInit) {
    // found it; copy its param list and make a call to it.
    String paramList = ancestorInit != null ? ancestorInit.getParameterList().getText() : "(self)";
    String functionText = "def " + PyNames.INIT + paramList + ":\n";
    if (ancestorInit == null) functionText += "  pass";
    else {
      final PyClass ancestorClass = ancestorInit.getContainingClass();
      if (ancestorClass != null && !PyUtil.isObjectClass(ancestorClass)) {
        StringBuilder sb = new StringBuilder();
        PyParameter[] params = ancestorInit.getParameterList().getParameters();
        boolean seen = false;
        if (cls.isNewStyleClass(null)) {
          // form the super() call
          sb.append("super(");
          if (!LanguageLevel.forElement(cls).isPy3K()) {
            // Python 2 needs the explicit super(Class, self) form
            sb.append(cls.getName());
            // NOTE: assume that we have at least the first param
            String self_name = params[0].getName();
            sb.append(", ").append(self_name);
          }
          sb.append(").").append(PyNames.INIT).append("(");
        }
        else {
          // old-style class: call the ancestor's __init__ directly
          sb.append(ancestorClass.getName());
          sb.append(".__init__(self");
          seen = true;
        }
        // forward every parameter after "self" verbatim
        for (int i = 1; i < params.length; i += 1) {
          if (seen) sb.append(", ");
          else seen = true;
          sb.append(params[i].getText());
        }
        sb.append(")");
        functionText += "  " + sb.toString();
      }
      else {
        functionText += "  pass";
      }
    }
    return PyElementGenerator.getInstance(project).createFromText(
      LanguageLevel.getDefault(), PyFunction.class, functionText,
      new int[]{0}
    );
  }
  /** Produces the "self.<name> = <initializer>" statement for appendToMethod. */
  private static class CreateFieldCallback implements Function<String, PyStatement> {
    private final Project myProject;
    private final String myItemName;
    private final String myInitializer;
    private CreateFieldCallback(Project project, String itemName, String initializer) {
      myProject = project;
      myItemName = itemName;
      myInitializer = initializer;
    }
    public PyStatement fun(String selfName) {
      return PyElementGenerator.getInstance(myProject).createFromText(LanguageLevel.getDefault(), PyStatement.class, selfName + "." + myItemName + " = " + myInitializer);
    }
  }
}
| Java |
// Copyright 2013 Matthew Baird
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package core
import (
"encoding/json"
"fmt"
"github.com/mattbaird/elastigo/api"
)
// Validate asks Elasticsearch to validate a potentially expensive query
// without executing it. An empty _type validates against the whole index.
// see http://www.elasticsearch.org/guide/reference/api/validate.html
func Validate(index string, _type string, args map[string]interface{}) (api.BaseResponse, error) {
	var url string
	var retval api.BaseResponse
	if len(_type) > 0 {
		url = fmt.Sprintf("/%s/%s/_validate/", index, _type)
	} else {
		url = fmt.Sprintf("/%s/_validate/", index)
	}
	body, err := api.DoCommand("GET", url, args, nil)
	if err != nil {
		return retval, err
	}
	// err is always nil past the early return above, so unmarshal unconditionally
	// (the original guarded this with a dead "if err == nil" check).
	if jsonErr := json.Unmarshal(body, &retval); jsonErr != nil {
		return retval, jsonErr
	}
	return retval, nil
}
// Validation is the response body of the validate API: overall validity,
// shard status, and optional per-index explanations.
// NOTE: the Explainations field name misspells "Explanation" but is exported,
// so renaming it would break API compatibility; its JSON tag is spelled correctly.
type Validation struct {
	Valid bool `json:"valid"`
	Shards api.Status `json:"_shards"`
	Explainations []Explaination `json:"explanations,omitempty"`
}
// Explaination describes the validation result for a single index.
// NOTE: the type name misspells "Explanation"; it is exported, so it is
// kept as-is for backward compatibility.
type Explaination struct {
	Index string `json:"index"`
	Valid bool `json:"valid"`
	Error string `json:"error"`
}
| Java |
<!DOCTYPE html>
<html>
<head>
<title>Noto Warang Citi</title>
<meta charset="UTF-8" />
<script type="text/javascript">
function sliderChange() {
    // Mirror the slider's value into the numeric input, then reuse its
    // oninput handler so the sample text is resized through one code path.
    var sizeField = document.getElementById("fontSizeInput");
    sizeField.value = document.getElementById("fontSizeSlider").value;
    sizeField.oninput();
}
function changeSize() {
    // Apply the chosen pixel size to every element with class "sample".
    var selector = document.getElementById('fontSizeInput');
    var selectedSize = selector.value + "px";
    // FIX: declare with "var" — the original assignment leaked "samples"
    // into the global scope.
    var samples = document.getElementsByClassName("sample");
    for (var i = 0; i < samples.length; i++) {
        samples[i].style.fontSize = selectedSize;
    }
}
function printPrep() {
    // Hide the fixed control bar so it is not captured in the printout,
    // then restore it once the print dialog has been dismissed.
    var controlBar = document.getElementById("header");
    controlBar.style.display = "none";
    window.print();
    controlBar.style.display = "block";
}
// Reset both size controls to the default of 48px and apply it to the samples.
// FIX: dropped the unused "reset" parameter (extra arguments are ignored in
// JavaScript, so existing callers remain compatible).
function init() {
    var sizeInput = document.getElementById("fontSizeInput");
    var fontSizeSlider = document.getElementById("fontSizeSlider");
    fontSizeSlider.value = 48;
    sizeInput.value = 48;
    sizeInput.oninput();
}
</script>
</head>
<style>
#header {
top:0;
left:0;
position: fixed;
margin: 0px;
width: 100%;
background: #fff;
}
/*#wrapper {*/
/*white-space: pre;*/
/*}*/
#fontSizeInput {
width: 40px;
}
input[type=range] {
/*removes default webkit styles*/
-webkit-appearance: none;
position: relative;
top: 50%;
transform: perspective(1px) translateY(30%);
/*fix for FF unable to apply focus style bug */
border: 1px solid white;
/*required for proper track sizing in FF*/
width: 400px;
}
input[type=range]::-webkit-slider-runnable-track {
width: 400px;
height: 5px;
background: #ddd;
border: none;
border-radius: 3px;
}
input[type=range]::-webkit-slider-thumb {
-webkit-appearance: none;
border: none;
height: 16px;
width: 16px;
border-radius: 50%;
background: red;
margin-top: -4px;
}
input[type=range]:focus {
outline: none;
}
input[type=range]:focus::-webkit-slider-runnable-track {
background: #ccc;
}
input[type=range]::-moz-range-track {
width: 400px;
height: 5px;
background: #ddd;
border: none;
border-radius: 3px;
}
input[type=range]::-moz-range-thumb {
border: none;
height: 16px;
width: 16px;
border-radius: 50%;
background: red;
}
/*hide the outline behind the border*/
input[type=range]:-moz-focusring{
outline: 1px solid white;
outline-offset: -1px;
}
rt, rp {font-size: 20%;} /* = Webkit value */
@font-face {
font-family: "Noto Sans Warang Citi";
src: url("fonts/NotoSansWarangCiti-Regular.otf");
font-weight: 400;
font-style: normal;
}
ruby {
ruby-position:under;
ruby-align: center;
}
.desc {
font-family: "Noto Sans", sans-serif;
font-size: 48px;
line-height: inherit;
}
.sample
{
font-family: "Noto Sans Warang Citi", "Noto Sans", sans-serif;
font-size: 48px;
line-height: 160%;
font-weight: 400;
font-variant-ligatures: discretionary-ligatures;
-moz-font-feature-settings: "dlig";
-webkit-font-feature-settings: "dlig";
font-feature-settings: "dlig";
}
</style>
<body onload="init()">
<div id="header" style="
padding-top:8px;
z-index:150;
color: #E4E4E4;
">
<span>
<input id="fontSizeSlider" type="range" name="amountRange" min="6" max="600" value="48" oninput="sliderChange()" />
<input id="fontSizeInput" type="number" name="amountInput" min="6" max="600" value="48" oninput="changeSize()" />
<button onclick="printPrep()">Print</button>
</span>
</div>
<div id="wrapper">
<br><br>
<span class="sample" contenteditable="true" onselect="textSelected()">
Noto Sans Warang Citi Kerning
<br><br>
𑢡𑣀𑢢𑣀𑢣𑣀𑢤𑣀𑢥𑣀𑢦𑣀𑢧𑣀𑢨𑣀𑢩𑣀<br>
𑢪𑣀𑢫𑣀𑢬𑣀𑢭𑣀𑢮𑣀𑢯𑣀𑢰𑣀𑢱𑣀𑢲𑣀𑢳𑣀𑢴𑣀𑢵𑣀𑢶𑣀𑢷𑣀𑢸𑣀𑢹𑣀𑢺𑣀𑢻𑣀𑢼𑣀𑢽𑣀𑢾𑣀𑢿𑣀<br>
𑣁𑣀𑣂𑣀𑣃𑣀𑣄𑣀𑣅𑣀𑣆𑣀𑣇𑣀𑣈𑣀𑣉𑣀<br>
𑣊𑣀𑣋𑣀𑣌𑣀𑣍𑣀𑣎𑣀𑣏𑣀𑣐𑣀𑣑𑣀𑣒𑣀𑣓𑣀𑣔𑣀𑣕𑣀𑣖𑣀𑣗𑣀𑣘𑣀𑣙𑣀𑣚𑣀𑣛𑣀𑣜𑣀𑣝𑣀𑣞𑣀𑣟𑣀<br>
<br>
𑢪𑣔𑢫𑣔𑢬𑣔𑢭𑣔𑢮𑣔𑢯𑣔𑢰𑣔𑢱𑣔𑢲𑣔𑢳𑣔𑢴𑣔𑢵𑣔𑢶𑣔𑢷𑣔𑢸𑣔𑢹𑣔𑢺𑣔𑢻𑣔𑢼𑣔𑢽𑣔𑢾𑣔𑢿𑣔<br>
𑣊𑣔𑣋𑣔𑣌𑣔𑣍𑣔𑣎𑣔𑣏𑣔𑣐𑣔𑣑𑣔𑣒𑣔𑣓𑣔𑣔𑣔𑣕𑣔𑣖𑣔𑣗𑣔𑣘𑣔𑣙𑣔𑣚𑣔𑣛𑣔𑣜𑣔𑣝𑣔𑣞𑣔𑣟𑣔<br>
<br>
𑢪𑣘𑢫𑣘𑢬𑣘𑢭𑣘𑢮𑣘𑢯𑣘𑢰𑣘𑢱𑣘𑢲𑣘𑢳𑣘𑢴𑣘𑢵𑣘𑢶𑣘𑢷𑣘𑢸𑣘𑢹𑣘𑢺𑣘𑢻𑣘𑢼𑣘𑢽𑣘𑢾𑣘𑢿𑣘<br>
𑣊𑣘𑣋𑣘𑣌𑣘𑣍𑣘𑣎𑣘𑣏𑣘𑣐𑣘𑣑𑣘𑣒𑣘𑣓𑣘𑣔𑣘𑣕𑣘𑣖𑣘𑣗𑣘𑣘𑣘𑣙𑣘𑣚𑣘𑣛𑣘𑣜𑣘𑣝𑣘𑣞𑣘𑣟𑣘<br>
𑢪𑣚𑢫𑣚𑢬𑣚𑢭𑣚𑢮𑣚𑢯𑣚𑢰𑣚𑢱𑣚𑢲𑣚𑢳𑣚𑢴𑣚𑢵𑣚𑢶𑣚𑢷𑣚𑢸𑣚𑢹𑣚𑢺𑣚𑢻𑣚𑢼𑣚𑢽𑣚𑢾𑣚𑢿𑣚<br>
𑣊𑣚𑣋𑣚𑣌𑣚𑣍𑣚𑣎𑣚𑣏𑣚𑣐𑣚𑣑𑣚𑣒𑣚𑣓𑣚𑣔𑣚𑣕𑣚𑣖𑣚𑣗𑣚𑣘𑣚𑣙𑣚𑣚𑣚𑣛𑣚𑣜𑣚𑣝𑣚𑣞𑣚𑣟𑣚<br>
𑢪𑣒𑢫𑣒𑢬𑣒𑢭𑣒𑢮𑣒𑢯𑣒𑢰𑣒𑢱𑣒𑢲𑣒𑢳𑣒𑢴𑣒𑢵𑣒𑢶𑣒𑢷𑣒𑢸𑣒𑢹𑣒𑢺𑣒𑢻𑣒𑢼𑣒𑢽𑣒𑢾𑣒𑢿𑣒<br>
𑣊𑣒𑣋𑣒𑣌𑣒𑣍𑣒𑣎𑣒𑣏𑣒𑣐𑣒𑣑𑣒𑣒𑣒𑣓𑣒𑣔𑣒𑣕𑣒𑣖𑣒𑣗𑣒𑣘𑣒𑣙𑣒𑣚𑣒𑣛𑣒𑣜𑣒𑣝𑣒𑣞𑣒𑣟𑣒<br>
𑢪𑣜𑢫𑣜𑢬𑣜𑢭𑣜𑢮𑣜𑢯𑣜𑢰𑣜𑢱𑣜𑢲𑣜𑢳𑣜𑢴𑣜𑢵𑣜𑢶𑣜𑢷𑣜𑢸𑣜𑢹𑣜𑢺𑣜𑢻𑣜𑢼𑣜𑢽𑣜𑢾𑣜𑢿𑣜<br>
𑣊𑣜𑣋𑣜𑣌𑣜𑣍𑣜𑣎𑣜𑣏𑣜𑣐𑣜𑣑𑣜𑣒𑣜𑣓𑣜𑣔𑣜𑣕𑣜𑣖𑣜𑣗𑣜𑣘𑣜𑣙𑣜𑣚𑣜𑣛𑣜𑣜𑣜𑣝𑣜𑣞𑣜𑣟𑣜<br>
<br>
<br>
</span>
</div>
</body>
</html>
| Java |
<?php
namespace Topxia\Service\User\Dao\Impl;
use Topxia\Service\Common\BaseDao;
use Topxia\Service\User\Dao\UserFortuneLogDao;
class UserFortuneLogDaoImpl extends BaseDao implements UserFortuneLogDao
{
    protected $table = 'user_fortune_log';

    /**
     * Inserts one fortune-log row and returns the freshly stored record.
     * Throws a DAO exception when the INSERT affects no rows.
     */
    public function addLog(array $log)
    {
        $rowCount = $this->getConnection()->insert($this->table, $log);
        if ($rowCount <= 0) {
            throw $this->createDaoException('Insert log error');
        }
        $newId = $this->getConnection()->lastInsertId();
        return $this->getLog($newId);
    }

    /**
     * Fetches a single log row by primary key.
     */
    public function getLog($id)
    {
        $sql = "SELECT * FROM {$this->table} WHERE id = ? LIMIT 1";
        return $this->getConnection()->fetchAssoc($sql, array($id));
    }
}
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.emoji.text;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;

import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SdkSuppress;
import androidx.test.filters.SmallTest;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@SmallTest
@RunWith(AndroidJUnit4.class)
@SdkSuppress(minSdkVersion = 19)
public class MetadataRepoTest {
    MetadataRepo mMetadataRepo;

    @Before
    public void clearResourceIndex() {
        mMetadataRepo = new MetadataRepo();
    }

    @Test(expected = NullPointerException.class)
    public void testPut_withNullMetadata() {
        mMetadataRepo.put(null);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testPut_withEmptyKeys() {
        mMetadataRepo.put(new TestEmojiMetadata(new int[0]));
    }

    @Test
    public void testPut_withSingleCodePointMapping() {
        final int[] codePoint = new int[]{1};
        final TestEmojiMetadata metadata = new TestEmojiMetadata(codePoint);
        mMetadataRepo.put(metadata);
        assertSame(metadata, getNode(codePoint));
    }

    @Test
    public void testPut_withMultiCodePointsMapping() {
        final int[] codePoint = new int[]{1, 2, 3, 4};
        final TestEmojiMetadata metadata = new TestEmojiMetadata(codePoint);
        mMetadataRepo.put(metadata);
        assertSame(metadata, getNode(codePoint));
        // FIX: assertNull instead of assertEquals(null, ...) — idiomatic and
        // gives a clearer failure message. Prefixes and near-misses of the
        // code point sequence must not resolve to the metadata.
        assertNull(getNode(new int[]{1}));
        assertNull(getNode(new int[]{1, 2}));
        assertNull(getNode(new int[]{1, 2, 3}));
        assertNull(getNode(new int[]{1, 2, 3, 5}));
    }

    @Test
    public void testPut_sequentialCodePoints() {
        final int[] codePoint1 = new int[]{1, 2, 3, 4};
        final EmojiMetadata metadata1 = new TestEmojiMetadata(codePoint1);
        final int[] codePoint2 = new int[]{1, 2, 3};
        final EmojiMetadata metadata2 = new TestEmojiMetadata(codePoint2);
        final int[] codePoint3 = new int[]{1, 2};
        final EmojiMetadata metadata3 = new TestEmojiMetadata(codePoint3);
        mMetadataRepo.put(metadata1);
        mMetadataRepo.put(metadata2);
        mMetadataRepo.put(metadata3);
        // Overlapping sequences must each keep their own mapping.
        assertSame(metadata1, getNode(codePoint1));
        assertSame(metadata2, getNode(codePoint2));
        assertSame(metadata3, getNode(codePoint3));
        assertNull(getNode(new int[]{1}));
        assertNull(getNode(new int[]{1, 2, 3, 4, 5}));
    }

    /** Convenience overload: walk the repo's trie from the root node. */
    final EmojiMetadata getNode(final int[] codepoints) {
        return getNode(mMetadataRepo.getRootNode(), codepoints, 0);
    }

    /**
     * Recursively follows codepoints[start..] down the trie; returns the
     * metadata at the end of the path, or null when the path breaks early.
     */
    final EmojiMetadata getNode(MetadataRepo.Node node, final int[] codepoints, int start) {
        if (codepoints.length < start) return null;
        if (codepoints.length == start) return node.getData();
        final MetadataRepo.Node childNode = node.get(codepoints[start]);
        if (childNode == null) return null;
        return getNode(childNode, codepoints, start + 1);
    }
}
| Java |
/**
* FreeRDP: A Remote Desktop Protocol Implementation
* T.124 Generic Conference Control (GCC)
*
* Copyright 2011 Marc-Andre Moreau <[email protected]>
* Copyright 2014 Norbert Federa <[email protected]>
* Copyright 2014 DI (FH) Martin Haimberger <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <winpr/crt.h>
#include <winpr/crypto.h>
#include <freerdp/log.h>
#include "gcc.h"
#include "certificate.h"
#define TAG FREERDP_TAG("core.gcc")
static BOOL gcc_read_client_cluster_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static BOOL gcc_read_client_core_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static BOOL gcc_read_client_data_blocks(wStream* s, rdpMcs* mcs, int length);
static BOOL gcc_read_server_data_blocks(wStream* s, rdpMcs* mcs, int length);
static BOOL gcc_read_user_data_header(wStream* s, UINT16* type, UINT16* length);
static void gcc_write_user_data_header(wStream* s, UINT16 type, UINT16 length);
static void gcc_write_client_core_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_server_core_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_write_server_core_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_client_security_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static void gcc_write_client_security_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_server_security_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_write_server_security_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_client_network_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static void gcc_write_client_network_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_server_network_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_write_server_network_data(wStream* s, rdpMcs* mcs);
static void gcc_write_client_cluster_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_client_monitor_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static void gcc_write_client_monitor_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_client_monitor_extended_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static void gcc_write_client_monitor_extended_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_client_message_channel_data(wStream* s, rdpMcs* mcs, UINT16 blockLength);
static void gcc_write_client_message_channel_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_server_message_channel_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_write_server_message_channel_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_client_multitransport_channel_data(wStream* s, rdpMcs* mcs,
UINT16 blockLength);
static void gcc_write_client_multitransport_channel_data(wStream* s, rdpMcs* mcs);
static BOOL gcc_read_server_multitransport_channel_data(wStream* s, rdpMcs* mcs);
static void gcc_write_server_multitransport_channel_data(wStream* s, rdpMcs* mcs);
/**
 * Negotiate the common RDP protocol version.
 *
 * Takes the minimum of the two advertised versions; if it is a known
 * RDP_VERSION_* value it is returned as-is, otherwise an error is logged
 * and the minimum is still returned so the caller decides how to proceed.
 *
 * @param serverVersion version advertised by the server
 * @param clientVersion version advertised by the client
 * @return the lower of the two versions
 */
static DWORD rdp_version_common(DWORD serverVersion, DWORD clientVersion)
{
    DWORD version = MIN(serverVersion, clientVersion);

    switch (version)
    {
        case RDP_VERSION_4:
        case RDP_VERSION_5_PLUS:
        case RDP_VERSION_10_0:
        case RDP_VERSION_10_1:
        case RDP_VERSION_10_2:
        case RDP_VERSION_10_3:
        case RDP_VERSION_10_4:
        case RDP_VERSION_10_5:
        case RDP_VERSION_10_6:
        case RDP_VERSION_10_7:
            return version;

        default:
            /* Fixed: the message previously labeled serverVersion as "client" and
             * clientVersion as "server" (argument order was swapped relative to the
             * format string), and DWORD is unsigned so PRIu32 is the correct
             * conversion, not PRId32. */
            WLog_ERR(TAG, "Invalid server [%" PRIu32 "] and client [%" PRIu32 "] versions",
                     serverVersion, clientVersion);
            return version;
    }
}
/**
* T.124 GCC is defined in:
*
* http://www.itu.int/rec/T-REC-T.124-199802-S/en
* ITU-T T.124 (02/98): Generic Conference Control
*/
/**
* ConnectData ::= SEQUENCE
* {
* t124Identifier Key,
* connectPDU OCTET_STRING
* }
*
* Key ::= CHOICE
* {
* object OBJECT_IDENTIFIER,
* h221NonStandard H221NonStandardIdentifier
* }
*
* ConnectGCCPDU ::= CHOICE
* {
* conferenceCreateRequest ConferenceCreateRequest,
* conferenceCreateResponse ConferenceCreateResponse,
* conferenceQueryRequest ConferenceQueryRequest,
* conferenceQueryResponse ConferenceQueryResponse,
* conferenceJoinRequest ConferenceJoinRequest,
* conferenceJoinResponse ConferenceJoinResponse,
* conferenceInviteRequest ConferenceInviteRequest,
* conferenceInviteResponse ConferenceInviteResponse,
* ...
* }
*
* ConferenceCreateRequest ::= SEQUENCE
* {
* conferenceName ConferenceName,
* convenerPassword Password OPTIONAL,
* password Password OPTIONAL,
* lockedConference BOOLEAN,
* listedConference BOOLEAN,
* conductibleConference BOOLEAN,
* terminationMethod TerminationMethod,
* conductorPrivileges SET OF Privilege OPTIONAL,
* conductedPrivileges SET OF Privilege OPTIONAL,
* nonConductedPrivileges SET OF Privilege OPTIONAL,
* conferenceDescription TextString OPTIONAL,
* callerIdentifier TextString OPTIONAL,
* userData UserData OPTIONAL,
* ...,
* conferencePriority ConferencePriority OPTIONAL,
* conferenceMode ConferenceMode OPTIONAL
* }
*
* ConferenceCreateResponse ::= SEQUENCE
* {
* nodeID UserID,
* tag INTEGER,
* result ENUMERATED
* {
* success (0),
* userRejected (1),
* resourcesNotAvailable (2),
* rejectedForSymmetryBreaking (3),
* lockedConferenceNotSupported (4)
* },
* userData UserData OPTIONAL,
* ...
* }
*
* ConferenceName ::= SEQUENCE
* {
* numeric SimpleNumericString
* text SimpleTextString OPTIONAL,
* ...
* }
*
* SimpleNumericString ::= NumericString (SIZE (1..255)) (FROM ("0123456789"))
*
* UserData ::= SET OF SEQUENCE
* {
* key Key,
* value OCTET_STRING OPTIONAL
* }
*
* H221NonStandardIdentifier ::= OCTET STRING (SIZE (4..255))
*
* UserID ::= DynamicChannelID
*
* ChannelID ::= INTEGER (1..65535)
* StaticChannelID ::= INTEGER (1..1000)
* DynamicChannelID ::= INTEGER (1001..65535)
*
*/
/*
* OID = 0.0.20.124.0.1
* { itu-t(0) recommendation(0) t(20) t124(124) version(0) 1 }
* v.1 of ITU-T Recommendation T.124 (Feb 1998): "Generic Conference Control"
*/
/* The t124Identifier "object" Key: OID 0.0.20.124.0.1 (ITU-T T.124 v1, Feb 1998). */
BYTE t124_02_98_oid[6] = { 0, 0, 20, 124, 0, 1 };
/* H.221 non-standard keys identifying the direction of the embedded user data. */
BYTE h221_cs_key[4] = "Duca"; /* client-to-server */
BYTE h221_sc_key[4] = "McDn"; /* server-to-client */
/**
* Read a GCC Conference Create Request.\n
* @msdn{cc240836}
* @param s stream
* @param settings rdp settings
*/
/* Parses a PER-encoded ConferenceCreateRequest and hands the embedded array of
 * client data blocks to gcc_read_client_data_blocks(). Every field is read in
 * wire order; FALSE is returned on the first malformed or truncated field. */
BOOL gcc_read_conference_create_request(wStream* s, rdpMcs* mcs)
{
    UINT16 length;
    BYTE choice;
    BYTE number;
    BYTE selection;
    /* ConnectData */
    if (!per_read_choice(s, &choice))
        return FALSE;

    if (!per_read_object_identifier(s, t124_02_98_oid))
        return FALSE;

    /* ConnectData::connectPDU (OCTET_STRING) */
    if (!per_read_length(s, &length))
        return FALSE;

    /* ConnectGCCPDU */
    if (!per_read_choice(s, &choice))
        return FALSE;

    if (!per_read_selection(s, &selection))
        return FALSE;

    /* ConferenceCreateRequest::conferenceName */
    if (!per_read_numeric_string(s, 1)) /* ConferenceName::numeric */
        return FALSE;

    if (!per_read_padding(s, 1)) /* padding */
        return FALSE;

    /* UserData (SET OF SEQUENCE) */
    if (!per_read_number_of_sets(s, &number) || number != 1) /* one set of UserData */
        return FALSE;

    if (!per_read_choice(s, &choice) ||
        choice != 0xC0) /* UserData::value present + select h221NonStandard (1) */
        return FALSE;

    /* h221NonStandard */
    if (!per_read_octet_string(s, h221_cs_key, 4,
                               4)) /* h221NonStandard, client-to-server H.221 key, "Duca" */
        return FALSE;

    /* userData::value (OCTET_STRING) */
    if (!per_read_length(s, &length))
        return FALSE;

    /* Ensure the advertised user data length actually fits in the stream
     * before dispatching the block parser. */
    if (Stream_GetRemainingLength(s) < length)
        return FALSE;

    if (!gcc_read_client_data_blocks(s, mcs, length))
        return FALSE;

    return TRUE;
}
/**
* Write a GCC Conference Create Request.\n
* @msdn{cc240836}
* @param s stream
* @param user_data client data blocks
*/
/* Serializes a PER-encoded ConferenceCreateRequest wrapping the already-built
 * client data blocks in userData. Fields are written in strict wire order. */
void gcc_write_conference_create_request(wStream* s, wStream* userData)
{
    /* ConnectData */
    per_write_choice(s, 0); /* From Key select object (0) of type OBJECT_IDENTIFIER */
    per_write_object_identifier(s, t124_02_98_oid); /* ITU-T T.124 (02/98) OBJECT_IDENTIFIER */
    /* ConnectData::connectPDU (OCTET_STRING) */
    /* The +14 covers the fixed-size ConferenceCreateRequest fields written
     * below, between this length and the userData octet string — presumably
     * it must be adjusted if any of those fields change; confirm before
     * modifying this function. */
    per_write_length(s, Stream_GetPosition(userData) + 14); /* connectPDU length */
    /* ConnectGCCPDU */
    per_write_choice(s, 0); /* From ConnectGCCPDU select conferenceCreateRequest (0) of type
                               ConferenceCreateRequest */
    per_write_selection(s, 0x08); /* select optional userData from ConferenceCreateRequest */
    /* ConferenceCreateRequest::conferenceName */
    per_write_numeric_string(s, (BYTE*)"1", 1, 1); /* ConferenceName::numeric */
    per_write_padding(s, 1); /* padding */
    /* UserData (SET OF SEQUENCE) */
    per_write_number_of_sets(s, 1); /* one set of UserData */
    per_write_choice(s, 0xC0); /* UserData::value present + select h221NonStandard (1) */
    /* h221NonStandard */
    per_write_octet_string(s, h221_cs_key, 4,
                           4); /* h221NonStandard, client-to-server H.221 key, "Duca" */
    /* userData::value (OCTET_STRING) */
    per_write_octet_string(s, Stream_Buffer(userData), Stream_GetPosition(userData),
                           0); /* array of client data blocks */
}
/* Parses a PER-encoded ConferenceCreateResponse and hands the embedded array
 * of server data blocks to gcc_read_server_data_blocks(). The nodeID, tag and
 * result fields are consumed but not retained. Returns FALSE on any
 * malformed or truncated field. */
BOOL gcc_read_conference_create_response(wStream* s, rdpMcs* mcs)
{
    UINT16 length;
    UINT32 tag;
    UINT16 nodeID;
    BYTE result;
    BYTE choice;
    BYTE number;
    /* ConnectData */
    if (!per_read_choice(s, &choice) || !per_read_object_identifier(s, t124_02_98_oid))
        return FALSE;

    /* ConnectData::connectPDU (OCTET_STRING) */
    if (!per_read_length(s, &length))
        return FALSE;

    /* ConnectGCCPDU */
    if (!per_read_choice(s, &choice))
        return FALSE;

    /* ConferenceCreateResponse::nodeID (UserID) */
    /* UserID is a DynamicChannelID, hence the 1001 minimum offset. */
    if (!per_read_integer16(s, &nodeID, 1001))
        return FALSE;

    /* ConferenceCreateResponse::tag (INTEGER) */
    if (!per_read_integer(s, &tag))
        return FALSE;

    /* ConferenceCreateResponse::result (ENUMERATED) */
    if (!per_read_enumerated(s, &result, MCS_Result_enum_length))
        return FALSE;

    /* number of UserData sets */
    if (!per_read_number_of_sets(s, &number))
        return FALSE;

    /* UserData::value present + select h221NonStandard (1) */
    if (!per_read_choice(s, &choice))
        return FALSE;

    /* h221NonStandard */
    if (!per_read_octet_string(s, h221_sc_key, 4,
                               4)) /* h221NonStandard, server-to-client H.221 key, "McDn" */
        return FALSE;

    /* userData (OCTET_STRING) */
    if (!per_read_length(s, &length))
        return FALSE;

    if (!gcc_read_server_data_blocks(s, mcs, length))
    {
        WLog_ERR(TAG, "gcc_read_conference_create_response: gcc_read_server_data_blocks failed");
        return FALSE;
    }

    return TRUE;
}
/* Serializes a PER-encoded ConferenceCreateResponse wrapping the already-built
 * server data blocks in userData. The nodeID (0x79F3), tag (1) and result
 * (0 = success) values are fixed constants. */
void gcc_write_conference_create_response(wStream* s, wStream* userData)
{
    /* ConnectData */
    per_write_choice(s, 0);
    per_write_object_identifier(s, t124_02_98_oid);
    /* ConnectData::connectPDU (OCTET_STRING) */
    /* This length MUST be ignored by the client according to [MS-RDPBCGR] */
    per_write_length(s, 0x2A);
    /* ConnectGCCPDU */
    per_write_choice(s, 0x14);
    /* ConferenceCreateResponse::nodeID (UserID) */
    per_write_integer16(s, 0x79F3, 1001);
    /* ConferenceCreateResponse::tag (INTEGER) */
    per_write_integer(s, 1);
    /* ConferenceCreateResponse::result (ENUMERATED) */
    per_write_enumerated(s, 0, MCS_Result_enum_length);
    /* number of UserData sets */
    per_write_number_of_sets(s, 1);
    /* UserData::value present + select h221NonStandard (1) */
    per_write_choice(s, 0xC0);
    /* h221NonStandard */
    per_write_octet_string(s, h221_sc_key, 4,
                           4); /* h221NonStandard, server-to-client H.221 key, "McDn" */
    /* userData (OCTET_STRING) */
    per_write_octet_string(s, Stream_Buffer(userData), Stream_GetPosition(userData),
                           0); /* array of server data blocks */
}
/**
 * Parse the array of client-to-server data blocks (TS_UD_CS_*) contained in a
 * ConferenceCreateRequest, dispatching each block to its typed reader.
 *
 * After each block, the stream position is forced to begPos + blockLength so
 * a reader that consumed too little or too much cannot desynchronize the
 * following blocks; a mismatch is logged but not treated as fatal.
 *
 * @param s stream positioned at the first user data header
 * @param mcs MCS instance whose settings receive the parsed values
 * @param length total number of bytes occupied by all blocks
 * @return TRUE on success, FALSE on malformed or truncated input
 */
BOOL gcc_read_client_data_blocks(wStream* s, rdpMcs* mcs, int length)
{
    UINT16 type;
    UINT16 blockLength;
    size_t begPos, endPos;

    while (length > 0)
    {
        begPos = Stream_GetPosition(s);

        if (!gcc_read_user_data_header(s, &type, &blockLength))
            return FALSE;

        /* blockLength includes the 4 header bytes. Reject anything smaller
         * explicitly: the readers below all compute blockLength - 4, and the
         * original code relied on unsigned underflow in the header check to
         * catch this case indirectly. */
        if (blockLength < 4)
            return FALSE;

        if (Stream_GetRemainingLength(s) < (size_t)(blockLength - 4))
            return FALSE;

        switch (type)
        {
            case CS_CORE:
                if (!gcc_read_client_core_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case CS_SECURITY:
                if (!gcc_read_client_security_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case CS_NET:
                if (!gcc_read_client_network_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case CS_CLUSTER:
                if (!gcc_read_client_cluster_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case CS_MONITOR:
                if (!gcc_read_client_monitor_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case CS_MCS_MSGCHANNEL:
                if (!gcc_read_client_message_channel_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case CS_MONITOR_EX:
                if (!gcc_read_client_monitor_extended_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            case 0xC009: /* observed in the wild alongside CS_MULTITRANSPORT */
            case CS_MULTITRANSPORT:
                if (!gcc_read_client_multitransport_channel_data(s, mcs, blockLength - 4))
                    return FALSE;

                break;

            default:
                /* Unknown block types are skipped, not treated as fatal. */
                WLog_ERR(TAG, "Unknown GCC client data block: 0x%04" PRIX16 "", type);
                Stream_Seek(s, blockLength - 4);
                break;
        }

        endPos = Stream_GetPosition(s);

        if (endPos != (begPos + blockLength))
        {
            /* Fixed: begPos/endPos are size_t; printing them with %d was
             * undefined behavior on LP64 platforms. Use the winpr PRIuz
             * conversion instead. */
            WLog_ERR(TAG,
                     "Error parsing GCC client data block 0x%04" PRIX16
                     ": Actual Offset: %" PRIuz " Expected Offset: %" PRIuz,
                     type, endPos, begPos + blockLength);
        }

        length -= blockLength;
        Stream_SetPosition(s, begPos + blockLength);
    }

    return TRUE;
}
/* Emit all client-to-server GCC data blocks in the order the server expects.
 * Monitor blocks are appended only when true multi-monitor data is available
 * and either the server advertised EXTENDED_CLIENT_DATA_SUPPORTED or the user
 * explicitly forced it. */
void gcc_write_client_data_blocks(wStream* s, rdpMcs* mcs)
{
    rdpSettings* settings = mcs->settings;
    const BOOL sendMonitorData = settings->UseMultimon && !settings->SpanMonitors;

    gcc_write_client_core_data(s, mcs);
    gcc_write_client_cluster_data(s, mcs);
    gcc_write_client_security_data(s, mcs);
    gcc_write_client_network_data(s, mcs);

    if (settings->NegotiationFlags & EXTENDED_CLIENT_DATA_SUPPORTED)
    {
        /* extended client data supported */
        if (sendMonitorData)
        {
            gcc_write_client_monitor_data(s, mcs);
            gcc_write_client_monitor_extended_data(s, mcs);
        }

        gcc_write_client_message_channel_data(s, mcs);
        gcc_write_client_multitransport_channel_data(s, mcs);
    }
    else if (sendMonitorData)
    {
        WLog_ERR(TAG, "WARNING: true multi monitor support was not advertised by server!");

        if (settings->ForceMultimon)
        {
            WLog_ERR(TAG, "Sending multi monitor information anyway (may break connectivity!)");
            gcc_write_client_monitor_data(s, mcs);
            gcc_write_client_monitor_extended_data(s, mcs);
        }
        else
        {
            WLog_ERR(TAG, "Use /multimon:force to force sending multi monitor information");
        }
    }
}
/* Parse the server-to-client GCC data blocks (TS_UD_SC_*), dispatching each
 * block to its typed reader. After every block the stream pointer is
 * re-anchored to holdp + blockLength, so a reader that consumed too little or
 * too much cannot desynchronize subsequent blocks. Unknown block types are
 * logged and skipped. */
BOOL gcc_read_server_data_blocks(wStream* s, rdpMcs* mcs, int length)
{
    UINT16 type;
    /* NOTE(review): offset is UINT16 while length is int; a total length of
     * 64 KiB or more would wrap offset — confirm upstream callers bound it. */
    UINT16 offset = 0;
    UINT16 blockLength;
    BYTE* holdp; /* start-of-block pointer used to re-anchor after each reader */

    while (offset < length)
    {
        holdp = Stream_Pointer(s);

        if (!gcc_read_user_data_header(s, &type, &blockLength))
        {
            WLog_ERR(TAG, "gcc_read_server_data_blocks: gcc_read_user_data_header failed");
            return FALSE;
        }

        switch (type)
        {
            case SC_CORE:
                if (!gcc_read_server_core_data(s, mcs))
                {
                    WLog_ERR(TAG, "gcc_read_server_data_blocks: gcc_read_server_core_data failed");
                    return FALSE;
                }

                break;

            case SC_SECURITY:
                if (!gcc_read_server_security_data(s, mcs))
                {
                    WLog_ERR(TAG,
                             "gcc_read_server_data_blocks: gcc_read_server_security_data failed");
                    return FALSE;
                }

                break;

            case SC_NET:
                if (!gcc_read_server_network_data(s, mcs))
                {
                    WLog_ERR(TAG,
                             "gcc_read_server_data_blocks: gcc_read_server_network_data failed");
                    return FALSE;
                }

                break;

            case SC_MCS_MSGCHANNEL:
                if (!gcc_read_server_message_channel_data(s, mcs))
                {
                    WLog_ERR(
                        TAG,
                        "gcc_read_server_data_blocks: gcc_read_server_message_channel_data failed");
                    return FALSE;
                }

                break;

            case SC_MULTITRANSPORT:
                if (!gcc_read_server_multitransport_channel_data(s, mcs))
                {
                    WLog_ERR(TAG, "gcc_read_server_data_blocks: "
                                  "gcc_read_server_multitransport_channel_data failed");
                    return FALSE;
                }

                break;

            default:
                /* Unknown types are tolerated; blockLength below skips them. */
                WLog_ERR(TAG, "gcc_read_server_data_blocks: ignoring type=%" PRIu16 "", type);
                break;
        }

        offset += blockLength;
        Stream_SetPointer(s, holdp + blockLength);
    }

    return TRUE;
}
/* Write the server-to-client GCC data blocks. Each writer may fail on a
 * stream-capacity problem, so stop at the first failure. */
BOOL gcc_write_server_data_blocks(wStream* s, rdpMcs* mcs)
{
    if (!gcc_write_server_core_data(s, mcs)) /* serverCoreData */
        return FALSE;

    if (!gcc_write_server_network_data(s, mcs)) /* serverNetworkData */
        return FALSE;

    if (!gcc_write_server_security_data(s, mcs)) /* serverSecurityData */
        return FALSE;

    /* TODO: Send these GCC data blocks only when the client sent them */
    // gcc_write_server_multitransport_channel_data(s, settings); /* serverMultitransportChannelData
    // */
    return gcc_write_server_message_channel_data(s, mcs); /* serverMessageChannelData */
}
/**
 * Read a user data header (TS_UD_HEADER).
 * @msdn{cc240509}
 *
 * @param s stream
 * @param type receives the data block type
 * @param length receives the data block length (includes these 4 header bytes)
 * @return TRUE when the header and the advertised payload fit in the stream
 */
BOOL gcc_read_user_data_header(wStream* s, UINT16* type, UINT16* length)
{
    if (Stream_GetRemainingLength(s) < 4)
        return FALSE;

    Stream_Read_UINT16(s, *type);   /* type */
    Stream_Read_UINT16(s, *length); /* length */

    /* A TS_UD_HEADER length includes the 4 header bytes, so anything smaller
     * is malformed. The original code rejected this only via the unsigned
     * underflow of (*length - 4) below; make the rejection explicit. */
    if (*length < 4)
        return FALSE;

    if (Stream_GetRemainingLength(s) < (size_t)(*length - 4))
        return FALSE;

    return TRUE;
}
/**
* Write a user data header (TS_UD_HEADER).\n
* @msdn{cc240509}
* @param s stream
* @param type data block type
* @param length data block length
*/
/* Writes the two 16-bit TS_UD_HEADER fields; length must include these
 * 4 header bytes. */
void gcc_write_user_data_header(wStream* s, UINT16 type, UINT16 length)
{
    Stream_Write_UINT16(s, type); /* type */
    Stream_Write_UINT16(s, length); /* length */
}
/**
* Read a client core data block (TS_UD_CS_CORE).\n
* @msdn{cc240510}
* @param s stream
* @param settings rdp settings
*/
/* Parses the client core data block (TS_UD_CS_CORE). The first 128 bytes are
 * mandatory; everything after imeFileName is optional, and per the spec the
 * optional fields form a strict prefix: if one is present, all preceding ones
 * must be too, so parsing stops at the first field that no longer fits in
 * blockLength. After parsing, the effective color depth is derived (in
 * priority order) from highColorDepth, postBeta2ColorDepth, or colorDepth,
 * and several server-enabled features are narrowed by the client's
 * earlyCapabilityFlags. */
BOOL gcc_read_client_core_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
    char* str = NULL;
    UINT32 version;
    BYTE connectionType = 0;
    UINT32 clientColorDepth;
    UINT16 colorDepth = 0;
    UINT16 postBeta2ColorDepth = 0;
    UINT16 highColorDepth = 0;
    UINT16 supportedColorDepths = 0;
    UINT32 serverSelectedProtocol = 0;
    UINT16 earlyCapabilityFlags = 0;
    rdpSettings* settings = mcs->settings;
    /* Length of all required fields, until imeFileName */
    if (blockLength < 128)
        return FALSE;

    Stream_Read_UINT32(s, version); /* version (4 bytes) */
    settings->RdpVersion = rdp_version_common(version, settings->RdpVersion);
    Stream_Read_UINT16(s, settings->DesktopWidth); /* DesktopWidth (2 bytes) */
    Stream_Read_UINT16(s, settings->DesktopHeight); /* DesktopHeight (2 bytes) */
    Stream_Read_UINT16(s, colorDepth); /* ColorDepth (2 bytes) */
    Stream_Seek_UINT16(s); /* SASSequence (Secure Access Sequence) (2 bytes) */
    Stream_Read_UINT32(s, settings->KeyboardLayout); /* KeyboardLayout (4 bytes) */
    Stream_Read_UINT32(s, settings->ClientBuild); /* ClientBuild (4 bytes) */
    /* clientName (32 bytes, null-terminated unicode, truncated to 15 characters) */
    if (ConvertFromUnicode(CP_UTF8, 0, (WCHAR*)Stream_Pointer(s), 32 / 2, &str, 0, NULL, NULL) < 1)
    {
        WLog_ERR(TAG, "failed to convert client host name");
        return FALSE;
    }

    Stream_Seek(s, 32);
    /* Replace any previous hostname; str ownership moves to settings. */
    free(settings->ClientHostname);
    settings->ClientHostname = str;
    str = NULL;
    Stream_Read_UINT32(s, settings->KeyboardType); /* KeyboardType (4 bytes) */
    Stream_Read_UINT32(s, settings->KeyboardSubType); /* KeyboardSubType (4 bytes) */
    Stream_Read_UINT32(s, settings->KeyboardFunctionKey); /* KeyboardFunctionKey (4 bytes) */
    Stream_Seek(s, 64); /* imeFileName (64 bytes) */
    blockLength -= 128;
    /**
     * The following fields are all optional. If one field is present, all of the preceding
     * fields MUST also be present. If one field is not present, all of the subsequent fields
     * MUST NOT be present.
     * We must check the bytes left before reading each field.
     */
    do
    {
        if (blockLength < 2)
            break;

        Stream_Read_UINT16(s, postBeta2ColorDepth); /* postBeta2ColorDepth (2 bytes) */
        blockLength -= 2;

        if (blockLength < 2)
            break;

        Stream_Seek_UINT16(s); /* clientProductID (2 bytes) */
        blockLength -= 2;

        if (blockLength < 4)
            break;

        Stream_Seek_UINT32(s); /* serialNumber (4 bytes) */
        blockLength -= 4;

        if (blockLength < 2)
            break;

        Stream_Read_UINT16(s, highColorDepth); /* highColorDepth (2 bytes) */
        blockLength -= 2;

        if (blockLength < 2)
            break;

        Stream_Read_UINT16(s, supportedColorDepths); /* supportedColorDepths (2 bytes) */
        blockLength -= 2;

        if (blockLength < 2)
            break;

        Stream_Read_UINT16(s, earlyCapabilityFlags); /* earlyCapabilityFlags (2 bytes) */
        settings->EarlyCapabilityFlags = (UINT32)earlyCapabilityFlags;
        blockLength -= 2;

        /* clientDigProductId (64 bytes): Contains a value that uniquely identifies the client */
        if (blockLength < 64)
            break;

        if (ConvertFromUnicode(CP_UTF8, 0, (WCHAR*)Stream_Pointer(s), 64 / 2, &str, 0, NULL, NULL) <
            1)
        {
            WLog_ERR(TAG, "failed to convert the client product identifier");
            return FALSE;
        }

        Stream_Seek(s, 64); /* clientDigProductId (64 bytes) */
        free(settings->ClientProductId);
        settings->ClientProductId = str;
        blockLength -= 64;

        if (blockLength < 1)
            break;

        Stream_Read_UINT8(s, connectionType); /* connectionType (1 byte) */
        blockLength -= 1;

        if (blockLength < 1)
            break;

        Stream_Seek_UINT8(s); /* pad1octet (1 byte) */
        blockLength -= 1;

        if (blockLength < 4)
            break;

        Stream_Read_UINT32(s, serverSelectedProtocol); /* serverSelectedProtocol (4 bytes) */
        blockLength -= 4;

        if (blockLength < 4)
            break;

        Stream_Read_UINT32(s, settings->DesktopPhysicalWidth); /* desktopPhysicalWidth (4 bytes) */
        blockLength -= 4;

        if (blockLength < 4)
            break;

        Stream_Read_UINT32(s,
                           settings->DesktopPhysicalHeight); /* desktopPhysicalHeight (4 bytes) */
        blockLength -= 4;

        if (blockLength < 2)
            break;

        Stream_Read_UINT16(s, settings->DesktopOrientation); /* desktopOrientation (2 bytes) */
        blockLength -= 2;

        if (blockLength < 4)
            break;

        Stream_Read_UINT32(s, settings->DesktopScaleFactor); /* desktopScaleFactor (4 bytes) */
        blockLength -= 4;

        if (blockLength < 4)
            break;

        Stream_Read_UINT32(s, settings->DeviceScaleFactor); /* deviceScaleFactor (4 bytes) */

        /* The client must echo back the protocol the server selected. */
        if (settings->SelectedProtocol != serverSelectedProtocol)
            return FALSE;
    } while (0);

    /* Derive the effective color depth: highColorDepth wins, then
     * postBeta2ColorDepth, then the legacy colorDepth field. */
    if (highColorDepth > 0)
    {
        if (earlyCapabilityFlags & RNS_UD_CS_WANT_32BPP_SESSION)
            clientColorDepth = 32;
        else
            clientColorDepth = highColorDepth;
    }
    else if (postBeta2ColorDepth > 0)
    {
        switch (postBeta2ColorDepth)
        {
            case RNS_UD_COLOR_4BPP:
                clientColorDepth = 4;
                break;

            case RNS_UD_COLOR_8BPP:
                clientColorDepth = 8;
                break;

            case RNS_UD_COLOR_16BPP_555:
                clientColorDepth = 15;
                break;

            case RNS_UD_COLOR_16BPP_565:
                clientColorDepth = 16;
                break;

            case RNS_UD_COLOR_24BPP:
                clientColorDepth = 24;
                break;

            default:
                return FALSE;
        }
    }
    else
    {
        switch (colorDepth)
        {
            case RNS_UD_COLOR_4BPP:
                clientColorDepth = 4;
                break;

            case RNS_UD_COLOR_8BPP:
                clientColorDepth = 8;
                break;

            default:
                return FALSE;
        }
    }

    /*
     * If we are in server mode, accept client's color depth only if
     * it is smaller than ours. This is what Windows server does.
     */
    if ((clientColorDepth < settings->ColorDepth) || !settings->ServerMode)
        settings->ColorDepth = clientColorDepth;

    /* Each feature stays enabled only when both the server configuration and
     * the client's earlyCapabilityFlags allow it. */
    if (settings->NetworkAutoDetect)
        settings->NetworkAutoDetect =
            (earlyCapabilityFlags & RNS_UD_CS_SUPPORT_NETWORK_AUTODETECT) ? TRUE : FALSE;

    if (settings->SupportHeartbeatPdu)
        settings->SupportHeartbeatPdu =
            (earlyCapabilityFlags & RNS_UD_CS_SUPPORT_HEARTBEAT_PDU) ? TRUE : FALSE;

    if (settings->SupportGraphicsPipeline)
        settings->SupportGraphicsPipeline =
            (earlyCapabilityFlags & RNS_UD_CS_SUPPORT_DYNVC_GFX_PROTOCOL) ? TRUE : FALSE;

    if (settings->SupportDynamicTimeZone)
        settings->SupportDynamicTimeZone =
            (earlyCapabilityFlags & RNS_UD_CS_SUPPORT_DYNAMIC_TIME_ZONE) ? TRUE : FALSE;

    if (settings->SupportMonitorLayoutPdu)
        settings->SupportMonitorLayoutPdu =
            (earlyCapabilityFlags & RNS_UD_CS_SUPPORT_MONITOR_LAYOUT_PDU) ? TRUE : FALSE;

    if (settings->SupportStatusInfoPdu)
        settings->SupportStatusInfoPdu =
            (earlyCapabilityFlags & RNS_UD_CS_SUPPORT_STATUSINFO_PDU) ? TRUE : FALSE;

    /* connectionType is only meaningful when the client flags it as valid. */
    if (!(earlyCapabilityFlags & RNS_UD_CS_VALID_CONNECTION_TYPE))
        connectionType = 0;

    settings->SupportErrorInfoPdu = earlyCapabilityFlags & RNS_UD_CS_SUPPORT_ERRINFO_PDU;
    settings->ConnectionType = connectionType;
    return TRUE;
}
/**
* Write a client core data block (TS_UD_CS_CORE).\n
* @msdn{cc240510}
* @param s stream
* @param settings rdp settings
*/
/* Serializes the client core data block (TS_UD_CS_CORE), 234 bytes total
 * including the user data header. clientName and clientDigProductId are
 * written as fixed-size null-padded UTF-16 fields (32 and 64 bytes), and the
 * earlyCapabilityFlags are assembled from the enabled settings. */
void gcc_write_client_core_data(wStream* s, rdpMcs* mcs)
{
    WCHAR* clientName = NULL;
    int clientNameLength;
    BYTE connectionType;
    UINT16 highColorDepth;
    UINT16 supportedColorDepths;
    UINT16 earlyCapabilityFlags;
    WCHAR* clientDigProductId = NULL;
    int clientDigProductIdLength;
    rdpSettings* settings = mcs->settings;
    gcc_write_user_data_header(s, CS_CORE, 234);
    /* Lengths returned by ConvertToUnicode are in WCHARs including the
     * terminator. */
    clientNameLength = ConvertToUnicode(CP_UTF8, 0, settings->ClientHostname, -1, &clientName, 0);
    clientDigProductIdLength =
        ConvertToUnicode(CP_UTF8, 0, settings->ClientProductId, -1, &clientDigProductId, 0);
    Stream_Write_UINT32(s, settings->RdpVersion); /* Version */
    Stream_Write_UINT16(s, settings->DesktopWidth); /* DesktopWidth */
    Stream_Write_UINT16(s, settings->DesktopHeight); /* DesktopHeight */
    Stream_Write_UINT16(s,
                        RNS_UD_COLOR_8BPP); /* ColorDepth, ignored because of postBeta2ColorDepth */
    Stream_Write_UINT16(s, RNS_UD_SAS_DEL); /* SASSequence (Secure Access Sequence) */
    Stream_Write_UINT32(s, settings->KeyboardLayout); /* KeyboardLayout */
    Stream_Write_UINT32(s, settings->ClientBuild); /* ClientBuild */

    /* clientName (32 bytes, null-terminated unicode, truncated to 15 characters) */
    if (clientNameLength >= 16)
    {
        clientNameLength = 16;
        clientName[clientNameLength - 1] = 0;
    }

    Stream_Write(s, clientName, (clientNameLength * 2));
    Stream_Zero(s, 32 - (clientNameLength * 2)); /* pad field to its fixed 32 bytes */
    free(clientName);
    Stream_Write_UINT32(s, settings->KeyboardType); /* KeyboardType */
    Stream_Write_UINT32(s, settings->KeyboardSubType); /* KeyboardSubType */
    Stream_Write_UINT32(s, settings->KeyboardFunctionKey); /* KeyboardFunctionKey */
    Stream_Zero(s, 64); /* imeFileName */
    Stream_Write_UINT16(s, RNS_UD_COLOR_8BPP); /* postBeta2ColorDepth */
    Stream_Write_UINT16(s, 1); /* clientProductID */
    Stream_Write_UINT32(s, 0); /* serialNumber (should be initialized to 0) */
    /* highColorDepth tops out at 24; 32bpp is requested via the
     * RNS_UD_CS_WANT_32BPP_SESSION flag below instead. */
    highColorDepth = MIN(settings->ColorDepth, 24);
    supportedColorDepths = RNS_UD_24BPP_SUPPORT | RNS_UD_16BPP_SUPPORT | RNS_UD_15BPP_SUPPORT;
    earlyCapabilityFlags = RNS_UD_CS_SUPPORT_ERRINFO_PDU;

    if (settings->NetworkAutoDetect)
        settings->ConnectionType = CONNECTION_TYPE_AUTODETECT;

    if (settings->RemoteFxCodec && !settings->NetworkAutoDetect)
        settings->ConnectionType = CONNECTION_TYPE_LAN;

    connectionType = settings->ConnectionType;

    if (connectionType)
        earlyCapabilityFlags |= RNS_UD_CS_VALID_CONNECTION_TYPE;

    if (settings->ColorDepth == 32)
    {
        supportedColorDepths |= RNS_UD_32BPP_SUPPORT;
        earlyCapabilityFlags |= RNS_UD_CS_WANT_32BPP_SESSION;
    }

    if (settings->NetworkAutoDetect)
        earlyCapabilityFlags |= RNS_UD_CS_SUPPORT_NETWORK_AUTODETECT;

    if (settings->SupportHeartbeatPdu)
        earlyCapabilityFlags |= RNS_UD_CS_SUPPORT_HEARTBEAT_PDU;

    if (settings->SupportGraphicsPipeline)
        earlyCapabilityFlags |= RNS_UD_CS_SUPPORT_DYNVC_GFX_PROTOCOL;

    if (settings->SupportDynamicTimeZone)
        earlyCapabilityFlags |= RNS_UD_CS_SUPPORT_DYNAMIC_TIME_ZONE;

    if (settings->SupportMonitorLayoutPdu)
        earlyCapabilityFlags |= RNS_UD_CS_SUPPORT_MONITOR_LAYOUT_PDU;

    if (settings->SupportStatusInfoPdu)
        earlyCapabilityFlags |= RNS_UD_CS_SUPPORT_STATUSINFO_PDU;

    Stream_Write_UINT16(s, highColorDepth); /* highColorDepth */
    Stream_Write_UINT16(s, supportedColorDepths); /* supportedColorDepths */
    Stream_Write_UINT16(s, earlyCapabilityFlags); /* earlyCapabilityFlags */

    /* clientDigProductId (64 bytes, null-terminated unicode, truncated to 31 characters) */
    if (clientDigProductIdLength >= 32)
    {
        clientDigProductIdLength = 32;
        clientDigProductId[clientDigProductIdLength - 1] = 0;
    }

    Stream_Write(s, clientDigProductId, (clientDigProductIdLength * 2));
    Stream_Zero(s, 64 - (clientDigProductIdLength * 2)); /* pad field to its fixed 64 bytes */
    free(clientDigProductId);
    Stream_Write_UINT8(s, connectionType); /* connectionType */
    Stream_Write_UINT8(s, 0); /* pad1octet */
    Stream_Write_UINT32(s, settings->SelectedProtocol); /* serverSelectedProtocol */
    Stream_Write_UINT32(s, settings->DesktopPhysicalWidth); /* desktopPhysicalWidth */
    Stream_Write_UINT32(s, settings->DesktopPhysicalHeight); /* desktopPhysicalHeight */
    Stream_Write_UINT16(s, settings->DesktopOrientation); /* desktopOrientation */
    Stream_Write_UINT32(s, settings->DesktopScaleFactor); /* desktopScaleFactor */
    Stream_Write_UINT32(s, settings->DeviceScaleFactor); /* deviceScaleFactor */
}
/* Parse the server core data block (TS_UD_SC_CORE). Only the version field is
 * mandatory; the optional clientRequestedProtocols and earlyCapabilityFlags
 * fields are consumed when present but not stored anywhere. */
BOOL gcc_read_server_core_data(wStream* s, rdpMcs* mcs)
{
    UINT32 serverVersion;
    UINT32 clientRequestedProtocols;
    UINT32 earlyCapabilityFlags;
    rdpSettings* settings = mcs->settings;

    if (Stream_GetRemainingLength(s) < 4)
        return FALSE;

    Stream_Read_UINT32(s, serverVersion); /* version */
    settings->RdpVersion = rdp_version_common(serverVersion, settings->RdpVersion);

    /* clientRequestedProtocols (optional, 4 bytes) */
    if (Stream_GetRemainingLength(s) >= 4)
        Stream_Read_UINT32(s, clientRequestedProtocols);

    /* earlyCapabilityFlags (optional, 4 bytes) */
    if (Stream_GetRemainingLength(s) >= 4)
        Stream_Read_UINT32(s, earlyCapabilityFlags);

    return TRUE;
}
/* Write the server core data block (TS_UD_SC_CORE): a 4-byte user data header
 * followed by version, clientRequestedProtocols and earlyCapabilityFlags
 * (4 bytes each). */
BOOL gcc_write_server_core_data(wStream* s, rdpMcs* mcs)
{
    rdpSettings* settings = mcs->settings;
    const UINT32 earlyCapabilityFlags =
        settings->SupportDynamicTimeZone ? RNS_UD_SC_DYNAMIC_DST_SUPPORTED : 0;

    if (!Stream_EnsureRemainingCapacity(s, 20))
        return FALSE;

    gcc_write_user_data_header(s, SC_CORE, 16);
    Stream_Write_UINT32(s, settings->RdpVersion);         /* version (4 bytes) */
    Stream_Write_UINT32(s, settings->RequestedProtocols); /* clientRequestedProtocols (4 bytes) */
    Stream_Write_UINT32(s, earlyCapabilityFlags);         /* earlyCapabilityFlags (4 bytes) */
    return TRUE;
}
/**
* Read a client security data block (TS_UD_CS_SEC).\n
* @msdn{cc240511}
* @param s stream
* @param settings rdp settings
*/
/* Parse the client security data block (TS_UD_CS_SEC).
 * When RDP security is not in use the 8 bytes are skipped unread. A zero
 * encryptionMethods value indicates a French-locale client that advertises
 * its methods through extEncryptionMethods instead. */
BOOL gcc_read_client_security_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
    rdpSettings* settings = mcs->settings;

    if (blockLength < 8)
        return FALSE;

    if (!settings->UseRdpSecurityLayer)
    {
        Stream_Seek(s, 8);
        return TRUE;
    }

    Stream_Read_UINT32(s, settings->EncryptionMethods); /* encryptionMethods */

    if (settings->EncryptionMethods == 0)
        Stream_Read_UINT32(s, settings->EncryptionMethods); /* extEncryptionMethods */
    else
        Stream_Seek(s, 4);

    return TRUE;
}
/**
* Write a client security data block (TS_UD_CS_SEC).\n
* @msdn{cc240511}
* @param s stream
* @param settings rdp settings
*/
/* Write the client security data block (TS_UD_CS_SEC).
 * With RDP security enabled the methods are advertised in encryptionMethods;
 * otherwise they go into extEncryptionMethods (French locale convention) and
 * encryptionMethods is zeroed. */
void gcc_write_client_security_data(wStream* s, rdpMcs* mcs)
{
    rdpSettings* settings = mcs->settings;
    UINT32 encryptionMethods = 0;
    UINT32 extEncryptionMethods = 0;

    if (settings->UseRdpSecurityLayer)
        encryptionMethods = settings->EncryptionMethods;
    else
        extEncryptionMethods = settings->EncryptionMethods; /* French locale, disable encryption */

    gcc_write_user_data_header(s, CS_SECURITY, 12);
    Stream_Write_UINT32(s, encryptionMethods);    /* encryptionMethods */
    Stream_Write_UINT32(s, extEncryptionMethods); /* extEncryptionMethods */
}
/* Parse the server security data block (TS_UD_SC_SEC1): validates the
 * advertised encryption method and level against the combinations allowed by
 * MS-RDPBCGR section 5.3.2, then — unless the level is NONE — reads the
 * server random and the server certificate, parsing the latter into
 * settings->RdpServerCertificate. On any failure after allocation begins, the
 * fail path releases the partially populated buffers. */
BOOL gcc_read_server_security_data(wStream* s, rdpMcs* mcs)
{
    BYTE* data;
    UINT32 length;
    rdpSettings* settings = mcs->settings;
    BOOL validCryptoConfig = FALSE;
    UINT32 serverEncryptionMethod;

    if (Stream_GetRemainingLength(s) < 8)
        return FALSE;

    Stream_Read_UINT32(s, serverEncryptionMethod); /* encryptionMethod */
    Stream_Read_UINT32(s, settings->EncryptionLevel); /* encryptionLevel */

    /* Only accept valid/known encryption methods */
    switch (serverEncryptionMethod)
    {
        case ENCRYPTION_METHOD_NONE:
            WLog_DBG(TAG, "Server rdp encryption method: NONE");
            break;

        case ENCRYPTION_METHOD_40BIT:
            WLog_DBG(TAG, "Server rdp encryption method: 40BIT");
            break;

        case ENCRYPTION_METHOD_56BIT:
            WLog_DBG(TAG, "Server rdp encryption method: 56BIT");
            break;

        case ENCRYPTION_METHOD_128BIT:
            WLog_DBG(TAG, "Server rdp encryption method: 128BIT");
            break;

        case ENCRYPTION_METHOD_FIPS:
            WLog_DBG(TAG, "Server rdp encryption method: FIPS");
            break;

        default:
            WLog_ERR(TAG, "Received unknown encryption method %08" PRIX32 "",
                     serverEncryptionMethod);
            return FALSE;
    }

    /* A server picking a method the client never offered is tolerated with a
     * warning rather than rejected. */
    if (settings->UseRdpSecurityLayer && !(settings->EncryptionMethods & serverEncryptionMethod))
    {
        WLog_WARN(TAG, "Server uses non-advertised encryption method 0x%08" PRIX32 "",
                  serverEncryptionMethod);
        /* FIXME: Should we return FALSE; in this case ?? */
    }

    settings->EncryptionMethods = serverEncryptionMethod;

    /* Verify encryption level/method combinations according to MS-RDPBCGR Section 5.3.2 */
    switch (settings->EncryptionLevel)
    {
        case ENCRYPTION_LEVEL_NONE:
            if (settings->EncryptionMethods == ENCRYPTION_METHOD_NONE)
            {
                validCryptoConfig = TRUE;
            }

            break;

        case ENCRYPTION_LEVEL_FIPS:
            if (settings->EncryptionMethods == ENCRYPTION_METHOD_FIPS)
            {
                validCryptoConfig = TRUE;
            }

            break;

        case ENCRYPTION_LEVEL_LOW:
        case ENCRYPTION_LEVEL_HIGH:
        case ENCRYPTION_LEVEL_CLIENT_COMPATIBLE:
            if (settings->EncryptionMethods == ENCRYPTION_METHOD_40BIT ||
                settings->EncryptionMethods == ENCRYPTION_METHOD_56BIT ||
                settings->EncryptionMethods == ENCRYPTION_METHOD_128BIT ||
                settings->EncryptionMethods == ENCRYPTION_METHOD_FIPS)
            {
                validCryptoConfig = TRUE;
            }

            break;

        default:
            WLog_ERR(TAG, "Received unknown encryption level 0x%08" PRIX32 "",
                     settings->EncryptionLevel);
    }

    if (!validCryptoConfig)
    {
        WLog_ERR(TAG,
                 "Received invalid cryptographic configuration (level=0x%08" PRIX32
                 " method=0x%08" PRIX32 ")",
                 settings->EncryptionLevel, settings->EncryptionMethods);
        return FALSE;
    }

    if (settings->EncryptionLevel == ENCRYPTION_LEVEL_NONE)
    {
        /* serverRandomLen and serverCertLen must not be present */
        settings->UseRdpSecurityLayer = FALSE;
        return TRUE;
    }

    if (Stream_GetRemainingLength(s) < 8)
        return FALSE;

    Stream_Read_UINT32(s, settings->ServerRandomLength); /* serverRandomLen */
    Stream_Read_UINT32(s, settings->ServerCertificateLength); /* serverCertLen */

    if ((settings->ServerRandomLength == 0) || (settings->ServerCertificateLength == 0))
        return FALSE;

    if (Stream_GetRemainingLength(s) < settings->ServerRandomLength)
        return FALSE;

    /* serverRandom */
    settings->ServerRandom = (BYTE*)malloc(settings->ServerRandomLength);

    if (!settings->ServerRandom)
        goto fail;

    Stream_Read(s, settings->ServerRandom, settings->ServerRandomLength);

    if (Stream_GetRemainingLength(s) < settings->ServerCertificateLength)
        goto fail;

    /* serverCertificate */
    settings->ServerCertificate = (BYTE*)malloc(settings->ServerCertificateLength);

    if (!settings->ServerCertificate)
        goto fail;

    Stream_Read(s, settings->ServerCertificate, settings->ServerCertificateLength);
    /* Replace any previously parsed certificate before re-parsing. */
    certificate_free(settings->RdpServerCertificate);
    settings->RdpServerCertificate = certificate_new();

    if (!settings->RdpServerCertificate)
        goto fail;

    data = settings->ServerCertificate;
    length = settings->ServerCertificateLength;

    if (!certificate_read_server_certificate(settings->RdpServerCertificate, data, length))
        goto fail;

    return TRUE;
fail:
    /* Release whatever was allocated before the failure; free(NULL) is safe. */
    free(settings->ServerRandom);
    free(settings->ServerCertificate);
    settings->ServerRandom = NULL;
    settings->ServerCertificate = NULL;
    return FALSE;
}
/* 64-byte template for the proprietary certificate signature block.
 * The MD5 digest of the certificate data is later written over the first
 * 16 bytes (see gcc_write_server_security_data); the 0x00 byte directly
 * after that digest region and the trailing 0x01 frame the 0xff padding
 * before the block is RSA-encrypted with the Terminal Services key. */
static const BYTE initial_signature[] = {
	0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
	0x00, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
	0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
	0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0x01
};
/*
 * Terminal Services Signing Keys.
 * Yes, Terminal Services Private Key is publicly available.
 */
/* RSA modulus of the well-known Terminal Services signing key */
const BYTE tssk_modulus[] = { 0x3d, 0x3a, 0x5e, 0xbd, 0x72, 0x43, 0x3e, 0xc9, 0x4d, 0xbb, 0xc1,
	                          0x1e, 0x4a, 0xba, 0x5f, 0xcb, 0x3e, 0x88, 0x20, 0x87, 0xef, 0xf5,
	                          0xc1, 0xe2, 0xd7, 0xb7, 0x6b, 0x9a, 0xf2, 0x52, 0x45, 0x95, 0xce,
	                          0x63, 0x65, 0x6b, 0x58, 0x3a, 0xfe, 0xef, 0x7c, 0xe7, 0xbf, 0xfe,
	                          0x3d, 0xf6, 0x5c, 0x7d, 0x6c, 0x5e, 0x06, 0x09, 0x1a, 0xf5, 0x61,
	                          0xbb, 0x20, 0x93, 0x09, 0x5f, 0x05, 0x6d, 0xea, 0x87 };
/* private exponent of the same key, used to sign the server certificate */
const BYTE tssk_privateExponent[] = {
	0x87, 0xa7, 0x19, 0x32, 0xda, 0x11, 0x87, 0x55, 0x58, 0x00, 0x16, 0x16, 0x25, 0x65, 0x68, 0xf8,
	0x24, 0x3e, 0xe6, 0xfa, 0xe9, 0x67, 0x49, 0x94, 0xcf, 0x92, 0xcc, 0x33, 0x99, 0xe8, 0x08, 0x60,
	0x17, 0x9a, 0x12, 0x9f, 0x24, 0xdd, 0xb1, 0x24, 0x99, 0xc7, 0x3a, 0xb8, 0x0a, 0x7b, 0x0d, 0xdd,
	0x35, 0x07, 0x79, 0x17, 0x0b, 0x51, 0x9b, 0xb3, 0xc7, 0x10, 0x01, 0x13, 0xe7, 0x3f, 0xf3, 0x5f
};
/* public exponent of the signing key */
const BYTE tssk_exponent[] = { 0x5b, 0x7b, 0x88, 0xc0 };
/**
 * Write the Server Security Data block (TS_UD_SC_SEC1).
 * Negotiates the effective RDP encryption method from the server's configured
 * encryption level and the client's announced methods, then (unless security
 * is NONE) emits a fresh 32-byte server random and a proprietary server
 * certificate signed with the well-known Terminal Services key.
 * @param s output stream
 * @param mcs MCS instance holding the rdpSettings to negotiate against
 * @return TRUE on success, FALSE on internal error or allocation failure
 */
BOOL gcc_write_server_security_data(wStream* s, rdpMcs* mcs)
{
	BYTE* sigData;
	int expLen, keyLen, sigDataLen;
	BYTE encryptedSignature[TSSK_KEY_LENGTH];
	BYTE signature[sizeof(initial_signature)];
	UINT32 headerLen, serverRandomLen, serverCertLen, wPublicKeyBlobLen;
	rdpSettings* settings = mcs->settings;
	/**
	 * Re: settings->EncryptionLevel:
	 * This is configured/set by the server implementation and serves the same
	 * purpose as the "Encryption Level" setting in the RDP-Tcp configuration
	 * dialog of Microsoft's Remote Desktop Session Host Configuration.
	 * Re: settings->EncryptionMethods:
	 * at this point this setting contains the client's supported encryption
	 * methods we've received in gcc_read_client_security_data()
	 */
	if (!settings->UseRdpSecurityLayer)
	{
		/* TLS/NLA is used: disable rdp style encryption */
		settings->EncryptionLevel = ENCRYPTION_LEVEL_NONE;
	}
	/* verify server encryption level value; an unknown value is coerced to
	 * CLIENT-COMPATIBLE rather than failing the connection */
	switch (settings->EncryptionLevel)
	{
		case ENCRYPTION_LEVEL_NONE:
			WLog_INFO(TAG, "Active rdp encryption level: NONE");
			break;
		case ENCRYPTION_LEVEL_FIPS:
			WLog_INFO(TAG, "Active rdp encryption level: FIPS Compliant");
			break;
		case ENCRYPTION_LEVEL_HIGH:
			WLog_INFO(TAG, "Active rdp encryption level: HIGH");
			break;
		case ENCRYPTION_LEVEL_LOW:
			WLog_INFO(TAG, "Active rdp encryption level: LOW");
			break;
		case ENCRYPTION_LEVEL_CLIENT_COMPATIBLE:
			WLog_INFO(TAG, "Active rdp encryption level: CLIENT-COMPATIBLE");
			break;
		default:
			WLog_ERR(TAG, "Invalid server encryption level 0x%08" PRIX32 "",
			         settings->EncryptionLevel);
			WLog_ERR(TAG, "Switching to encryption level CLIENT-COMPATIBLE");
			settings->EncryptionLevel = ENCRYPTION_LEVEL_CLIENT_COMPATIBLE;
	}
	/* choose rdp encryption method based on server level and client methods;
	 * note that for NONE/FIPS/HIGH the server-mandated method is enforced
	 * even when the client did not announce it (a warning is logged) */
	switch (settings->EncryptionLevel)
	{
		case ENCRYPTION_LEVEL_NONE:
			/* The only valid method is NONE in this case */
			settings->EncryptionMethods = ENCRYPTION_METHOD_NONE;
			break;
		case ENCRYPTION_LEVEL_FIPS:
			/* The only valid method is FIPS in this case */
			if (!(settings->EncryptionMethods & ENCRYPTION_METHOD_FIPS))
			{
				WLog_WARN(TAG, "client does not support FIPS as required by server configuration");
			}
			settings->EncryptionMethods = ENCRYPTION_METHOD_FIPS;
			break;
		case ENCRYPTION_LEVEL_HIGH:
			/* Maximum key strength supported by the server must be used (128 bit)*/
			if (!(settings->EncryptionMethods & ENCRYPTION_METHOD_128BIT))
			{
				WLog_WARN(TAG, "client does not support 128 bit encryption method as required by "
				               "server configuration");
			}
			settings->EncryptionMethods = ENCRYPTION_METHOD_128BIT;
			break;
		case ENCRYPTION_LEVEL_LOW:
		case ENCRYPTION_LEVEL_CLIENT_COMPATIBLE:
			/* Maximum key strength supported by the client must be used */
			if (settings->EncryptionMethods & ENCRYPTION_METHOD_128BIT)
				settings->EncryptionMethods = ENCRYPTION_METHOD_128BIT;
			else if (settings->EncryptionMethods & ENCRYPTION_METHOD_56BIT)
				settings->EncryptionMethods = ENCRYPTION_METHOD_56BIT;
			else if (settings->EncryptionMethods & ENCRYPTION_METHOD_40BIT)
				settings->EncryptionMethods = ENCRYPTION_METHOD_40BIT;
			else if (settings->EncryptionMethods & ENCRYPTION_METHOD_FIPS)
				settings->EncryptionMethods = ENCRYPTION_METHOD_FIPS;
			else
			{
				WLog_WARN(TAG, "client has not announced any supported encryption methods");
				settings->EncryptionMethods = ENCRYPTION_METHOD_128BIT;
			}
			break;
		default:
			WLog_ERR(TAG, "internal error: unknown encryption level");
			return FALSE;
	}
	/* log selected encryption method */
	switch (settings->EncryptionMethods)
	{
		case ENCRYPTION_METHOD_NONE:
			WLog_INFO(TAG, "Selected rdp encryption method: NONE");
			break;
		case ENCRYPTION_METHOD_40BIT:
			WLog_INFO(TAG, "Selected rdp encryption method: 40BIT");
			break;
		case ENCRYPTION_METHOD_56BIT:
			WLog_INFO(TAG, "Selected rdp encryption method: 56BIT");
			break;
		case ENCRYPTION_METHOD_128BIT:
			WLog_INFO(TAG, "Selected rdp encryption method: 128BIT");
			break;
		case ENCRYPTION_METHOD_FIPS:
			WLog_INFO(TAG, "Selected rdp encryption method: FIPS");
			break;
		default:
			WLog_ERR(TAG, "internal error: unknown encryption method");
			return FALSE;
	}
	/* compute the total block size before writing: fixed 12-byte header plus,
	 * when encryption is active, the random/certificate length fields and
	 * their payloads */
	headerLen = 12;
	keyLen = 0;
	wPublicKeyBlobLen = 0;
	serverRandomLen = 0;
	serverCertLen = 0;
	if (settings->EncryptionMethods != ENCRYPTION_METHOD_NONE)
	{
		serverRandomLen = 32;
		keyLen = settings->RdpServerRsaKey->ModulusLength;
		expLen = sizeof(settings->RdpServerRsaKey->exponent);
		wPublicKeyBlobLen = 4; /* magic (RSA1) */
		wPublicKeyBlobLen += 4; /* keylen */
		wPublicKeyBlobLen += 4; /* bitlen */
		wPublicKeyBlobLen += 4; /* datalen */
		wPublicKeyBlobLen += expLen;
		wPublicKeyBlobLen += keyLen;
		wPublicKeyBlobLen += 8; /* 8 bytes of zero padding */
		serverCertLen = 4; /* dwVersion */
		serverCertLen += 4; /* dwSigAlgId */
		serverCertLen += 4; /* dwKeyAlgId */
		serverCertLen += 2; /* wPublicKeyBlobType */
		serverCertLen += 2; /* wPublicKeyBlobLen */
		serverCertLen += wPublicKeyBlobLen;
		serverCertLen += 2; /* wSignatureBlobType */
		serverCertLen += 2; /* wSignatureBlobLen */
		serverCertLen += sizeof(encryptedSignature); /* SignatureBlob */
		serverCertLen += 8; /* 8 bytes of zero padding */
		headerLen += sizeof(serverRandomLen);
		headerLen += sizeof(serverCertLen);
		headerLen += serverRandomLen;
		headerLen += serverCertLen;
	}
	if (!Stream_EnsureRemainingCapacity(s, headerLen + 4))
		return FALSE;
	gcc_write_user_data_header(s, SC_SECURITY, headerLen);
	Stream_Write_UINT32(s, settings->EncryptionMethods); /* encryptionMethod */
	Stream_Write_UINT32(s, settings->EncryptionLevel);   /* encryptionLevel */
	if (settings->EncryptionMethods == ENCRYPTION_METHOD_NONE)
	{
		return TRUE;
	}
	Stream_Write_UINT32(s, serverRandomLen); /* serverRandomLen */
	Stream_Write_UINT32(s, serverCertLen);   /* serverCertLen */
	/* generate and emit a fresh 32-byte server random; it is also kept in the
	 * settings for the later security key derivation */
	settings->ServerRandomLength = serverRandomLen;
	settings->ServerRandom = (BYTE*)malloc(serverRandomLen);
	if (!settings->ServerRandom)
	{
		return FALSE;
	}
	winpr_RAND(settings->ServerRandom, serverRandomLen);
	Stream_Write(s, settings->ServerRandom, serverRandomLen);
	/* proprietary server certificate: remember where the signed region starts
	 * so it can be hashed below */
	sigData = Stream_Pointer(s);
	Stream_Write_UINT32(s, CERT_CHAIN_VERSION_1); /* dwVersion (4 bytes) */
	Stream_Write_UINT32(s, SIGNATURE_ALG_RSA);    /* dwSigAlgId */
	Stream_Write_UINT32(s, KEY_EXCHANGE_ALG_RSA); /* dwKeyAlgId */
	Stream_Write_UINT16(s, BB_RSA_KEY_BLOB);      /* wPublicKeyBlobType */
	Stream_Write_UINT16(s, wPublicKeyBlobLen);    /* wPublicKeyBlobLen */
	Stream_Write(s, "RSA1", 4);                   /* magic */
	Stream_Write_UINT32(s, keyLen + 8);           /* keylen */
	Stream_Write_UINT32(s, keyLen * 8);           /* bitlen */
	Stream_Write_UINT32(s, keyLen - 1);           /* datalen */
	Stream_Write(s, settings->RdpServerRsaKey->exponent, expLen);
	Stream_Write(s, settings->RdpServerRsaKey->Modulus, keyLen);
	Stream_Zero(s, 8);
	sigDataLen = Stream_Pointer(s) - sigData;
	Stream_Write_UINT16(s, BB_RSA_SIGNATURE_BLOB);          /* wSignatureBlobType */
	Stream_Write_UINT16(s, sizeof(encryptedSignature) + 8); /* wSignatureBlobLen */
	/* MD5-hash the certificate body into the signature template, then encrypt
	 * the whole padded template with the Terminal Services private key */
	memcpy(signature, initial_signature, sizeof(initial_signature));
	if (!winpr_Digest(WINPR_MD_MD5, sigData, sigDataLen, signature, sizeof(signature)))
		return FALSE;
	crypto_rsa_private_encrypt(signature, sizeof(signature), TSSK_KEY_LENGTH, tssk_modulus,
	                           tssk_privateExponent, encryptedSignature);
	Stream_Write(s, encryptedSignature, sizeof(encryptedSignature));
	Stream_Zero(s, 8);
	return TRUE;
}
/**
 * Read a client network data block (TS_UD_CS_NET).\n
 * @msdn{cc240512}
 * @param s stream positioned at the block payload
 * @param mcs MCS instance whose static channel table is populated
 * @param blockLength declared length of this user data block in bytes
 */
BOOL gcc_read_client_network_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
	UINT32 i;

	if (blockLength < 4)
		return FALSE;

	if (Stream_GetRemainingLength(s) < 4)
		return FALSE;

	Stream_Read_UINT32(s, mcs->channelCount); /* channelCount */

	/* Enforce the channel limit BEFORE using the count in arithmetic: the
	 * attacker-controlled count would otherwise overflow the 32-bit
	 * "4 + channelCount * 12" computation in the length check below. */
	if (mcs->channelCount > CHANNEL_MAX_COUNT)
		return FALSE;

	if (blockLength < 4 + mcs->channelCount * 12)
		return FALSE;

	/* The declared blockLength may promise more bytes than the stream
	 * actually holds; validate against the real remaining length too. */
	if (Stream_GetRemainingLength(s) < (size_t)mcs->channelCount * 12)
		return FALSE;

	/* channelDefArray */
	for (i = 0; i < mcs->channelCount; i++)
	{
		/**
		 * CHANNEL_DEF
		 * - name: an 8-byte array containing a null-terminated collection
		 *   of seven ANSI characters that uniquely identify the channel.
		 * - options: a 32-bit, unsigned integer. Channel option flags
		 */
		Stream_Read(s, mcs->channels[i].Name, 8); /* name (8 bytes) */

		/* a name without a terminating NUL is a protocol violation */
		if (!memchr(mcs->channels[i].Name, 0, 8))
		{
			WLog_ERR(
			    TAG,
			    "protocol violation: received a static channel name with missing null-termination");
			return FALSE;
		}

		Stream_Read_UINT32(s, mcs->channels[i].options); /* options (4 bytes) */
		mcs->channels[i].ChannelId = mcs->baseChannelId++; /* assign next MCS id */
	}

	return TRUE;
}
/**
 * Write a client network data block (TS_UD_CS_NET).\n
 * @msdn{cc240512}
 * @param s output stream
 * @param mcs MCS instance holding the requested static channels
 */
/* Emit the client-to-server TS_UD_CS_NET block listing every requested
 * static virtual channel; when no channels are configured the block is
 * omitted entirely. */
void gcc_write_client_network_data(wStream* s, rdpMcs* mcs)
{
	UINT32 index;
	UINT16 blockLen;

	if (mcs->channelCount == 0)
		return; /* no channels -> no CS_NET block */

	blockLen = mcs->channelCount * 12 + 8;
	gcc_write_user_data_header(s, CS_NET, blockLen);
	Stream_Write_UINT32(s, mcs->channelCount); /* channelCount */

	for (index = 0; index < mcs->channelCount; index++)
	{
		/* CHANNEL_DEF: 8-byte name followed by 4-byte option flags */
		Stream_Write(s, mcs->channels[index].Name, 8);
		Stream_Write_UINT32(s, mcs->channels[index].options);
	}
}
/* Read a server network data block (TS_UD_SC_NET): the MCS channel id of the
 * I/O channel followed by the channel ids the server assigned to each of the
 * channels we requested. Returns FALSE on truncated input. */
BOOL gcc_read_server_network_data(wStream* s, rdpMcs* mcs)
{
	int i;
	UINT16 channelId;
	UINT16 MCSChannelId;
	UINT16 channelCount;
	UINT16 parsedChannelCount;
	if (Stream_GetRemainingLength(s) < 4)
		return FALSE;
	Stream_Read_UINT16(s, MCSChannelId); /* MCSChannelId */
	Stream_Read_UINT16(s, channelCount); /* channelCount */
	parsedChannelCount = channelCount;
	if (channelCount != mcs->channelCount)
	{
		WLog_ERR(TAG, "requested %" PRIu32 " channels, got %" PRIu16 " instead", mcs->channelCount,
		         channelCount);
		/* we ensure that the response is not bigger than the request */
		if (channelCount > mcs->channelCount)
			parsedChannelCount = mcs->channelCount;
	}
	/* the length check covers all announced channel ids, not just the ones
	 * parsed into the local channel table */
	if (Stream_GetRemainingLength(s) < (size_t)channelCount * 2)
		return FALSE;
	/* NOTE(review): when the server announces more channels than requested,
	 * only the first parsedChannelCount ids are consumed here; the surplus
	 * ids are left in the stream — presumably the caller tolerates or skips
	 * that remainder, verify against the caller. */
	for (i = 0; i < parsedChannelCount; i++)
	{
		Stream_Read_UINT16(s, channelId); /* channelId */
		mcs->channels[i].ChannelId = channelId;
	}
	/* an odd channel count is followed by 2 bytes of padding */
	if (channelCount % 2 == 1)
		return Stream_SafeSeek(s, 2); /* padding */
	return TRUE;
}
/* Emit the server-to-client TS_UD_SC_NET block: the global MCS channel id,
 * the channel count, one 16-bit id per channel, and a 16-bit zero pad when
 * the count is odd. */
BOOL gcc_write_server_network_data(wStream* s, rdpMcs* mcs)
{
	UINT32 index;
	const BOOL oddCount = (mcs->channelCount % 2 == 1);
	int payloadLen = 8 + mcs->channelCount * 2 + (oddCount ? 2 : 0);

	if (!Stream_EnsureRemainingCapacity(s, payloadLen + 4))
		return FALSE;

	gcc_write_user_data_header(s, SC_NET, payloadLen);
	Stream_Write_UINT16(s, MCS_GLOBAL_CHANNEL_ID); /* MCSChannelId */
	Stream_Write_UINT16(s, mcs->channelCount);     /* channelCount */

	for (index = 0; index < mcs->channelCount; index++)
		Stream_Write_UINT16(s, mcs->channels[index].ChannelId);

	if (oddCount)
		Stream_Write_UINT16(s, 0); /* 2 bytes of padding */

	return TRUE;
}
/**
 * Read a client cluster data block (TS_UD_CS_CLUSTER).\n
 * @msdn{cc240514}
 * @param s stream positioned at the block payload
 * @param mcs MCS instance holding the rdpSettings to update
 * @param blockLength declared length of this user data block in bytes
 */
/* Parse a client cluster data block (TS_UD_CS_CLUSTER): cluster flags plus
 * an optional redirected session id, tolerating the trailing padding some
 * legacy clients append. */
BOOL gcc_read_client_cluster_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
	UINT32 clusterFlags;
	UINT32 sessionId;
	rdpSettings* settings = mcs->settings;

	if (blockLength < 8)
		return FALSE;

	Stream_Read_UINT32(s, clusterFlags); /* flags */
	Stream_Read_UINT32(s, sessionId);    /* redirectedSessionId */

	if (clusterFlags & REDIRECTED_SESSIONID_FIELD_VALID)
		settings->RedirectedSessionId = sessionId;

	if (blockLength > 8)
	{
		const size_t padding = blockLength - 8;

		/* The old Microsoft Mac RDP client can send a pad here; only skip it
		 * when the stream really contains those bytes. */
		if (Stream_GetRemainingLength(s) >= padding)
			Stream_Seek(s, padding);
	}

	return TRUE;
}
/**
 * Write a client cluster data block (TS_UD_CS_CLUSTER).\n
 * @msdn{cc240514}
 * @param s output stream
 * @param mcs MCS instance holding the rdpSettings to serialize
 */
/* Emit a fixed 12-byte client cluster data block (TS_UD_CS_CLUSTER)
 * advertising redirection support and, when applicable, the redirected
 * session id. */
void gcc_write_client_cluster_data(wStream* s, rdpMcs* mcs)
{
	rdpSettings* settings = mcs->settings;
	UINT32 clusterFlags = REDIRECTION_SUPPORTED | (REDIRECTION_VERSION4 << 2);

	if (settings->ConsoleSession || settings->RedirectedSessionId)
		clusterFlags |= REDIRECTED_SESSIONID_FIELD_VALID;

	if (settings->RedirectSmartCards)
		clusterFlags |= REDIRECTED_SMARTCARD;

	gcc_write_user_data_header(s, CS_CLUSTER, 12);
	Stream_Write_UINT32(s, clusterFlags);                  /* flags */
	Stream_Write_UINT32(s, settings->RedirectedSessionId); /* redirectedSessionID */
}
/**
 * Read a client monitor data block (TS_UD_CS_MONITOR).\n
 * @msdn{dd305336}
 * @param s stream positioned at the block payload
 * @param mcs MCS instance holding the rdpSettings to update
 * @param blockLength declared length of this user data block in bytes
 */
/* Parse the client's monitor layout into settings->MonitorDefArray,
 * converting the wire format's inclusive right/bottom coordinates to
 * width/height. Returns FALSE on malformed or oversized input. */
BOOL gcc_read_client_monitor_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
	UINT32 index;
	UINT32 flags;
	UINT32 monitorCount;
	UINT32 left, top, right, bottom;
	rdpSettings* settings = mcs->settings;
	if (blockLength < 8)
		return FALSE;
	Stream_Read_UINT32(s, flags);        /* flags */
	Stream_Read_UINT32(s, monitorCount); /* monitorCount */
	/* 2.2.1.3.6 Client Monitor Data -
	 * monitorCount (4 bytes): A 32-bit, unsigned integer. The number of display
	 * monitor definitions in the monitorDefArray field (the maximum allowed is 16).
	 */
	if (monitorCount > 16)
	{
		WLog_ERR(TAG, "announced monitors(%" PRIu32 ") exceed the 16 limit", monitorCount);
		return FALSE;
	}
	/* clamp to the locally allocated monitor array */
	if (monitorCount > settings->MonitorDefArraySize)
	{
		WLog_ERR(TAG, "too many announced monitors(%" PRIu32 "), clamping to %" PRIu32 "",
		         monitorCount, settings->MonitorDefArraySize);
		monitorCount = settings->MonitorDefArraySize;
	}
	/* each TS_MONITOR_DEF entry is 20 bytes; reject a count that does not fit
	 * in the declared block length */
	if ((UINT32)((blockLength - 8) / 20) < monitorCount)
		return FALSE;
	/* NOTE(review): after clamping, any monitor definitions beyond the
	 * clamped count are not consumed from the stream here — presumably the
	 * caller skips the remainder of the block; verify. */
	settings->MonitorCount = monitorCount;
	for (index = 0; index < monitorCount; index++)
	{
		Stream_Read_UINT32(s, left);   /* left */
		Stream_Read_UINT32(s, top);    /* top */
		Stream_Read_UINT32(s, right);  /* right */
		Stream_Read_UINT32(s, bottom); /* bottom */
		Stream_Read_UINT32(s, flags);  /* flags */
		settings->MonitorDefArray[index].x = left;
		settings->MonitorDefArray[index].y = top;
		/* right/bottom are inclusive on the wire */
		settings->MonitorDefArray[index].width = right - left + 1;
		settings->MonitorDefArray[index].height = bottom - top + 1;
		settings->MonitorDefArray[index].is_primary = (flags & MONITOR_PRIMARY);
	}
	return TRUE;
}
/**
 * Write a client monitor data block (TS_UD_CS_MONITOR).\n
 * @msdn{dd305336}
 * @param s output stream
 * @param mcs MCS instance holding the rdpSettings to serialize
 */
/* Emit a client monitor data block (TS_UD_CS_MONITOR). The block is only
 * written for multi-monitor setups; all coordinates are shifted so the
 * primary monitor's origin lands at (0,0). */
void gcc_write_client_monitor_data(wStream* s, rdpMcs* mcs)
{
	UINT32 index;
	UINT16 blockLen;
	INT32 originX = 0;
	INT32 originY = 0;
	rdpSettings* settings = mcs->settings;

	if (settings->MonitorCount <= 1)
		return; /* single-monitor setups omit this block */

	blockLen = (20 * settings->MonitorCount) + 12;
	gcc_write_user_data_header(s, CS_MONITOR, blockLen);
	Stream_Write_UINT32(s, 0);                      /* flags */
	Stream_Write_UINT32(s, settings->MonitorCount); /* monitorCount */

	/* first pass: find the primary monitor, whose top-left corner defines
	 * the coordinate origin */
	for (index = 0; index < settings->MonitorCount; index++)
	{
		if (settings->MonitorDefArray[index].is_primary)
		{
			originX = settings->MonitorDefArray[index].x;
			originY = settings->MonitorDefArray[index].y;
			break;
		}
	}

	/* second pass: emit each TS_MONITOR_DEF with inclusive right/bottom */
	for (index = 0; index < settings->MonitorCount; index++)
	{
		const UINT32 left = settings->MonitorDefArray[index].x - originX;
		const UINT32 top = settings->MonitorDefArray[index].y - originY;
		const UINT32 right = left + settings->MonitorDefArray[index].width - 1;
		const UINT32 bottom = top + settings->MonitorDefArray[index].height - 1;
		const UINT32 monitorFlags =
		    settings->MonitorDefArray[index].is_primary ? MONITOR_PRIMARY : 0;

		Stream_Write_UINT32(s, left);         /* left */
		Stream_Write_UINT32(s, top);          /* top */
		Stream_Write_UINT32(s, right);        /* right */
		Stream_Write_UINT32(s, bottom);       /* bottom */
		Stream_Write_UINT32(s, monitorFlags); /* flags */
	}
}
/* Parse a client monitor extended data block (TS_UD_CS_MONITOR_EX) carrying
 * per-monitor physical size, orientation and scaling attributes. The
 * attribute list must match the previously parsed monitor count exactly. */
BOOL gcc_read_client_monitor_extended_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
	UINT32 index;
	UINT32 flags;
	UINT32 monitorCount;
	UINT32 monitorAttributeSize;
	rdpSettings* settings = mcs->settings;
	if (blockLength < 12)
		return FALSE;
	Stream_Read_UINT32(s, flags);                /* flags */
	Stream_Read_UINT32(s, monitorAttributeSize); /* monitorAttributeSize */
	Stream_Read_UINT32(s, monitorCount);         /* monitorCount */
	/* only the 20-byte TS_MONITOR_ATTRIBUTES layout is supported; this check
	 * also guarantees the division below is well defined */
	if (monitorAttributeSize != 20)
		return FALSE;
	if ((blockLength - 12) / monitorAttributeSize < monitorCount)
		return FALSE;
	/* the extended block must describe exactly the monitors announced in the
	 * preceding TS_UD_CS_MONITOR block */
	if (settings->MonitorCount != monitorCount)
		return FALSE;
	settings->HasMonitorAttributes = TRUE;
	for (index = 0; index < monitorCount; index++)
	{
		Stream_Read_UINT32(
		    s, settings->MonitorDefArray[index].attributes.physicalWidth); /* physicalWidth */
		Stream_Read_UINT32(
		    s, settings->MonitorDefArray[index].attributes.physicalHeight); /* physicalHeight */
		Stream_Read_UINT32(
		    s, settings->MonitorDefArray[index].attributes.orientation); /* orientation */
		Stream_Read_UINT32(s, settings->MonitorDefArray[index]
		                          .attributes.desktopScaleFactor); /* desktopScaleFactor */
		Stream_Read_UINT32(
		    s,
		    settings->MonitorDefArray[index].attributes.deviceScaleFactor); /* deviceScaleFactor */
	}
	return TRUE;
}
/* Emit a client monitor extended data block (TS_UD_CS_MONITOR_EX) with one
 * 20-byte attribute record per monitor; skipped entirely when no monitor
 * attributes are available. */
void gcc_write_client_monitor_extended_data(wStream* s, rdpMcs* mcs)
{
	UINT32 index;
	UINT16 blockLen;
	rdpSettings* settings = mcs->settings;

	if (!settings->HasMonitorAttributes)
		return;

	blockLen = (20 * settings->MonitorCount) + 16;
	gcc_write_user_data_header(s, CS_MONITOR_EX, blockLen);
	Stream_Write_UINT32(s, 0);                      /* flags */
	Stream_Write_UINT32(s, 20);                     /* monitorAttributeSize */
	Stream_Write_UINT32(s, settings->MonitorCount); /* monitorCount */

	for (index = 0; index < settings->MonitorCount; index++)
	{
		Stream_Write_UINT32(
		    s, settings->MonitorDefArray[index].attributes.physicalWidth); /* physicalWidth */
		Stream_Write_UINT32(
		    s, settings->MonitorDefArray[index].attributes.physicalHeight); /* physicalHeight */
		Stream_Write_UINT32(
		    s, settings->MonitorDefArray[index].attributes.orientation); /* orientation */
		Stream_Write_UINT32(
		    s,
		    settings->MonitorDefArray[index].attributes.desktopScaleFactor); /* desktopScaleFactor */
		Stream_Write_UINT32(
		    s,
		    settings->MonitorDefArray[index].attributes.deviceScaleFactor); /* deviceScaleFactor */
	}
}
/**
 * Read a client message channel data block (TS_UD_CS_MCS_MSGCHANNEL).\n
 * @msdn{jj217627}
 * @param s stream positioned at the block payload
 * @param mcs MCS instance whose message channel id is assigned
 * @param blockLength declared length of this user data block in bytes
 */
/* Parse a client message channel data block (TS_UD_CS_MCS_MSGCHANNEL) and
 * allocate the next MCS channel id for the message channel. */
BOOL gcc_read_client_message_channel_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
	UINT32 msgChannelFlags;

	if (blockLength < 4)
		return FALSE;

	Stream_Read_UINT32(s, msgChannelFlags); /* flags (consumed, not used) */
	mcs->messageChannelId = mcs->baseChannelId++;
	return TRUE;
}
/**
 * Write a client message channel data block (TS_UD_CS_MCS_MSGCHANNEL).\n
 * @msdn{jj217627}
 * @param s output stream
 * @param mcs MCS instance holding the rdpSettings that gate the block
 */
/* Emit a client message channel data block (TS_UD_CS_MCS_MSGCHANNEL), but
 * only when a feature needing the message channel is enabled. */
void gcc_write_client_message_channel_data(wStream* s, rdpMcs* mcs)
{
	rdpSettings* settings = mcs->settings;
	const BOOL needed = settings->NetworkAutoDetect || settings->SupportHeartbeatPdu ||
	                    settings->SupportMultitransport;

	if (!needed)
		return;

	gcc_write_user_data_header(s, CS_MCS_MSGCHANNEL, 8);
	Stream_Write_UINT32(s, 0); /* flags */
}
/* Parse a server message channel data block (TS_UD_SC_MCS_MSGCHANNEL) and
 * record the MCS channel id the server assigned to the message channel. */
BOOL gcc_read_server_message_channel_data(wStream* s, rdpMcs* mcs)
{
	UINT16 msgChannelId;

	if (Stream_GetRemainingLength(s) < 2)
		return FALSE;

	Stream_Read_UINT16(s, msgChannelId); /* MCSChannelId */
	/* Save the MCS message channel id */
	mcs->messageChannelId = msgChannelId;
	return TRUE;
}
/* Emit a server message channel data block (TS_UD_SC_MCS_MSGCHANNEL); the
 * block is silently omitted when no message channel was allocated. */
BOOL gcc_write_server_message_channel_data(wStream* s, rdpMcs* mcs)
{
	/* a zero id means the client did not request a message channel */
	if (mcs->messageChannelId != 0)
	{
		if (!Stream_EnsureRemainingCapacity(s, 2 + 4))
			return FALSE;

		gcc_write_user_data_header(s, SC_MCS_MSGCHANNEL, 6);
		Stream_Write_UINT16(s, mcs->messageChannelId); /* mcsChannelId (2 bytes) */
	}

	return TRUE;
}
/**
 * Read a client multitransport channel data block (TS_UD_CS_MULTITRANSPORT).\n
 * @msdn{jj217498}
 * @param s stream positioned at the block payload
 * @param mcs MCS instance (unused beyond validation)
 * @param blockLength declared length of this user data block in bytes
 */
/* Parse a client multitransport channel data block (TS_UD_CS_MULTITRANSPORT);
 * the advertised flags are consumed but not acted upon here. */
BOOL gcc_read_client_multitransport_channel_data(wStream* s, rdpMcs* mcs, UINT16 blockLength)
{
	UINT32 multitransportFlags;

	if (blockLength < 4)
		return FALSE;

	Stream_Read_UINT32(s, multitransportFlags); /* flags */
	return TRUE;
}
/**
 * Write a client multitransport channel data block (TS_UD_CS_MULTITRANSPORT).\n
 * @msdn{jj217498}
 * @param s output stream
 * @param mcs MCS instance holding the rdpSettings to serialize
 */
/* Emit a client multitransport channel data block (TS_UD_CS_MULTITRANSPORT)
 * advertising the locally configured multitransport flags. */
void gcc_write_client_multitransport_channel_data(wStream* s, rdpMcs* mcs)
{
	const rdpSettings* settings = mcs->settings;

	gcc_write_user_data_header(s, CS_MULTITRANSPORT, 8);
	Stream_Write_UINT32(s, settings->MultitransportFlags); /* flags */
}
/* Parse a server multitransport channel data block (TS_UD_SC_MULTITRANSPORT);
 * the server's flags are consumed but otherwise ignored. */
BOOL gcc_read_server_multitransport_channel_data(wStream* s, rdpMcs* mcs)
{
	UINT32 serverFlags;

	if (Stream_GetRemainingLength(s) < 4)
		return FALSE;

	Stream_Read_UINT32(s, serverFlags); /* flags */
	return TRUE;
}
/* Emit a server multitransport channel data block (TS_UD_SC_MULTITRANSPORT)
 * with no transport flags enabled. */
void gcc_write_server_multitransport_channel_data(wStream* s, rdpMcs* mcs)
{
	const UINT32 serverFlags = 0;

	gcc_write_user_data_header(s, SC_MULTITRANSPORT, 8);
	Stream_Write_UINT32(s, serverFlags); /* flags (4 bytes) */
}
| Java |
/*
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* See the NOTICE file distributed with this work for additional information
* regarding copyright ownership. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef NETWORK_TORUS_NETWORK_H_
#define NETWORK_TORUS_NETWORK_H_
#include <json/json.h>
#include <prim/prim.h>
#include <string>
#include <vector>
#include "event/Component.h"
#include "interface/Interface.h"
#include "network/Channel.h"
#include "network/Network.h"
#include "router/Router.h"
#include "util/DimensionalArray.h"
namespace Torus {
// Torus topology network: routers arranged in a multi-dimensional
// wrap-around grid (dimensionWidths_/dimensionWeights_) with a fixed
// number of interfaces concentrated on each router (concentration_).
class Network : public ::Network {
 public:
  // Builds the full router/interface/channel structure from JSON settings.
  Network(const std::string& _name, const Component* _parent,
          MetadataHandler* _metadataHandler, Json::Value _settings);
  ~Network();

  // this is the routing algorithm factory for this network
  ::RoutingAlgorithm* createRoutingAlgorithm(
       u32 _inputPort, u32 _inputVc, const std::string& _name,
       const Component* _parent, Router* _router) override;

  // Network
  u32 numRouters() const override;
  u32 numInterfaces() const override;
  Router* getRouter(u32 _id) const override;
  Interface* getInterface(u32 _id) const override;
  // id <-> dimensional address conversions for interfaces and routers
  void translateInterfaceIdToAddress(
      u32 _id, std::vector<u32>* _address) const override;
  u32 translateInterfaceAddressToId(
      const std::vector<u32>* _address) const override;
  void translateRouterIdToAddress(
      u32 _id, std::vector<u32>* _address) const override;
  u32 translateRouterAddressToId(
      const std::vector<u32>* _address) const override;
  // minimal hop count between two interface addresses on the torus
  u32 computeMinimalHops(const std::vector<u32>* _source,
                         const std::vector<u32>* _destination) const override;

 protected:
  void collectChannels(std::vector<Channel*>* _channels) override;

 private:
  u32 dimensions_;                          // number of torus dimensions
  u32 concentration_;                       // interfaces per router
  std::vector<u32> dimensionWidths_;        // routers per dimension
  std::vector<u32> dimensionWeights_;       // parallel links per dimension
  DimensionalArray<Router*> routers_;
  DimensionalArray<Interface*> interfaces_;
  std::vector<Channel*> internalChannels_;  // router-to-router links
  std::vector<Channel*> externalChannels_;  // interface-to-router links
};
} // namespace Torus
#endif // NETWORK_TORUS_NETWORK_H_
| Java |
/*
* Copyright 2009 Martin Grotzke
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package de.javakaffee.web.msm;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyString;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.*;
import static org.testng.Assert.assertEquals;
import java.io.IOException;
import java.util.Collection;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.annotation.Nonnull;
import javax.servlet.ServletException;
import javax.servlet.http.Cookie;
import org.apache.catalina.Context;
import org.apache.catalina.Host;
import org.apache.catalina.Valve;
import org.apache.catalina.connector.Request;
import org.apache.catalina.connector.Response;
import org.apache.tomcat.util.http.ServerCookie;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import de.javakaffee.web.msm.MemcachedSessionService.SessionManager;
/**
* Test the {@link RequestTrackingHostValve}.
*
* @author <a href="mailto:[email protected]">Martin Grotzke</a>
* @version $Id$
*/
public abstract class RequestTrackingHostValveTest {

    protected MemcachedSessionService _service;
    private RequestTrackingHostValve _sessionTrackerValve;
    private Valve _nextValve;
    private Request _request;
    private Response _response;

    @BeforeMethod
    public void setUp() throws Exception {
        _service = mock( MemcachedSessionService.class );
        _request = mock( Request.class );
        _response = mock( Response.class );

        final Context _contextContainer = mock(Context.class);
        final Host _hostContainer = mock(Host.class);
        final SessionManager _manager = mock(SessionManager.class);
        when(_service.getManager()).thenReturn(_manager);
        when(_manager.getContext()).thenReturn(_contextContainer);
        when(_contextContainer.getParent()).thenReturn(_hostContainer);
        when(_contextContainer.getPath()).thenReturn("/");

        _sessionTrackerValve = createSessionTrackerValve();
        _nextValve = mock( Valve.class );
        _sessionTrackerValve.setNext( _nextValve );
        _sessionTrackerValve.setContainer(_hostContainer);

        when(_request.getRequestURI()).thenReturn( "/someRequest");
        when(_request.getMethod()).thenReturn("GET");
        when(_request.getQueryString()).thenReturn(null);
        when(_request.getContext()).thenReturn(_contextContainer);
        when(_request.getNote(eq(RequestTrackingHostValve.REQUEST_PROCESSED))).thenReturn(Boolean.TRUE);
        when(_request.getNote(eq(RequestTrackingHostValve.SESSION_ID_CHANGED))).thenReturn(Boolean.FALSE);
    }

    @Nonnull
    protected RequestTrackingHostValve createSessionTrackerValve() {
        return new RequestTrackingHostValve(".*\\.(png|gif|jpg|css|js|ico)$", "somesessionid", _service, Statistics.create(),
                new AtomicBoolean( true ), new CurrentRequest()) {
            @Override
            protected String[] getSetCookieHeaders(final Response response) {
                return RequestTrackingHostValveTest.this.getSetCookieHeaders(response);
            }
        };
    }

    protected abstract String[] getSetCookieHeaders(final Response response);

    @AfterMethod
    public void tearDown() throws Exception {
        reset( _service,
                _nextValve,
                _request,
                _response );
    }

    @Test
    public final void testGetSessionCookieName() throws IOException, ServletException {
        final RequestTrackingHostValve cut = new RequestTrackingHostValve(null, "foo", _service, Statistics.create(),
                new AtomicBoolean( true ), new CurrentRequest()) {
            @Override
            protected String[] getSetCookieHeaders(final Response response) {
                final Collection<String> result = response.getHeaders("Set-Cookie");
                return result.toArray(new String[result.size()]);
            }
        };
        assertEquals(cut.getSessionCookieName(), "foo");
    }

    @Test
    public final void testProcessRequestNotePresent() throws IOException, ServletException {
        _sessionTrackerValve.invoke( _request, _response );

        verify( _service, never() ).backupSession( anyString(), anyBoolean(), anyString() );
        // Fixed: verify the same note key that setUp() stubs (REQUEST_PROCESSED);
        // the previous REQUEST_PROCESS referenced a different/nonexistent constant.
        verify(_request).setNote(eq(RequestTrackingHostValve.REQUEST_PROCESSED), eq(Boolean.TRUE));
    }

    @Test
    public final void testBackupSessionNotInvokedWhenNoSessionIdPresent() throws IOException, ServletException {
        when( _request.getRequestedSessionId() ).thenReturn( null );
        when( _response.getHeader( eq( "Set-Cookie" ) ) ).thenReturn( null );
        _sessionTrackerValve.invoke( _request, _response );
        verify( _service, never() ).backupSession( anyString(), anyBoolean(), anyString() );
    }

    @Test
    public final void testBackupSessionInvokedWhenResponseCookiePresent() throws IOException, ServletException {
        when( _request.getRequestedSessionId() ).thenReturn( null );
        final Cookie cookie = new Cookie( _sessionTrackerValve.getSessionCookieName(), "foo" );
        setupGetResponseSetCookieHeadersExpectations(_response, new String[]{generateCookieString( cookie )});
        _sessionTrackerValve.invoke( _request, _response );

        verify( _service ).backupSession( eq( "foo" ), eq( false), anyString() );
    }

    @Test
    public final void testChangeSessionIdForRelocatedSession() throws IOException, ServletException {
        final String sessionId = "bar";
        final String newSessionId = "newId";

        when(_request.getNote(eq(RequestTrackingHostValve.SESSION_ID_CHANGED))).thenReturn(Boolean.TRUE);
        when( _request.getRequestedSessionId() ).thenReturn( sessionId );
        final Cookie cookie = new Cookie( _sessionTrackerValve.getSessionCookieName(), newSessionId );
        setupGetResponseSetCookieHeadersExpectations(_response, new String[]{generateCookieString( cookie )});

        _sessionTrackerValve.invoke( _request, _response );

        verify( _service ).backupSession( eq( newSessionId ), eq( true ), anyString() );
    }

    @Test
    public final void testRequestFinishedShouldBeInvokedForIgnoredResources() throws IOException, ServletException {
        when( _request.getRequestedSessionId() ).thenReturn( "foo" );
        when(_request.getRequestURI()).thenReturn("/pixel.gif");

        _sessionTrackerValve.invoke( _request, _response );
        verify( _service ).requestFinished( eq( "foo" ), anyString() );
    }

    protected abstract void setupGetResponseSetCookieHeadersExpectations(Response response, String[] result);

    /** Renders the given cookie as a Set-Cookie header value. */
    @Nonnull
    protected String generateCookieString(final Cookie cookie) {
        final StringBuffer sb = new StringBuffer();
        ServerCookie.appendCookieValue
        (sb, cookie.getVersion(), cookie.getName(), cookie.getValue(),
                cookie.getPath(), cookie.getDomain(), cookie.getComment(),
                cookie.getMaxAge(), cookie.getSecure(), true);
        final String setSessionCookieHeader = sb.toString();
        return setSessionCookieHeader;
    }
}
| Java |
/*
* Copyright (c) 2003-2012 Apple Inc. All rights reserved.
*
* @APPLE_OSREFERENCE_LICENSE_HEADER_START@
*
* This file contains Original Code and/or Modifications of Original Code
* as defined in and that are subject to the Apple Public Source License
* Version 2.0 (the 'License'). You may not use this file except in
* compliance with the License. The rights granted to you under the License
* may not be used to create, or enable the creation or redistribution of,
* unlawful or unlicensed copies of an Apple operating system, or to
* circumvent, violate, or enable the circumvention or violation of, any
* terms of an Apple operating system software license agreement.
*
* Please obtain a copy of the License at
* http://www.opensource.apple.com/apsl/ and read it before using this file.
*
* The Original Code and all software distributed under the License are
* distributed on an 'AS IS' basis, WITHOUT WARRANTY OF ANY KIND, EITHER
* EXPRESS OR IMPLIED, AND APPLE HEREBY DISCLAIMS ALL SUCH WARRANTIES,
* INCLUDING WITHOUT LIMITATION, ANY WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE, QUIET ENJOYMENT OR NON-INFRINGEMENT.
* Please see the License for the specific language governing rights and
* limitations under the License.
*
* @APPLE_OSREFERENCE_LICENSE_HEADER_END@
*/
/* Provide the classic BSD FD_SET() macro in terms of the Darwin-prefixed
 * implementation when the platform headers have not already defined it. */
#ifndef FD_SET
#define FD_SET(n, p) __DARWIN_FD_SET(n, p)
#endif /* FD_SET */
| Java |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import time
import urllib
from tempest.common import rest_client
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
CONF = config.CONF
LOG = logging.getLogger(__name__)
class SnapshotsClientJSON(rest_client.RestClient):
    """Client class to send CRUD Volume snapshot API requests."""

    def __init__(self, auth_provider):
        super(SnapshotsClientJSON, self).__init__(auth_provider)
        self.service = CONF.volume.catalog_type
        # Polling parameters used by wait_for_snapshot_status (seconds).
        self.build_interval = CONF.volume.build_interval
        self.build_timeout = CONF.volume.build_timeout

    def list_snapshots(self, params=None):
        """List all the snapshots.

        params: optional dict of query-string filters.
        """
        url = 'snapshots'
        if params:
            url += '?%s' % urllib.urlencode(params)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['snapshots']

    def list_snapshots_with_detail(self, params=None):
        """List the details of all snapshots.

        params: optional dict of query-string filters.
        """
        url = 'snapshots/detail'
        if params:
            url += '?%s' % urllib.urlencode(params)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['snapshots']

    def get_snapshot(self, snapshot_id):
        """Returns the details of a single snapshot."""
        url = "snapshots/%s" % str(snapshot_id)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['snapshot']

    def create_snapshot(self, volume_id, **kwargs):
        """
        Creates a new snapshot.
        volume_id(Required): id of the volume.
        force: Create a snapshot even if the volume attached (Default=False)
        display_name: Optional snapshot Name.
        display_description: User friendly snapshot description.
        """
        post_body = {'volume_id': volume_id}
        post_body.update(kwargs)
        post_body = json.dumps({'snapshot': post_body})
        resp, body = self.post('snapshots', post_body)
        body = json.loads(body)
        return resp, body['snapshot']

    def update_snapshot(self, snapshot_id, **kwargs):
        """Updates a snapshot; kwargs become the snapshot attributes to set."""
        put_body = json.dumps({'snapshot': kwargs})
        resp, body = self.put('snapshots/%s' % snapshot_id, put_body)
        body = json.loads(body)
        return resp, body['snapshot']

    # NOTE(afazekas): just for the wait function
    def _get_snapshot_status(self, snapshot_id):
        resp, body = self.get_snapshot(snapshot_id)
        status = body['status']
        # NOTE(afazekas): snapshot can reach an "error"
        # state in a "normal" lifecycle
        if (status == 'error'):
            raise exceptions.SnapshotBuildErrorException(
                snapshot_id=snapshot_id)
        return status

    # NOTE(afazkas): Wait reinvented again. It is not in the correct layer
    def wait_for_snapshot_status(self, snapshot_id, status):
        """Waits for a Snapshot to reach a given status.

        Polls every build_interval seconds; raises TimeoutException after
        build_timeout seconds, SnapshotBuildErrorException on 'error' state.
        """
        start_time = time.time()
        old_value = value = self._get_snapshot_status(snapshot_id)
        while True:
            # Measure elapsed time fresh each iteration so the timeout
            # check below is not based on a stale value.
            dtime = time.time() - start_time
            if value != old_value:
                # BUGFIX: the two adjacent string literals previously
                # concatenated without a separating space ("...%s"in %d...).
                LOG.info('Value transition from "%s" to "%s" '
                         'in %d second(s).', old_value, value, dtime)
            if value == status:
                return value
            if dtime > self.build_timeout:
                message = ('Time Limit Exceeded! (%ds) '
                           'while waiting for %s, '
                           'but we got %s.' %
                           (self.build_timeout, status, value))
                raise exceptions.TimeoutException(message)
            # BUGFIX: sleep exactly once per iteration (the original slept
            # both before and after the checks, doubling the poll period).
            time.sleep(self.build_interval)
            old_value = value
            value = self._get_snapshot_status(snapshot_id)

    def delete_snapshot(self, snapshot_id):
        """Delete Snapshot."""
        return self.delete("snapshots/%s" % str(snapshot_id))

    def is_resource_deleted(self, id):
        """Return True once the snapshot can no longer be fetched."""
        try:
            self.get_snapshot(id)
        except exceptions.NotFound:
            return True
        return False

    def reset_snapshot_status(self, snapshot_id, status):
        """Reset the specified snapshot's status (admin action)."""
        post_body = json.dumps({'os-reset_status': {"status": status}})
        resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
        return resp, body

    def update_snapshot_status(self, snapshot_id, status, progress):
        """Update the specified snapshot's status and progress."""
        post_body = {
            'status': status,
            'progress': progress
        }
        post_body = json.dumps({'os-update_snapshot_status': post_body})
        url = 'snapshots/%s/action' % str(snapshot_id)
        resp, body = self.post(url, post_body)
        return resp, body

    def create_snapshot_metadata(self, snapshot_id, metadata):
        """Create metadata for the snapshot."""
        put_body = json.dumps({'metadata': metadata})
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.post(url, put_body)
        body = json.loads(body)
        return resp, body['metadata']

    def get_snapshot_metadata(self, snapshot_id):
        """Get metadata of the snapshot."""
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.get(url)
        body = json.loads(body)
        return resp, body['metadata']

    def update_snapshot_metadata(self, snapshot_id, metadata):
        """Update metadata for the snapshot (replaces the whole mapping)."""
        put_body = json.dumps({'metadata': metadata})
        url = "snapshots/%s/metadata" % str(snapshot_id)
        resp, body = self.put(url, put_body)
        body = json.loads(body)
        return resp, body['metadata']

    def update_snapshot_metadata_item(self, snapshot_id, id, meta_item):
        """Update a single metadata item for the snapshot."""
        put_body = json.dumps({'meta': meta_item})
        url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
        resp, body = self.put(url, put_body)
        body = json.loads(body)
        return resp, body['meta']

    def delete_snapshot_metadata_item(self, snapshot_id, id):
        """Delete a single metadata item for the snapshot."""
        url = "snapshots/%s/metadata/%s" % (str(snapshot_id), str(id))
        resp, body = self.delete(url)
        return resp, body

    def force_delete_snapshot(self, snapshot_id):
        """Force Delete Snapshot (admin action)."""
        post_body = json.dumps({'os-force_delete': {}})
        resp, body = self.post('snapshots/%s/action' % snapshot_id, post_body)
        return resp, body
| Java |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#include <aws/qldb/model/TagResourceResult.h>
#include <aws/core/utils/json/JsonSerializer.h>
#include <aws/core/AmazonWebServiceResult.h>
#include <aws/core/utils/StringUtils.h>
#include <aws/core/utils/UnreferencedParam.h>
#include <utility>
using namespace Aws::QLDB::Model;
using namespace Aws::Utils::Json;
using namespace Aws::Utils;
using namespace Aws;
// Default constructor: TagResourceResult carries no payload members.
TagResourceResult::TagResourceResult()
{
}
// Construct from a service response by delegating to operator=.
TagResourceResult::TagResourceResult(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  *this = result;
}
TagResourceResult& TagResourceResult::operator =(const Aws::AmazonWebServiceResult<JsonValue>& result)
{
  // The TagResource response has no body fields to deserialize, so the
  // incoming result is deliberately ignored.
  AWS_UNREFERENCED_PARAM(result);
  return *this;
}
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.algebricks.rewriter.rules.subplan;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang3.mutable.Mutable;
import org.apache.commons.lang3.mutable.MutableObject;
import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
import org.apache.hyracks.algebricks.common.utils.ListSet;
import org.apache.hyracks.algebricks.common.utils.Pair;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalExpression;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.base.ILogicalPlan;
import org.apache.hyracks.algebricks.core.algebra.base.IOptimizationContext;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalOperatorTag;
import org.apache.hyracks.algebricks.core.algebra.base.LogicalVariable;
import org.apache.hyracks.algebricks.core.algebra.expressions.ConstantExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.ScalarFunctionCallExpression;
import org.apache.hyracks.algebricks.core.algebra.expressions.VariableReferenceExpression;
import org.apache.hyracks.algebricks.core.algebra.functions.AlgebricksBuiltinFunctions;
import org.apache.hyracks.algebricks.core.algebra.functions.IFunctionInfo;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractBinaryJoinOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AbstractLogicalOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AggregateOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.AssignOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.DataSourceScanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.GroupByOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.NestedTupleSourceOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.ProjectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SelectOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.SubplanOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.UnnestOperator;
import org.apache.hyracks.algebricks.core.algebra.operators.logical.visitors.VariableUtilities;
import org.apache.hyracks.algebricks.core.algebra.plan.ALogicalPlanImpl;
import org.apache.hyracks.algebricks.core.algebra.properties.FunctionalDependency;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorManipulationUtil;
import org.apache.hyracks.algebricks.core.algebra.util.OperatorPropertiesUtil;
import org.apache.hyracks.algebricks.core.config.AlgebricksConfig;
import org.apache.hyracks.algebricks.core.rewriter.base.IAlgebraicRewriteRule;
import org.apache.hyracks.algebricks.rewriter.util.PhysicalOptimizationsUtil;
/**
* The rule searches for SUBPLAN operator with a optional PROJECT operator and
* an AGGREGATE followed by a join operator.
*
* <pre>
* Before
*
* plan__parent
* SUBPLAN {
* PROJECT?
* AGGREGATE
* plan__nested_A
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
*
* where $condition does not equal a constant true.
*
* After (This is a general application of the rule, specifics may vary based on the query plan.)
*
* plan__parent
* GROUP_BY {
* PROJECT?
* AGGREGATE
* plan__nested_A
* SELECT( algebricks:not( is_null( $right ) ) )
* NESTED_TUPLE_SOURCE
* }
* SUBPLAN {
* INNER_JOIN | LEFT_OUTER_JOIN ($condition, $left, $right)
* plan__nested_B
* }
* plan__child
* </pre>
*
* @author prestonc
*/
public class IntroduceGroupByForSubplanRule implements IAlgebraicRewriteRule {

    @Override
    public boolean rewritePre(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        // All rewriting happens on the way back up, in rewritePost.
        return false;
    }

    @Override
    public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
            throws AlgebricksException {
        AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
        if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
            return false;
        }
        SubplanOperator subplan = (SubplanOperator) op0;

        // NOTE(review): this loop walks the iterator to its end, so only the
        // *last* nested plan is inspected — confirm whether multi-plan
        // subplans were meant to be rejected instead.
        Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
        ILogicalPlan p = null;
        while (plansIter.hasNext()) {
            p = plansIter.next();
        }
        if (p == null) {
            return false;
        }
        if (p.getRoots().size() != 1) {
            return false;
        }
        Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
        AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();
        Mutable<ILogicalOperator> botRef = subplanRoot;
        AbstractLogicalOperator op2;
        // Project is optional
        if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
            op2 = op1;
        } else {
            ProjectOperator project = (ProjectOperator) op1;
            botRef = project.getInputs().get(0);
            op2 = (AbstractLogicalOperator) botRef.getValue();
        }
        if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
            return false;
        }
        AggregateOperator aggregate = (AggregateOperator) op2;

        // Accumulate the "free" variables: variables used above that are not
        // produced inside the single-input pipeline below the aggregate.
        Set<LogicalVariable> free = new HashSet<LogicalVariable>();
        VariableUtilities.getUsedVariables(aggregate, free);

        Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
        AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();
        while (op3.getInputs().size() == 1) {
            Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
            VariableUtilities.getProducedVariables(op3, prod);
            free.removeAll(prod);
            VariableUtilities.getUsedVariables(op3, free);
            botRef = op3Ref;
            op3Ref = op3.getInputs().get(0);
            op3 = (AbstractLogicalOperator) op3Ref.getValue();
        }
        // The descent must end at an inner or left-outer join (the first
        // operator with more than one input).
        if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
                && op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
            return false;
        }
        AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
        // A constant-true condition means a cross product; the rule does not
        // apply (see the class javadoc).
        if (join.getCondition().getValue() == ConstantExpression.TRUE) {
            return false;
        }
        VariableUtilities.getUsedVariables(join, free);

        AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
        // see if there's an NTS at the end of the pipeline
        NestedTupleSourceOperator outerNts = getNts(b0);
        if (outerNts == null) {
            AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
            outerNts = getNts(b1);
            if (outerNts == null) {
                return false;
            }
        }

        Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
        if (pkVars == null || pkVars.size() < 1) {
            // there is no non-trivial primary key, group-by keys are all live variables
            // that were produced by descendant or self
            ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
            pkVars = new HashSet<LogicalVariable>();
            //get live variables
            VariableUtilities.getLiveVariables(subplanInput, pkVars);
            //get produced variables
            Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
            VariableUtilities.getProducedVariablesInDescendantsAndSelf(subplanInput, producedVars);
            //retain the intersection
            pkVars.retainAll(producedVars);
        }
        AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);

        // Pick a variable from the join's right branch whose null-ness marks
        // unmatched (outer) tuples; the group-by's nested SELECT filters on it.
        Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
        LogicalVariable testForNull = null;
        AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
        switch (right.getOperatorTag()) {
            case UNNEST: {
                UnnestOperator innerUnnest = (UnnestOperator) right;
                // Select [ $y != null ]
                testForNull = innerUnnest.getVariable();
                break;
            }
            case RUNNINGAGGREGATE: {
                ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
                Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
                VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
                if (!producedVars.isEmpty()) {
                    // Select [ $y != null ]
                    testForNull = producedVars.iterator().next();
                }
                break;
            }
            case DATASOURCESCAN: {
                DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
                // Select [ $y != null ]
                if (innerScan.getVariables().size() == 1) {
                    testForNull = innerScan.getVariables().get(0);
                }
                break;
            }
            default:
                break;
        }
        // No suitable variable found: introduce an assign of constant TRUE on
        // the right branch so there is always something to null-test.
        if (testForNull == null) {
            testForNull = context.newVar();
            AssignOperator tmpAsgn = new AssignOperator(testForNull,
                    new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
            tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
            rightRef.setValue(tmpAsgn);
            context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
        }

        // Build not(is-missing(testForNull)) as the nested-plan selection.
        IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_MISSING);
        ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
        IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
        ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
                new MutableObject<ILogicalExpression>(isNullTest));
        SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
                null);
        GroupByOperator g = new GroupByOperator();
        Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
        NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
        // The new group-by replaces the subplan at the original position.
        opRef.setValue(g);

        // Rewire: aggregate pipeline now reads SELECT(not-null) over an NTS
        // rooted at the new group-by.
        selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));
        List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
        prodInpList.clear();
        prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));

        // Move the aggregate pipeline into the group-by's nested plan and
        // leave the join inside the (now smaller) subplan beneath it.
        ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
        g.getNestedPlans().add(gPlan);
        subplanRoot.setValue(op3Ref.getValue());
        g.getInputs().add(newSubplanRef);

        // Group by the inferred key variables; decorate with the remaining
        // live variables so they survive the group-by.
        HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
        VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
        underVars.removeAll(pkVars);
        Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
        context.updatePrimaryKeys(mappedVars);
        for (LogicalVariable uv : underVars) {
            g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
                    new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
        }
        OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
        OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
        context.computeAndSetTypeEnvironmentForOperator(g);
        return true;
    }

    /**
     * Walks down a single-input chain starting at {@code op} and returns the
     * NESTEDTUPLESOURCE at its end, or null if the chain branches or ends
     * without one.
     */
    private NestedTupleSourceOperator getNts(AbstractLogicalOperator op) {
        AbstractLogicalOperator alo = op;
        do {
            if (alo.getOperatorTag() == LogicalOperatorTag.NESTEDTUPLESOURCE) {
                return (NestedTupleSourceOperator) alo;
            }
            if (alo.getInputs().size() != 1) {
                return null;
            }
            alo = (AbstractLogicalOperator) alo.getInputs().get(0).getValue();
        } while (true);
    }

    /**
     * Computes candidate group-by variables: the head of the first functional
     * dependency whose tail covers all live free variables at {@code op}.
     * Returns null when no FD information or no covering FD exists.
     */
    protected Set<LogicalVariable> computeGbyVars(AbstractLogicalOperator op, Set<LogicalVariable> freeVars,
            IOptimizationContext context) throws AlgebricksException {
        PhysicalOptimizationsUtil.computeFDsAndEquivalenceClasses(op, context);
        List<FunctionalDependency> fdList = context.getFDList(op);
        if (fdList == null) {
            return null;
        }
        // check if any of the FDs is a key
        List<LogicalVariable> all = new ArrayList<LogicalVariable>();
        VariableUtilities.getLiveVariables(op, all);
        all.retainAll(freeVars);
        for (FunctionalDependency fd : fdList) {
            if (fd.getTail().containsAll(all)) {
                return new HashSet<LogicalVariable>(fd.getHead());
            }
        }
        return null;
    }

    /**
     * For each variable in {@code vars}, creates a fresh variable, appends the
     * (old, new-reference) pair to {@code outVeList}, and substitutes the old
     * variable with the new one throughout {@code g}'s nested plans and input.
     * Returns the old-to-new variable mapping.
     */
    private Map<LogicalVariable, LogicalVariable> buildVarExprList(Collection<LogicalVariable> vars,
            IOptimizationContext context, GroupByOperator g,
            List<Pair<LogicalVariable, Mutable<ILogicalExpression>>> outVeList) throws AlgebricksException {
        Map<LogicalVariable, LogicalVariable> m = new HashMap<LogicalVariable, LogicalVariable>();
        for (LogicalVariable ov : vars) {
            LogicalVariable newVar = context.newVar();
            ILogicalExpression varExpr = new VariableReferenceExpression(newVar);
            outVeList.add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(ov,
                    new MutableObject<ILogicalExpression>(varExpr)));
            for (ILogicalPlan p : g.getNestedPlans()) {
                for (Mutable<ILogicalOperator> r : p.getRoots()) {
                    OperatorManipulationUtil.substituteVarRec((AbstractLogicalOperator) r.getValue(), ov, newVar, true,
                            context);
                }
            }
            AbstractLogicalOperator opUnder = (AbstractLogicalOperator) g.getInputs().get(0).getValue();
            OperatorManipulationUtil.substituteVarRec(opUnder, ov, newVar, true, context);
            m.put(ov, newVar);
        }
        return m;
    }
}
| Java |
/**
* Copyright 2009 - 2011 Sergio Bossa ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package terrastore.store.features;
import java.io.IOException;
import java.io.Serializable;
import java.util.Map;
import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;
import org.msgpack.MessagePackable;
import org.msgpack.MessageTypeException;
import org.msgpack.MessageUnpackable;
import org.msgpack.Packer;
import org.msgpack.Unpacker;
import terrastore.util.io.MsgPackUtils;
/**
* Update object carrying data about the update function, timeout and parameters.
*
* @author Sergio Bossa
*/
public class Update implements MessagePackable, MessageUnpackable, Serializable {

    private static final long serialVersionUID = 12345678901L;
    //
    // Name of the update function to run.
    private String functionName;
    // Maximum time the update may take, in milliseconds.
    private long timeoutInMillis;
    // Arbitrary parameters handed to the update function.
    private Map<String, Object> parameters;

    /**
     * Builds an update descriptor.
     *
     * @param functionName name of the update function to run
     * @param timeoutInMillis maximum time the update may take, in milliseconds
     * @param parameters parameters passed to the update function
     */
    public Update(String functionName, long timeoutInMillis, Map<String, Object> parameters) {
        this.functionName = functionName;
        this.timeoutInMillis = timeoutInMillis;
        this.parameters = parameters;
    }

    /** No-arg constructor required for message unpacking. */
    public Update() {
    }

    public long getTimeoutInMillis() {
        return timeoutInMillis;
    }

    public String getFunctionName() {
        return functionName;
    }

    public Map<String, Object> getParameters() {
        return parameters;
    }

    /**
     * Serializes this object. Field order (functionName, timeoutInMillis,
     * parameters) must stay in sync with {@link #messageUnpack(Unpacker)}.
     */
    @Override
    public void messagePack(Packer packer) throws IOException {
        MsgPackUtils.packString(packer, functionName);
        MsgPackUtils.packLong(packer, timeoutInMillis);
        MsgPackUtils.packGenericMap(packer, parameters);
    }

    /**
     * Deserializes into this object, reading fields in the same order they
     * are written by {@link #messagePack(Packer)}.
     */
    @Override
    public void messageUnpack(Unpacker unpacker) throws IOException, MessageTypeException {
        functionName = MsgPackUtils.unpackString(unpacker);
        timeoutInMillis = MsgPackUtils.unpackLong(unpacker);
        parameters = MsgPackUtils.unpackGenericMap(unpacker);
    }

    /** Equality over all three fields, consistent with {@link #hashCode()}. */
    @Override
    public boolean equals(Object obj) {
        if (obj instanceof Update) {
            Update other = (Update) obj;
            return new EqualsBuilder().append(this.functionName, other.functionName).append(this.timeoutInMillis, other.timeoutInMillis).append(this.parameters, other.parameters).
                    isEquals();
        } else {
            return false;
        }
    }

    @Override
    public int hashCode() {
        return new HashCodeBuilder().append(functionName).append(timeoutInMillis).append(parameters).toHashCode();
    }
}
| Java |
package assertion_test
import (
"errors"
. "github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/ginkgo"
. "github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/gomega"
. "github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/gomega/internal/assertion"
"github.com/cloudfoundry/bosh-agent/internal/github.com/onsi/gomega/internal/fakematcher"
)
// Spec for the internal Assertion type: matcher invocation, failure
// reporting (message and caller-skip), optional descriptions, and
// extra-return-value ("error in the tail") handling.
var _ = Describe("Assertion", func() {
	var (
		a                 *Assertion
		failureMessage    string
		failureCallerSkip int
		matcher           *fakematcher.FakeMatcher
	)

	input := "The thing I'm testing"

	// Records the failure instead of failing the suite, so the specs can
	// assert on what Assertion reported.
	var fakeFailHandler = func(message string, callerSkip ...int) {
		failureMessage = message
		if len(callerSkip) == 1 {
			failureCallerSkip = callerSkip[0]
		}
	}

	BeforeEach(func() {
		matcher = &fakematcher.FakeMatcher{}
		failureMessage = ""
		failureCallerSkip = 0
		a = New(input, fakeFailHandler, 1)
	})

	Context("when called", func() {
		It("should pass the provided input value to the matcher", func() {
			// All five assertion entry points must forward the same actual.
			a.Should(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""

			a.ShouldNot(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""

			a.To(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""

			a.ToNot(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
			matcher.ReceivedActual = ""

			a.NotTo(matcher)
			Ω(matcher.ReceivedActual).Should(Equal(input))
		})
	})

	Context("when the matcher succeeds", func() {
		BeforeEach(func() {
			matcher.MatchesToReturn = true
			matcher.ErrToReturn = nil
		})

		Context("and a positive assertion is being made", func() {
			It("should not call the failure callback", func() {
				a.Should(matcher)
				Ω(failureMessage).Should(Equal(""))
			})

			It("should be true", func() {
				Ω(a.Should(matcher)).Should(BeTrue())
			})
		})

		Context("and a negative assertion is being made", func() {
			It("should call the failure callback", func() {
				a.ShouldNot(matcher)
				Ω(failureMessage).Should(Equal("negative: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})

			It("should be false", func() {
				Ω(a.ShouldNot(matcher)).Should(BeFalse())
			})
		})
	})

	Context("when the matcher fails", func() {
		BeforeEach(func() {
			matcher.MatchesToReturn = false
			matcher.ErrToReturn = nil
		})

		Context("and a positive assertion is being made", func() {
			It("should call the failure callback", func() {
				a.Should(matcher)
				Ω(failureMessage).Should(Equal("positive: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})

			It("should be false", func() {
				Ω(a.Should(matcher)).Should(BeFalse())
			})
		})

		Context("and a negative assertion is being made", func() {
			It("should not call the failure callback", func() {
				a.ShouldNot(matcher)
				Ω(failureMessage).Should(Equal(""))
			})

			It("should be true", func() {
				Ω(a.ShouldNot(matcher)).Should(BeTrue())
			})
		})
	})

	Context("When reporting a failure", func() {
		BeforeEach(func() {
			matcher.MatchesToReturn = false
			matcher.ErrToReturn = nil
		})

		Context("and there is an optional description", func() {
			It("should append the description to the failure message", func() {
				a.Should(matcher, "A description")
				Ω(failureMessage).Should(Equal("A description\npositive: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})

		Context("and there are multiple arguments to the optional description", func() {
			It("should append the formatted description to the failure message", func() {
				a.Should(matcher, "A description of [%d]", 3)
				Ω(failureMessage).Should(Equal("A description of [3]\npositive: The thing I'm testing"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
	})

	Context("When the matcher returns an error", func() {
		BeforeEach(func() {
			matcher.ErrToReturn = errors.New("Kaboom!")
		})

		Context("and a positive assertion is being made", func() {
			It("should call the failure callback", func() {
				matcher.MatchesToReturn = true
				a.Should(matcher)
				Ω(failureMessage).Should(Equal("Kaboom!"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})

		Context("and a negative assertion is being made", func() {
			It("should call the failure callback", func() {
				matcher.MatchesToReturn = false
				a.ShouldNot(matcher)
				Ω(failureMessage).Should(Equal("Kaboom!"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})

		It("should always be false", func() {
			// A matcher error overrides the match result in both directions.
			Ω(a.Should(matcher)).Should(BeFalse())
			Ω(a.ShouldNot(matcher)).Should(BeFalse())
		})
	})

	Context("when there are extra parameters", func() {
		It("(a simple example)", func() {
			Ω(func() (string, int, error) {
				return "foo", 0, nil
			}()).Should(Equal("foo"))
		})

		Context("when the parameters are all nil or zero", func() {
			It("should invoke the matcher", func() {
				matcher.MatchesToReturn = true
				matcher.ErrToReturn = nil

				var typedNil []string
				a = New(input, fakeFailHandler, 1, 0, nil, typedNil)

				result := a.Should(matcher)
				Ω(result).Should(BeTrue())

				Ω(matcher.ReceivedActual).Should(Equal(input))
				Ω(failureMessage).Should(BeZero())
			})
		})

		Context("when any of the parameters are not nil or zero", func() {
			It("should call the failure callback", func() {
				// A non-zero extra parameter fails the assertion before the
				// matcher is ever consulted.
				matcher.MatchesToReturn = false
				matcher.ErrToReturn = nil
				a = New(input, fakeFailHandler, 1, errors.New("foo"))

				result := a.Should(matcher)
				Ω(result).Should(BeFalse())
				Ω(matcher.ReceivedActual).Should(BeZero(), "The matcher doesn't even get called")
				Ω(failureMessage).Should(ContainSubstring("foo"))
				failureMessage = ""

				a = New(input, fakeFailHandler, 1, nil, 1)
				result = a.ShouldNot(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("1"))
				failureMessage = ""

				a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
				result = a.To(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("foo"))
				failureMessage = ""

				a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
				result = a.ToNot(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("foo"))
				failureMessage = ""

				a = New(input, fakeFailHandler, 1, nil, 0, []string{"foo"})
				result = a.NotTo(matcher)
				Ω(result).Should(BeFalse())
				Ω(failureMessage).Should(ContainSubstring("foo"))
				Ω(failureCallerSkip).Should(Equal(3))
			})
		})
	})

	Context("Making an assertion without a registered fail handler", func() {
		It("should panic", func() {
			defer func() {
				e := recover()
				// Restore the real handler before asserting, so a missing
				// panic is reported through the normal mechanism.
				RegisterFailHandler(Fail)
				if e == nil {
					Fail("expected a panic to have occured")
				}
			}()

			RegisterFailHandler(nil)
			Ω(true).Should(BeTrue())
		})
	})
})
| Java |
#!/usr/bin/env bash
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Usage:
# ci_parameterized_build.sh
#
# The script obeys the following required environment variables:
# TF_BUILD_CONTAINER_TYPE: (CPU | GPU | ANDROID | ANDROID_FULL)
# TF_BUILD_PYTHON_VERSION: (PYTHON2 | PYTHON3 | PYTHON3.5)
# TF_BUILD_IS_PIP: (NO_PIP | PIP | BOTH)
#
# The below environment variable is required, but will be deprecated together
# with TF_BUILD_MAVX and both will be replaced by TF_BUILD_OPTIONS.
# TF_BUILD_IS_OPT: (NO_OPT | OPT)
#
# Note:
# 1) Certain combinations of parameter values are regarded
# as invalid and will cause the script to exit with code 0. For example:
# NO_OPT & PIP (PIP builds should always use OPT)
# ANDROID & PIP (Android and PIP builds are mutually exclusive)
#
# 2) TF_BUILD_PYTHON_VERSION is set to PYTHON3, the build will use the version
# pointed to by "which python3" on the system, which is typically python3.4. To
# build for python3.5, set the environment variable to PYTHON3.5
#
#
# Additionally, the script follows the directions of optional environment
# variables:
# TF_BUILD_DRY_RUN: If it is set to any non-empty value that is not "0",
# the script will just generate and print the final
# command, but not actually run it.
# TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS:
# String appended to the content of CI_DOCKER_EXTRA_PARAMS
# TF_BUILD_APPEND_ARGUMENTS:
# Additional command line arguments for the bazel,
# pip.sh or android.sh command
# TF_BUILD_MAVX: (Soon to be deprecated, use TF_BUILD_OPTIONS instead)
# (unset | MAVX | MAVX2)
# If set to MAVX or MAVX2, will cause bazel to use the
# additional flag --copt=-mavx or --copt=-mavx2, to
# perform AVX or AVX2 builds, respectively. This requires
# AVX- or AVX2-compatible CPUs.
# TF_BUILD_BAZEL_TARGET:
# Used to override the default bazel build target:
# //tensorflow/... -//tensorflow/compiler
# TF_BUILD_BAZEL_CLEAN:
# Will perform "bazel clean", if and only if this variable
# is set to any non-empty and non-0 value
# TF_BAZEL_BUILD_ONLY:
# If it is set to any non-empty value that is not "0", Bazel
# will only build specified targets
# TF_GPU_COUNT:
# Run this many parallel tests for serial builds.
# For now, only can be edited for PIP builds.
# TODO(gunan): Find a way to pass this environment variable
# to the script bazel runs (using --run_under).
# TF_BUILD_TEST_TUTORIALS:
# If set to any non-empty and non-0 value, will perform
# tutorials tests (Applicable only if TF_BUILD_IS_PIP is
# PIP or BOTH).
# See builds/test_tutorials.sh
# TF_BUILD_INTEGRATION_TESTS:
# If set this will perform integration tests. See
# builds/integration_tests.sh.
# TF_BUILD_RUN_BENCHMARKS:
# If set to any non-empty and non-0 value, will perform
# the benchmark tests (see *_logged_benchmark targets in
# tools/test/BUILD)
# TF_BUILD_OPTIONS:
# (FASTBUILD | OPT | OPTDBG | MAVX | MAVX2_FMA | MAVX_DBG |
# MAVX2_FMA_DBG)
# Use the specified configurations when building.
# When set, overrides TF_BUILD_IS_OPT and TF_BUILD_MAVX
# options, as this will replace the two.
# TF_SKIP_CONTRIB_TESTS:
# If set to any non-empty or non-0 value, will skipp running
# contrib tests.
# TF_NIGHTLY:
# If this run is being used to build the tf_nightly pip
# packages.
# TF_CUDA_CLANG:
# If set to 1, builds and runs cuda_clang configuration.
# Only available inside GPU containers.
#
# This script can be used by Jenkins parameterized / matrix builds.
# Helper function: Convert to lower case
# Uses printf rather than echo so an argument beginning with "-" or
# containing backslashes cannot be misinterpreted as echo options/escapes.
to_lower () {
  printf '%s\n' "$1" | tr '[:upper:]' '[:lower:]'
}
# Helper function: Strip leading and trailing whitespaces
# NOTE(review): "echo -e" also expands backslash escapes (\n, \t, ...) in the
# input before stripping — presumably intentional; confirm before replacing
# with printf.
str_strip () {
  echo -e "$1" | sed -e 's/^[[:space:]]*//' -e 's/[[:space:]]*$//'
}
# Helper function: Exit on failure
# Prints all arguments as the failure message, then exits with status 1.
die () {
  # Quote "$@" so the message is not re-split on whitespace or glob-expanded
  # (the unquoted $@ collapsed internal spacing and expanded wildcards).
  echo "$@"
  exit 1
}
##########################################################
# Default configuration
CI_BUILD_DIR="tensorflow/tools/ci_build"

# Command to call when Docker is available
DOCKER_MAIN_CMD="${CI_BUILD_DIR}/ci_build.sh"
# Command to call when Docker is unavailable
NO_DOCKER_MAIN_CMD="${CI_BUILD_DIR}/builds/configured"
# Additional option flags to apply when Docker is unavailable (e.g., on Mac)
NO_DOCKER_OPT_FLAG="--genrule_strategy=standalone"

DO_DOCKER=1

BAZEL_CMD="bazel test"
BAZEL_BUILD_ONLY_CMD="bazel build"
BAZEL_CLEAN_CMD="bazel clean"

DEFAULT_BAZEL_CONFIGS=""

PIP_CMD="${CI_BUILD_DIR}/builds/pip.sh"
PIP_TEST_TUTORIALS_FLAG="--test_tutorials"
PIP_INTEGRATION_TESTS_FLAG="--integration_tests"
ANDROID_CMD="${CI_BUILD_DIR}/builds/android.sh"
ANDROID_FULL_CMD="${CI_BUILD_DIR}/builds/android_full.sh"

TF_GPU_COUNT=${TF_GPU_COUNT:-8}
PARALLEL_GPU_TEST_CMD='//tensorflow/tools/ci_build/gpu_build:parallel_gpu_execute'

BENCHMARK_CMD="${CI_BUILD_DIR}/builds/benchmark.sh"

EXTRA_PARAMS=""
# Default target set: all of TensorFlow except the compiler (XLA) tree;
# contrib is excluded or lite is added depending on TF_SKIP_CONTRIB_TESTS.
BAZEL_TARGET="//tensorflow/... -//tensorflow/compiler/..."

if [[ -n "$TF_SKIP_CONTRIB_TESTS" ]]; then
  BAZEL_TARGET="$BAZEL_TARGET -//tensorflow/contrib/..."
else
  BAZEL_TARGET="${BAZEL_TARGET} //tensorflow/contrib/lite/..."
fi

TUT_TEST_DATA_DIR="/tmp/tf_tutorial_test_data"

##########################################################

echo "Parameterized build starts at: $(date)"
echo ""
START_TIME=$(date +'%s')

# Convert all the required environment variables to lower case
TF_BUILD_CONTAINER_TYPE=$(to_lower ${TF_BUILD_CONTAINER_TYPE})
TF_BUILD_PYTHON_VERSION=$(to_lower ${TF_BUILD_PYTHON_VERSION})
TF_BUILD_IS_OPT=$(to_lower ${TF_BUILD_IS_OPT})
TF_BUILD_IS_PIP=$(to_lower ${TF_BUILD_IS_PIP})

if [[ ! -z "${TF_BUILD_MAVX}" ]]; then
  TF_BUILD_MAVX=$(to_lower ${TF_BUILD_MAVX})
fi

# Print parameter values
echo "Required build parameters:"
echo "  TF_BUILD_CONTAINER_TYPE=${TF_BUILD_CONTAINER_TYPE}"
echo "  TF_BUILD_PYTHON_VERSION=${TF_BUILD_PYTHON_VERSION}"
echo "  TF_BUILD_IS_OPT=${TF_BUILD_IS_OPT}"
echo "  TF_BUILD_IS_PIP=${TF_BUILD_IS_PIP}"
echo "Optional build parameters:"
echo "  TF_BUILD_DRY_RUN=${TF_BUILD_DRY_RUN}"
echo "  TF_BUILD_MAVX=${TF_BUILD_MAVX}"
echo "  TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS="\
"${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS}"
echo "  TF_BUILD_APPEND_ARGUMENTS=${TF_BUILD_APPEND_ARGUMENTS}"
echo "  TF_BUILD_BAZEL_TARGET=${TF_BUILD_BAZEL_TARGET}"
echo "  TF_BUILD_BAZEL_CLEAN=${TF_BUILD_BAZEL_CLEAN}"
echo "  TF_BUILD_TEST_TUTORIALS=${TF_BUILD_TEST_TUTORIALS}"
echo "  TF_BUILD_INTEGRATION_TESTS=${TF_BUILD_INTEGRATION_TESTS}"
echo "  TF_BUILD_RUN_BENCHMARKS=${TF_BUILD_RUN_BENCHMARKS}"
echo "  TF_BUILD_OPTIONS=${TF_BUILD_OPTIONS}"
# Function that tries to determine CUDA capability, if deviceQuery binary
# is available on path
# Prints the CUDA compute capability reported by the first listed GPU, if the
# deviceQuery binary is available on PATH; prints nothing otherwise.
function get_cuda_capability_version() {
  local QUERY_BIN
  QUERY_BIN=$(which deviceQuery)
  if [[ -n "${QUERY_BIN}" ]]; then
    # Only the first listed device is consulted; the capability is the last
    # whitespace-separated field on the matching line.
    deviceQuery | grep "CUDA Capability .* version" | head -1 | awk '{print $NF}'
  fi
}
# Container type, e.g., CPU, GPU
CTYPE=${TF_BUILD_CONTAINER_TYPE}
# Determine if the machine is a Mac
OPT_FLAG="--test_output=errors"
if [[ "$(uname -s)" == "Darwin" ]]; then
# On macOS the build runs directly on the host, never inside Docker.
DO_DOCKER=0
echo "It appears this machine is a Mac. "\
"We will perform this build without Docker."
echo "Also, the additional option flags will be applied to the build:"
echo " ${NO_DOCKER_OPT_FLAG}"
# NOTE(review): NO_DOCKER_MAIN_CMD / NO_DOCKER_OPT_FLAG come from earlier in
# this script (outside this excerpt).
MAIN_CMD="${NO_DOCKER_MAIN_CMD} ${CTYPE}"
OPT_FLAG="${OPT_FLAG} ${NO_DOCKER_OPT_FLAG}"
fi
function set_script_variable() {
local VAR="$1"
local VALUE="$2"
if [[ $DO_DOCKER == "1" ]]; then
TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS="${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS} -e $VAR=$VALUE"
else
export $VAR="$VALUE"
fi
}
# Process container type
# cpu*/debian.jessie.cpu need no extra configuration; gpu* enables CUDA and
# tries to auto-detect the compute capability; android types are handled later.
if [[ ${CTYPE} == cpu* ]] || [[ ${CTYPE} == "debian.jessie.cpu" ]]; then
:
elif [[ ${CTYPE} == gpu* ]]; then
set_script_variable TF_NEED_CUDA 1
if [[ $TF_CUDA_CLANG == "1" ]]; then
OPT_FLAG="${OPT_FLAG} --config=cuda_clang"
set_script_variable TF_CUDA_CLANG 1
# For cuda_clang we download `clang` while building.
set_script_variable TF_DOWNLOAD_CLANG 1
else
OPT_FLAG="${OPT_FLAG} --config=cuda"
fi
# Attempt to determine CUDA capability version automatically and use it if
# CUDA capability version is not specified by the environment variables.
CUDA_CAPA_VER=$(get_cuda_capability_version)
if [[ ! -z ${CUDA_CAPA_VER} ]]; then
AUTO_CUDA_CAPA_VER=0
# Docker build: forward the detected capability into the container unless
# the caller already supplied TF_CUDA_COMPUTE_CAPABILITIES in the extras.
if [[ ${DO_DOCKER} == "1" ]] && \
[[ "${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS}" != \
*"TF_CUDA_COMPUTE_CAPABILITIES="* ]]; then
AUTO_CUDA_CAPA_VER=1
TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS=\
"${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS} -e "\
"TF_CUDA_COMPUTE_CAPABILITIES=${CUDA_CAPA_VER}"
echo "Docker GPU build: TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS="\
"\"${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS}\""
elif [[ ${DO_DOCKER} == "0" ]] && \
[[ -z "${TF_CUDA_COMPUTE_CAPABILITIES}" ]]; then
AUTO_CUDA_CAPA_VER=1
TF_CUDA_COMPUTE_CAPABILITIES="${CUDA_CAPA_VER}"
echo "Non-Docker GPU build: TF_CUDA_COMPUTE_CAPABILITIES="\
"\"${TF_CUDA_COMPUTE_CAPABILITIES}\""
fi
if [[ ${AUTO_CUDA_CAPA_VER} == "1" ]]; then
echo "TF_CUDA_COMPUTE_CAPABILITIES is not set:"
echo "Using CUDA capability version from deviceQuery: ${CUDA_CAPA_VER}"
echo ""
fi
fi
elif [[ ${CTYPE} == "android" ]] || [[ ${CTYPE} == "android_full" ]]; then
:
else
die "Unrecognized value in TF_BUILD_CONTAINER_TYPE: "\
"\"${TF_BUILD_CONTAINER_TYPE}\""
fi
# Determine if this is a benchmarks job
# Any non-empty, non-"0" value of TF_BUILD_RUN_BENCHMARKS enables benchmarks.
RUN_BENCHMARKS=0
if [[ ! -z "${TF_BUILD_RUN_BENCHMARKS}" ]] &&
[[ "${TF_BUILD_RUN_BENCHMARKS}" != "0" ]]; then
RUN_BENCHMARKS=1
fi
# Process Bazel "-c opt" flag
# TF_BUILD_OPTIONS, when set, takes precedence over TF_BUILD_IS_OPT and
# TF_BUILD_MAVX (the legacy knobs in the first branch).
if [[ -z "${TF_BUILD_OPTIONS}" ]]; then
if [[ ${TF_BUILD_IS_OPT} == "no_opt" ]]; then
# PIP builds are done only with the -c opt flag
if [[ ${TF_BUILD_IS_PIP} == "pip" ]]; then
echo "Skipping parameter combination: ${TF_BUILD_IS_OPT} & "\
"${TF_BUILD_IS_PIP}"
exit 0
fi
elif [[ ${TF_BUILD_IS_OPT} == "opt" ]]; then
OPT_FLAG="${OPT_FLAG} -c opt"
else
die "Unrecognized value in TF_BUILD_IS_OPT: \"${TF_BUILD_IS_OPT}\""
fi
# Process MAVX option
if [[ ! -z "${TF_BUILD_MAVX}" ]]; then
if [[ "${TF_BUILD_MAVX}" == "mavx" ]]; then
OPT_FLAG="${OPT_FLAG} --copt=-mavx"
elif [[ "${TF_BUILD_MAVX}" == "mavx2" ]]; then
OPT_FLAG="${OPT_FLAG} --copt=-mavx2"
else
die "Unsupported value in TF_BUILD_MAVX: ${TF_BUILD_MAVX}"
fi
fi
else
# NOTE(review): this case has no default arm, so an unrecognized
# TF_BUILD_OPTIONS value is silently treated like FASTBUILD — confirm
# whether that is intentional.
case $TF_BUILD_OPTIONS in
FASTBUILD)
echo "Running FASTBUILD mode (noopt, nodbg)."
;;
OPT)
OPT_FLAG="${OPT_FLAG} -c opt"
;;
OPTDBG)
OPT_FLAG="${OPT_FLAG} -c opt --copt=-g"
;;
MAVX)
OPT_FLAG="${OPT_FLAG} -c opt --copt=-mavx"
;;
MAVX_DBG)
OPT_FLAG="${OPT_FLAG} -c opt --copt=-g --copt=-mavx"
;;
MAVX2_FMA)
OPT_FLAG="${OPT_FLAG} -c opt --copt=-mavx2 --copt=-mfma"
;;
MAVX2_FMA_DBG)
OPT_FLAG="${OPT_FLAG} -c opt --copt=-g --copt=-mavx2 --copt=-mfma"
;;
esac
fi
# Strip whitespaces from OPT_FLAG
OPT_FLAG=$(str_strip "${OPT_FLAG}")
# 1) Filter out benchmark tests if this is not a benchmarks job;
# 2) Filter out tests with the "nomac" tag if the build is on Mac OS X.
EXTRA_ARGS=${DEFAULT_BAZEL_CONFIGS}
IS_MAC=0
if [[ "$(uname)" == "Darwin" ]]; then
IS_MAC=1
fi
# If the caller already supplied --test_tag_filters, amend that filter in
# place; otherwise append a default filter set.
if [[ "${TF_BUILD_APPEND_ARGUMENTS}" == *"--test_tag_filters="* ]]; then
ITEMS=(${TF_BUILD_APPEND_ARGUMENTS})
for ITEM in "${ITEMS[@]}"; do
if [[ ${ITEM} == *"--test_tag_filters="* ]]; then
NEW_ITEM="${ITEM}"
if [[ ${NEW_ITEM} != *"benchmark-test"* ]]; then
NEW_ITEM="${NEW_ITEM},-benchmark-test"
fi
if [[ ${IS_MAC} == "1" ]] && [[ ${NEW_ITEM} != *"nomac"* ]]; then
NEW_ITEM="${NEW_ITEM},-nomac"
fi
EXTRA_ARGS="${EXTRA_ARGS} ${NEW_ITEM}"
else
EXTRA_ARGS="${EXTRA_ARGS} ${ITEM}"
fi
done
else
EXTRA_ARGS="${EXTRA_ARGS} ${TF_BUILD_APPEND_ARGUMENTS} --test_tag_filters=-no_oss,-oss_serial,-benchmark-test"
if [[ ${IS_MAC} == "1" ]]; then
EXTRA_ARGS="${EXTRA_ARGS},-nomac"
fi
fi
# For any "tool" dependencies in genrules, Bazel will build them for host
# instead of the target configuration. We can save some build time by setting
# this flag, and it only affects a few tests.
EXTRA_ARGS="${EXTRA_ARGS} --distinct_host_configuration=false"
# Optionally switch from "bazel test" to a build-only invocation.
if [[ ! -z "${TF_BAZEL_BUILD_ONLY}" ]] &&
[[ "${TF_BAZEL_BUILD_ONLY}" != "0" ]];then
BAZEL_CMD=${BAZEL_BUILD_ONLY_CMD}
fi
# Process PIP install-test option
# Builds NO_PIP_MAIN_CMD (direct bazel test) and/or PIP_MAIN_CMD (pip build +
# install test); MAIN_CMD is then selected from them below.
if [[ ${TF_BUILD_IS_PIP} == "no_pip" ]] ||
[[ ${TF_BUILD_IS_PIP} == "both" ]]; then
# Process optional bazel target override
if [[ ! -z "${TF_BUILD_BAZEL_TARGET}" ]]; then
BAZEL_TARGET=${TF_BUILD_BAZEL_TARGET}
fi
if [[ ${CTYPE} == cpu* ]] || \
[[ ${CTYPE} == "debian.jessie.cpu" ]]; then
# CPU only command, fully parallel.
NO_PIP_MAIN_CMD="${MAIN_CMD} ${BAZEL_CMD} ${OPT_FLAG} ${EXTRA_ARGS} -- "\
"${BAZEL_TARGET}"
elif [[ ${CTYPE} == gpu* ]]; then
# GPU only command, run as many jobs as the GPU count only.
NO_PIP_MAIN_CMD="${BAZEL_CMD} ${OPT_FLAG} "\
"--local_test_jobs=${TF_GPU_COUNT} "\
"--run_under=${PARALLEL_GPU_TEST_CMD} ${EXTRA_ARGS} -- ${BAZEL_TARGET}"
elif [[ ${CTYPE} == "android" ]]; then
# Run android specific script for android build.
NO_PIP_MAIN_CMD="${ANDROID_CMD} ${OPT_FLAG} "
elif [[ ${CTYPE} == "android_full" ]]; then
# Run android specific script for full android build.
NO_PIP_MAIN_CMD="${ANDROID_FULL_CMD} ${OPT_FLAG} "
fi
fi
if [[ ${TF_BUILD_IS_PIP} == "pip" ]] ||
[[ ${TF_BUILD_IS_PIP} == "both" ]]; then
# Android builds conflict with PIP builds
if [[ ${CTYPE} == "android" ]]; then
echo "Skipping parameter combination: ${TF_BUILD_IS_PIP} & "\
"${TF_BUILD_CONTAINER_TYPE}"
exit 0
fi
# NOTE(review): PIP_CMD and the PIP_*_FLAG variables are defined earlier in
# this script (outside this excerpt).
PIP_MAIN_CMD="${MAIN_CMD} ${PIP_CMD} ${CTYPE} ${EXTRA_ARGS} ${OPT_FLAG}"
# Add flag for integration tests
if [[ ! -z "${TF_BUILD_INTEGRATION_TESTS}" ]] &&
[[ "${TF_BUILD_INTEGRATION_TESTS}" != "0" ]]; then
PIP_MAIN_CMD="${PIP_MAIN_CMD} ${PIP_INTEGRATION_TESTS_FLAG}"
fi
# Add command for tutorial test
if [[ ! -z "${TF_BUILD_TEST_TUTORIALS}" ]] &&
[[ "${TF_BUILD_TEST_TUTORIALS}" != "0" ]]; then
PIP_MAIN_CMD="${PIP_MAIN_CMD} ${PIP_TEST_TUTORIALS_FLAG}"
# Prepare data directory for tutorial tests
mkdir -p "${TUT_TEST_DATA_DIR}" ||
die "FAILED to create data directory for tutorial tests: "\
"${TUT_TEST_DATA_DIR}"
if [[ "${DO_DOCKER}" == "1" ]]; then
EXTRA_PARAMS="${EXTRA_PARAMS} -v ${TUT_TEST_DATA_DIR}:${TUT_TEST_DATA_DIR}"
fi
fi
fi
# Select the final MAIN_CMD; benchmarks override everything else.
if [[ ${RUN_BENCHMARKS} == "1" ]]; then
MAIN_CMD="${BENCHMARK_CMD} ${OPT_FLAG}"
elif [[ ${TF_BUILD_IS_PIP} == "no_pip" ]]; then
MAIN_CMD="${NO_PIP_MAIN_CMD}"
elif [[ ${TF_BUILD_IS_PIP} == "pip" ]]; then
MAIN_CMD="${PIP_MAIN_CMD}"
elif [[ ${TF_BUILD_IS_PIP} == "both" ]]; then
MAIN_CMD="${NO_PIP_MAIN_CMD} && ${PIP_MAIN_CMD}"
else
die "Unrecognized value in TF_BUILD_IS_PIP: \"${TF_BUILD_IS_PIP}\""
fi
# Check if this is a tf_nightly build
if [[ "${TF_NIGHTLY}" == "1" ]]; then
EXTRA_PARAMS="${EXTRA_PARAMS} -e TF_NIGHTLY=1"
fi
# Process Python version
# python2 is the default and needs no action; python3 variants either set the
# container env var (Docker) or resolve the interpreter path (host build).
if [[ ${TF_BUILD_PYTHON_VERSION} == "python2" ]]; then
:
elif [[ ${TF_BUILD_PYTHON_VERSION} == "python3" || \
${TF_BUILD_PYTHON_VERSION} == "python3.4" || \
${TF_BUILD_PYTHON_VERSION} == "python3.5" || \
${TF_BUILD_PYTHON_VERSION} == "python3.6" ]]; then
# Supply proper environment variable to select Python 3
if [[ "${DO_DOCKER}" == "1" ]]; then
EXTRA_PARAMS="${EXTRA_PARAMS} -e CI_BUILD_PYTHON=${TF_BUILD_PYTHON_VERSION}"
else
# Determine the path to python3
PYTHON3_PATH=$(which "${TF_BUILD_PYTHON_VERSION}" | head -1)
if [[ -z "${PYTHON3_PATH}" ]]; then
die "ERROR: Failed to locate ${TF_BUILD_PYTHON_VERSION} binary on path"
else
echo "Found ${TF_BUILD_PYTHON_VERSION} binary at: ${PYTHON3_PATH}"
fi
export PYTHON_BIN_PATH="${PYTHON3_PATH}"
fi
else
die "Unrecognized value in TF_BUILD_PYTHON_VERSION: "\
"\"${TF_BUILD_PYTHON_VERSION}\""
fi
# Append additional Docker extra parameters
EXTRA_PARAMS="${EXTRA_PARAMS} ${TF_BUILD_APPEND_CI_DOCKER_EXTRA_PARAMS}"
# Finally, do a dry run or call the command
# The command, which may consist of multiple parts (e.g., in the case of
# TF_BUILD_SERIAL_TESTS=1), are written to a bash script, which is
# then called. The name of the script is randomized to make concurrent
# builds on the node possible.
TMP_SCRIPT="$(mktemp)_ci_parameterized_build.sh"
if [[ "${DO_DOCKER}" == "1" ]]; then
# Map the tmp script into the Docker container
EXTRA_PARAMS="${EXTRA_PARAMS} -v ${TMP_SCRIPT}:/tmp/tf_build.sh"
if [[ ! -z "${TF_BUILD_BAZEL_CLEAN}" ]] &&
[[ "${TF_BUILD_BAZEL_CLEAN}" != "0" ]] &&
[[ "${TF_BUILD_IS_PIP}" != "both" ]]; then
# For TF_BUILD_IS_PIP == both, "bazel clean" will have already
# been performed before the "bazel test" step
EXTRA_PARAMS="${EXTRA_PARAMS} -e TF_BUILD_BAZEL_CLEAN=1"
fi
EXTRA_PARAMS=$(str_strip "${EXTRA_PARAMS}")
echo "Exporting CI_DOCKER_EXTRA_PARAMS: ${EXTRA_PARAMS}"
export CI_DOCKER_EXTRA_PARAMS="${EXTRA_PARAMS}"
fi
# Write to the tmp script
echo "#!/usr/bin/env bash" > ${TMP_SCRIPT}
if [[ ! -z "${TF_BUILD_BAZEL_CLEAN}" ]] &&
[[ "${TF_BUILD_BAZEL_CLEAN}" != "0" ]]; then
echo ${BAZEL_CLEAN_CMD} >> ${TMP_SCRIPT}
fi
echo ${MAIN_CMD} >> ${TMP_SCRIPT}
# Show the generated script before execution, for CI log debuggability.
echo "Executing final command (${TMP_SCRIPT})..."
echo "=========================================="
cat ${TMP_SCRIPT}
echo "=========================================="
echo ""
# For python3.5/3.6 Docker builds, stage a modified copy of the Dockerfile in
# a temporary directory so the right install_*_pip_packages.sh script is used.
TMP_DIR=""
DOCKERFILE_FLAG=""
if [[ "${DO_DOCKER}" == "1" ]]; then
  if [[ "${TF_BUILD_PYTHON_VERSION}" == "python3.5" ]] ||
     [[ "${TF_BUILD_PYTHON_VERSION}" == "python3.6" ]]; then
    # Modify Dockerfile for Python3.5 | Python3.6 build
    TMP_DIR=$(mktemp -d)
    echo "Docker build will occur in temporary directory: ${TMP_DIR}"
    # Copy the files required for the docker build
    SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
    cp -r "${SCRIPT_DIR}/install" "${TMP_DIR}/install" || \
      die "ERROR: Failed to copy directory ${SCRIPT_DIR}/install"
    DOCKERFILE="${SCRIPT_DIR}/Dockerfile.${TF_BUILD_CONTAINER_TYPE}"
    cp "${DOCKERFILE}" "${TMP_DIR}/" || \
      die "ERROR: Failed to copy Dockerfile at ${DOCKERFILE}"
    DOCKERFILE="${TMP_DIR}/Dockerfile.${TF_BUILD_CONTAINER_TYPE}"
    # Replace a line in the Dockerfile: point the pip-install step at the
    # version-specific install script.
    if sed -i \
      "s/RUN \/install\/install_pip_packages.sh/RUN \/install\/install_${TF_BUILD_PYTHON_VERSION}_pip_packages.sh/g" \
      "${DOCKERFILE}"
    then
      echo "Copied and modified Dockerfile for ${TF_BUILD_PYTHON_VERSION} build: ${DOCKERFILE}"
    else
      # Fix: error message previously read "Faild".
      die "ERROR: Failed to copy and modify Dockerfile: ${DOCKERFILE}"
    fi
    DOCKERFILE_FLAG="--dockerfile ${DOCKERFILE}"
  fi
fi
chmod +x ${TMP_SCRIPT}
# Map TF_BUILD container types to containers we actually have.
if [[ "${CTYPE}" == "android_full" ]]; then
CONTAINER="android"
else
CONTAINER=${CTYPE}
fi
FAILURE=0
if [[ ! -z "${TF_BUILD_DRY_RUN}" ]] && [[ ${TF_BUILD_DRY_RUN} != "0" ]]; then
# Do a dry run: just print the final command
echo "*** This is a DRY RUN ***"
else
# Actually run the command
if [[ "${DO_DOCKER}" == "1" ]]; then
${DOCKER_MAIN_CMD} ${CONTAINER} ${DOCKERFILE_FLAG} /tmp/tf_build.sh
else
${TMP_SCRIPT}
fi
if [[ $? != "0" ]]; then
FAILURE=1
fi
fi
# Report the outcome and clean up the generated script.
[[ ${FAILURE} == "0" ]] && RESULT="SUCCESS" || RESULT="FAILURE"
rm -f ${TMP_SCRIPT}
END_TIME=$(date +'%s')
echo ""
echo "Parameterized build ends with ${RESULT} at: $(date) "\
"(Elapsed time: $((END_TIME - START_TIME)) s)"
# Clean up temporary directory if it exists
if [[ ! -z "${TMP_DIR}" ]]; then
echo "Cleaning up temporary directory: ${TMP_DIR}"
rm -rf "${TMP_DIR}"
fi
# Exit code mirrors the build result (0 = success, 1 = failure).
exit ${FAILURE}
| Java |
package fake_command_runner_matchers
import (
"fmt"
"os/exec"
"github.com/cloudfoundry/gunk/command_runner/fake_command_runner"
)
// HaveKilled builds a matcher asserting that the fake command runner killed
// a command matching the given spec.
func HaveKilled(spec fake_command_runner.CommandSpec) *HaveKilledMatcher {
	matcher := new(HaveKilledMatcher)
	matcher.Spec = spec
	return matcher
}
// HaveKilledMatcher is a Gomega-style matcher that succeeds when the fake
// command runner has killed at least one command matching Spec.
type HaveKilledMatcher struct {
// Spec describes the command expected to have been killed.
Spec fake_command_runner.CommandSpec
// killed caches the runner's killed commands (populated by Match) so the
// failure messages can display them.
killed []*exec.Cmd
}
// Match reports whether any command killed by the fake runner matches m.Spec.
// actual must be a *fake_command_runner.FakeCommandRunner; any other type
// yields an error. The killed commands are cached on the matcher for use in
// the failure messages.
func (m *HaveKilledMatcher) Match(actual interface{}) (bool, error) {
	runner, ok := actual.(*fake_command_runner.FakeCommandRunner)
	if !ok {
		return false, fmt.Errorf("Not a fake command runner: %#v.", actual)
	}

	m.killed = runner.KilledCommands()

	// Return directly on the first matching command; the original's
	// matched-flag plus if/else { return true } else { return false }
	// collapses to these returns.
	for _, cmd := range m.killed {
		if m.Spec.Matches(cmd) {
			return true, nil
		}
	}
	return false, nil
}
// FailureMessage describes a failed positive expectation: the spec that was
// expected to be killed, followed by the commands actually killed.
func (m *HaveKilledMatcher) FailureMessage(actual interface{}) (message string) {
	message = fmt.Sprintf("Expected to kill:%s\n\nActually killed:%s", prettySpec(m.Spec), prettyCommands(m.killed))
	return
}
// NegatedFailureMessage describes a failed negative expectation: the spec
// that should not have been killed but was.
func (m *HaveKilledMatcher) NegatedFailureMessage(actual interface{}) (message string) {
	message = fmt.Sprintf("Expected to not kill the following commands:%s", prettySpec(m.Spec))
	return
}
| Java |
/*
* Copyright 2012, Google Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are
* met:
*
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the
* distribution.
* * Neither the name of Google Inc. nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.jf.dexlib2.writer.pool;
import org.jf.dexlib2.iface.reference.StringReference;
import org.jf.dexlib2.writer.StringSection;
import org.jf.util.ExceptionWithContext;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
/**
 * A pool of the unique strings referenced by a dex file being written.
 *
 * Interned strings are stored in the inherited {@code internedItems} map,
 * keyed by their {@link String} form; indices are assigned elsewhere.
 */
public class StringPool extends StringTypeBasePool implements StringSection<CharSequence, StringReference> {
    public StringPool(@Nonnull DexPool dexPool) {
        super(dexPool);
    }

    /** Adds the given string to the pool with a placeholder index of 0. */
    public void intern(@Nonnull CharSequence string) {
        internedItems.put(string.toString(), 0);
    }

    /** Interns the given string, doing nothing when it is null. */
    public void internNullable(@Nullable CharSequence string) {
        if (string == null) {
            return;
        }
        intern(string);
    }

    /**
     * Returns the pool index previously assigned to the referenced string.
     *
     * @throws ExceptionWithContext if the string was never interned
     */
    @Override public int getItemIndex(@Nonnull StringReference key) {
        final Integer assignedIndex = internedItems.get(key.toString());
        if (assignedIndex != null) {
            return assignedIndex;
        }
        throw new ExceptionWithContext("Item not found.: %s", key.toString());
    }

    /** True when some string index exceeds 0xFFFF, requiring jumbo opcodes. */
    @Override public boolean hasJumboIndexes() {
        return internedItems.size() > 65536;
    }
}
| Java |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
package org.elasticsearch.xpack.ml.inference.allocation;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.Metadata;
import org.elasticsearch.cluster.metadata.NodesShutdownMetadata;
import org.elasticsearch.cluster.metadata.SingleNodeShutdownMetadata;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodeRole;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.xpack.core.ml.MlMetadata;
import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction;
import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelAllocationStateAction;
import org.elasticsearch.xpack.core.ml.inference.allocation.AllocationState;
import org.elasticsearch.xpack.core.ml.inference.allocation.RoutingState;
import org.elasticsearch.xpack.core.ml.inference.allocation.RoutingStateAndReason;
import org.elasticsearch.xpack.core.ml.inference.allocation.TrainedModelAllocation;
import org.elasticsearch.xpack.ml.MachineLearning;
import org.elasticsearch.xpack.ml.job.NodeLoadDetector;
import org.elasticsearch.xpack.ml.process.MlMemoryTracker;
import org.junit.Before;
import java.util.Collections;
import java.util.Set;
import java.util.function.Function;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.anEmptyMap;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.nullValue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TrainedModelAllocationClusterServiceTests extends ESTestCase {
private ClusterService clusterService;
private NodeLoadDetector nodeLoadDetector;
// Wires up a mocked ClusterService (with the ML memory settings registered)
// and a NodeLoadDetector backed by a memory tracker that always reports
// itself as recently refreshed.
@Before
public void setupObjects() {
clusterService = mock(ClusterService.class);
ClusterSettings clusterSettings = new ClusterSettings(
Settings.EMPTY,
Sets.newHashSet(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, MachineLearning.USE_AUTO_MACHINE_MEMORY_PERCENT)
);
when(clusterService.getClusterSettings()).thenReturn(clusterSettings);
MlMemoryTracker memoryTracker = mock(MlMemoryTracker.class);
when(memoryTracker.isRecentlyRefreshed()).thenReturn(true);
nodeLoadDetector = new NodeLoadDetector(memoryTracker);
}
// Verifies updateModelRoutingTable: a STARTED update is applied to an
// existing routing entry, updates against missing nodes/models fail with
// ResourceNotFoundException, STOPPED updates are tolerated for missing
// nodes/models, and a STOPPED update removes the routing entry.
public void testUpdateModelRoutingTable() {
String modelId = "existing-model";
String nodeId = "ml-node-with-room";
// One model allocated to a single ML node with a fresh routing entry.
ClusterState currentState = ClusterState.builder(new ClusterName("testUpdateModelRoutingTable"))
.nodes(DiscoveryNodes.builder().add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes())).build())
.metadata(
Metadata.builder()
.putCustom(
TrainedModelAllocationMetadata.NAME,
TrainedModelAllocationMetadata.Builder.empty()
.addNewAllocation(
modelId,
TrainedModelAllocation.Builder.empty(newParams(modelId, 10_000L)).addNewRoutingEntry(nodeId)
)
.build()
)
.build()
)
.build();
// While an allocation is stopping, routing updates must not mutate state.
assertThatStoppingAllocationPreventsMutation(
state -> TrainedModelAllocationClusterService.updateModelRoutingTable(
state,
new UpdateTrainedModelAllocationStateAction.Request(nodeId, modelId, new RoutingStateAndReason(RoutingState.STARTED, ""))
),
currentState
);
ClusterState newState = TrainedModelAllocationClusterService.updateModelRoutingTable(
currentState,
new UpdateTrainedModelAllocationStateAction.Request(nodeId, modelId, new RoutingStateAndReason(RoutingState.STARTED, ""))
);
assertThat(
TrainedModelAllocationMetadata.fromState(newState).getModelAllocation(modelId).getNodeRoutingTable().get(nodeId).getState(),
equalTo(RoutingState.STARTED)
);
// STARTED updates for unknown nodes or models must be rejected.
expectThrows(
ResourceNotFoundException.class,
() -> TrainedModelAllocationClusterService.updateModelRoutingTable(
currentState,
new UpdateTrainedModelAllocationStateAction.Request(
"missingNode",
modelId,
new RoutingStateAndReason(RoutingState.STARTED, "")
)
)
);
expectThrows(
ResourceNotFoundException.class,
() -> TrainedModelAllocationClusterService.updateModelRoutingTable(
currentState,
new UpdateTrainedModelAllocationStateAction.Request(
nodeId,
"missingModel",
new RoutingStateAndReason(RoutingState.STARTED, "")
)
)
);
// TEST Stopped
// We should allow a "stopped" update on missing models and nodes as entries may have already been deleted
TrainedModelAllocationClusterService.updateModelRoutingTable(
currentState,
new UpdateTrainedModelAllocationStateAction.Request("missingNode", modelId, new RoutingStateAndReason(RoutingState.STOPPED, ""))
);
TrainedModelAllocationClusterService.updateModelRoutingTable(
currentState,
new UpdateTrainedModelAllocationStateAction.Request(nodeId, "missingModel", new RoutingStateAndReason(RoutingState.STOPPED, ""))
);
// A STOPPED update for a live entry removes it from the routing table.
ClusterState updateState = TrainedModelAllocationClusterService.updateModelRoutingTable(
currentState,
new UpdateTrainedModelAllocationStateAction.Request(nodeId, modelId, new RoutingStateAndReason(RoutingState.STOPPED, ""))
);
assertThat(
TrainedModelAllocationMetadata.fromState(updateState).getModelAllocation(modelId).getNodeRoutingTable(),
not(hasKey(nodeId))
);
}
// Verifies removeAllocation: removing from a state without the allocation
// throws ResourceNotFoundException, and removing an existing allocation
// deletes it from the metadata.
public void testRemoveAllocation() {
ClusterState clusterStateWithoutAllocation = ClusterState.builder(new ClusterName("testRemoveAllocation"))
.metadata(Metadata.builder().build())
.build();
String modelId = "remove-allocation";
expectThrows(
ResourceNotFoundException.class,
() -> TrainedModelAllocationClusterService.removeAllocation(clusterStateWithoutAllocation, modelId)
);
ClusterState clusterStateWithAllocation = ClusterState.builder(new ClusterName("testRemoveAllocation"))
.metadata(
Metadata.builder()
.putCustom(
TrainedModelAllocationMetadata.NAME,
TrainedModelAllocationMetadata.Builder.empty()
.addNewAllocation(modelId, TrainedModelAllocation.Builder.empty(newParams(modelId, randomNonNegativeLong())))
.build()
)
.build()
)
.build();
// Sanity-check the allocation exists before removal, is gone after.
assertThat(TrainedModelAllocationMetadata.fromState(clusterStateWithAllocation).getModelAllocation(modelId), is(not(nullValue())));
ClusterState modified = TrainedModelAllocationClusterService.removeAllocation(clusterStateWithAllocation, modelId);
assertThat(TrainedModelAllocationMetadata.fromState(modified).getModelAllocation(modelId), is(nullValue()));
}
// Verifies removeAllAllocations: a state with no allocations is returned
// unchanged (same instance), and a state with random allocations ends up
// with an empty allocation map.
public void testRemoveAllAllocations() {
ClusterState clusterStateWithoutAllocation = ClusterState.builder(new ClusterName("testRemoveAllAllocations"))
.metadata(Metadata.builder().build())
.build();
assertThat(
TrainedModelAllocationClusterService.removeAllAllocations(clusterStateWithoutAllocation),
equalTo(clusterStateWithoutAllocation)
);
ClusterState clusterStateWithAllocations = ClusterState.builder(new ClusterName("testRemoveAllAllocations"))
.metadata(
Metadata.builder()
.putCustom(
TrainedModelAllocationMetadata.NAME,
TrainedModelAllocationMetadataTests.randomInstance()
)
.build()
)
.build();
ClusterState modified = TrainedModelAllocationClusterService.removeAllAllocations(clusterStateWithAllocations);
assertThat(TrainedModelAllocationMetadata.fromState(modified).modelAllocations(), is(anEmptyMap()));
}
// Verifies createModelAllocation: eligible ML nodes get STARTING routes, a
// node without enough memory gets a FAILED route with an explanatory
// reason, and non-ML / shutting-down / old-version nodes are skipped.
// Creating the same allocation twice throws ResourceAlreadyExistsException.
public void testCreateAllocation() {
ClusterState currentState = ClusterState.builder(new ClusterName("testCreateAllocation"))
.nodes(
DiscoveryNodes.builder()
.add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.add(buildNode("ml-node-without-room", true, 1000L))
.add(buildNode("not-ml-node", false, ByteSizeValue.ofGb(4).getBytes()))
.add(buildNode("ml-node-shutting-down", true, ByteSizeValue.ofGb(4).getBytes()))
.add(buildOldNode("old-ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.build()
)
.metadata(Metadata.builder().putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata("ml-node-shutting-down")))
.build();
TrainedModelAllocationClusterService trainedModelAllocationClusterService = createClusterService();
ClusterState newState = trainedModelAllocationClusterService.createModelAllocation(currentState, newParams("new-model", 150));
TrainedModelAllocation createdAllocation = TrainedModelAllocationMetadata.fromState(newState).getModelAllocation("new-model");
// Only the two ML nodes that are not shutting down and not old-versioned
// receive routing entries.
assertThat(createdAllocation, is(not(nullValue())));
assertThat(createdAllocation.getNodeRoutingTable().keySet(), hasSize(2));
assertThat(createdAllocation.getNodeRoutingTable(), hasKey("ml-node-with-room"));
assertThat(createdAllocation.getNodeRoutingTable().get("ml-node-with-room").getState(), equalTo(RoutingState.STARTING));
assertThat(createdAllocation.getNodeRoutingTable(), hasKey("ml-node-without-room"));
assertThat(createdAllocation.getNodeRoutingTable().get("ml-node-without-room").getState(), equalTo(RoutingState.FAILED));
assertThat(
createdAllocation.getNodeRoutingTable().get("ml-node-without-room").getReason(),
containsString("This node has insufficient available memory.")
);
expectThrows(
ResourceAlreadyExistsException.class,
() -> trainedModelAllocationClusterService.createModelAllocation(newState, newParams("new-model", 150))
);
}
// Verifies that creating a model allocation is rejected with an
// ElasticsearchStatusException while ML reset mode is enabled, and
// succeeds once reset mode is off.
public void testCreateAllocationWhileResetModeIsTrue() {
ClusterState currentState = ClusterState.builder(new ClusterName("testCreateAllocation"))
.nodes(
DiscoveryNodes.builder()
.add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.build()
)
.metadata(Metadata.builder().putCustom(MlMetadata.TYPE, new MlMetadata.Builder().isResetMode(true).build()))
.build();
TrainedModelAllocationClusterService trainedModelAllocationClusterService = createClusterService();
expectThrows(
ElasticsearchStatusException.class,
() -> trainedModelAllocationClusterService.createModelAllocation(currentState, newParams("new-model", 150))
);
ClusterState stateWithoutReset = ClusterState.builder(new ClusterName("testCreateAllocation"))
.nodes(
DiscoveryNodes.builder()
.add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.build()
)
.metadata(Metadata.builder().putCustom(MlMetadata.TYPE, new MlMetadata.Builder().isResetMode(false).build()))
.build();
// Shouldn't throw
trainedModelAllocationClusterService.createModelAllocation(stateWithoutReset, newParams("new-model", 150));
}
// Verifies addRemoveAllocationNodes: existing STARTED routes are kept, new
// eligible ML nodes are added as STARTING, nodes without enough memory are
// added as FAILED, and routes on shutting-down or old-version nodes are
// dropped. Stopping allocations must not be mutated.
public void testAddRemoveAllocationNodes() {
ClusterState currentState = ClusterState.builder(new ClusterName("testAddRemoveAllocationNodes"))
.nodes(
DiscoveryNodes.builder()
.add(buildNode("ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.add(buildNode("new-ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.add(buildNode("ml-node-without-room", true, 1000L))
.add(buildNode("not-ml-node", false, ByteSizeValue.ofGb(4).getBytes()))
.add(buildNode("ml-node-shutting-down", true, ByteSizeValue.ofGb(4).getBytes()))
.add(buildOldNode("old-versioned-ml-node-with-room", true, ByteSizeValue.ofGb(4).getBytes()))
.build()
)
.metadata(
Metadata.builder()
.putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata("ml-node-shutting-down"))
.putCustom(
TrainedModelAllocationMetadata.NAME,
TrainedModelAllocationMetadata.Builder.empty()
.addNewAllocation(
"model-1",
TrainedModelAllocation.Builder.empty(newParams("model-1", 10_000))
.addNewRoutingEntry("ml-node-with-room")
.updateExistingRoutingEntry("ml-node-with-room", started())
.addNewRoutingEntry("old-ml-node-with-room")
.updateExistingRoutingEntry("old-ml-node-with-room", started())
.addNewRoutingEntry("ml-node-shutting-down")
)
.addNewAllocation(
"model-2",
TrainedModelAllocation.Builder.empty(newParams("model-2", 10_000))
.addNewRoutingEntry("old-ml-node-with-room")
.updateExistingRoutingEntry("old-ml-node-with-room", started())
)
.build()
)
)
.build();
TrainedModelAllocationClusterService trainedModelAllocationClusterService = createClusterService();
// Stopping shouldn't cause any updates
assertThatStoppingAllocationPreventsMutation(
trainedModelAllocationClusterService::addRemoveAllocationNodes,
currentState
);
ClusterState modified = trainedModelAllocationClusterService.addRemoveAllocationNodes(currentState);
TrainedModelAllocationMetadata trainedModelAllocationMetadata = TrainedModelAllocationMetadata.fromState(modified);
assertThat(trainedModelAllocationMetadata.modelAllocations().keySet(), hasSize(2));
assertThat(trainedModelAllocationMetadata.modelAllocations(), allOf(hasKey("model-1"), hasKey("model-2")));
// model-1: existing STARTED route kept, new node STARTING, cramped node
// FAILED; routes on "old-ml-node-with-room"/"ml-node-shutting-down" gone.
assertThat(trainedModelAllocationMetadata.getModelAllocation("model-1").getNodeRoutingTable().keySet(), hasSize(3));
assertThat(
trainedModelAllocationMetadata.getModelAllocation("model-1").getNodeRoutingTable(),
allOf(hasKey("ml-node-with-room"), hasKey("new-ml-node-with-room"), hasKey("ml-node-without-room"))
);
assertNodeState(trainedModelAllocationMetadata, "model-1", "ml-node-with-room", RoutingState.STARTED);
assertNodeState(trainedModelAllocationMetadata, "model-1", "new-ml-node-with-room", RoutingState.STARTING);
assertNodeState(trainedModelAllocationMetadata, "model-1", "ml-node-without-room", RoutingState.FAILED);
// model-2: had no live routes, so all three candidate nodes start fresh.
assertThat(trainedModelAllocationMetadata.getModelAllocation("model-2").getNodeRoutingTable().keySet(), hasSize(3));
assertThat(
trainedModelAllocationMetadata.getModelAllocation("model-2").getNodeRoutingTable(),
allOf(hasKey("ml-node-with-room"), hasKey("new-ml-node-with-room"), hasKey("ml-node-without-room"))
);
assertNodeState(trainedModelAllocationMetadata, "model-2", "ml-node-with-room", RoutingState.STARTING);
assertNodeState(trainedModelAllocationMetadata, "model-2", "new-ml-node-with-room", RoutingState.STARTING);
assertNodeState(trainedModelAllocationMetadata, "model-2", "ml-node-without-room", RoutingState.FAILED);
}
/**
 * Verifies every condition under which {@code TrainedModelAllocationClusterService.shouldAllocateModels}
 * decides whether model allocations must be recomputed after a cluster state change.
 * Re-allocation should happen only when the set of usable ML nodes changes in a way that
 * affects a non-stopping allocation; pure metadata churn, non-ML node changes, or nodes
 * that are shutting down must not trigger it.
 */
public void testShouldAllocateModels() {
    String model1 = "model-1";
    String model2 = "model-2";
    String mlNode1 = "ml-node-with-room";
    String mlNode2 = "new-ml-node-with-room";
    DiscoveryNode mlNode1Node = buildNode(mlNode1, true, ByteSizeValue.ofGb(4).getBytes());
    DiscoveryNode mlNode2Node = buildNode(mlNode2, true, ByteSizeValue.ofGb(4).getBytes());
    ClusterState stateWithTwoNodes = ClusterState.builder(new ClusterName("testShouldAllocateModels"))
        .nodes(DiscoveryNodes.builder().add(mlNode1Node).add(mlNode2Node))
        .build();
    ClusterState stateWithOneNode = ClusterState.builder(new ClusterName("testShouldAllocateModels"))
        .nodes(DiscoveryNodes.builder().add(mlNode1Node))
        .build();
    ClusterState stateWithOneNodeNotMl = ClusterState.builder(new ClusterName("testShouldAllocateModels"))
        .nodes(DiscoveryNodes.builder().add(mlNode1Node).add(buildNode("not-ml-node", false, ByteSizeValue.ofGb(4).getBytes())))
        .build();
    // No metadata in the new state means no allocations, so no updates
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(randomFrom(stateWithOneNodeNotMl, stateWithOneNode, stateWithTwoNodes)).build(),
                ClusterState.builder(randomFrom(stateWithOneNodeNotMl, stateWithOneNode, stateWithTwoNodes))
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
    // Even with metadata changes, unless there are node changes, do nothing
    ClusterState randomState = randomFrom(stateWithOneNodeNotMl, stateWithOneNode, stateWithTwoNodes);
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(randomState)
                    .metadata(
                        Metadata.builder()
                            .putCustom(TrainedModelAllocationMetadata.NAME, TrainedModelAllocationMetadataTests.randomInstance())
                            .build()
                    )
                    .build(),
                ClusterState.builder(randomState)
                    .metadata(
                        Metadata.builder()
                            .putCustom(TrainedModelAllocationMetadata.NAME, TrainedModelAllocationMetadataTests.randomInstance())
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
    // If the node removed is not even an ML node, we should not attempt to re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithOneNodeNotMl)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
    // If the node removed is an ML node, but no models are allocated to it, we should not attempt to re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithTwoNodes)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
    // If a new ML node is added, we should attempt to re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithTwoNodes)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(true)
    );
    // If a new ML node is added, but allocation is stopping, we should not re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithTwoNodes)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(
                                        model1,
                                        TrainedModelAllocation.Builder.empty(newParams(model1, 100)).stopAllocation()
                                    )
                                    .build()
                            )
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
    // If a new ML node is added, but it's shutting down, don't re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithTwoNodes)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata(mlNode2))
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(model1, TrainedModelAllocation.Builder.empty(newParams(model1, 100)))
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
    // If a ML node is removed and it's routed to, re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(
                                        model1,
                                        TrainedModelAllocation.Builder.empty(newParams(model1, 100)).addNewRoutingEntry(mlNode1)
                                    )
                                    .addNewAllocation(
                                        model2,
                                        TrainedModelAllocation.Builder.empty(newParams("model-2", 100))
                                            .addNewRoutingEntry(mlNode1)
                                            .addNewRoutingEntry(mlNode2)
                                    )
                                    .build()
                            )
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithTwoNodes)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(
                                        model1,
                                        TrainedModelAllocation.Builder.empty(newParams(model1, 100)).addNewRoutingEntry(mlNode1)
                                    )
                                    .addNewAllocation(
                                        model2,
                                        TrainedModelAllocation.Builder.empty(newParams("model-2", 100))
                                            .addNewRoutingEntry(mlNode1)
                                            .addNewRoutingEntry(mlNode2)
                                    )
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(true)
    );
    // If a ML node is removed and it's routed to, but the allocation is stopping, don't re-allocate
    assertThat(
        TrainedModelAllocationClusterService.shouldAllocateModels(
            new ClusterChangedEvent(
                "test",
                ClusterState.builder(stateWithOneNode)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(
                                        model1,
                                        TrainedModelAllocation.Builder.empty(newParams(model1, 100)).addNewRoutingEntry(mlNode1)
                                    )
                                    .addNewAllocation(
                                        model2,
                                        TrainedModelAllocation.Builder.empty(newParams("model-2", 100))
                                            .addNewRoutingEntry(mlNode1)
                                            .addNewRoutingEntry(mlNode2)
                                            .stopAllocation()
                                    )
                                    .build()
                            )
                            .build()
                    )
                    .build(),
                ClusterState.builder(stateWithTwoNodes)
                    .metadata(
                        Metadata.builder()
                            .putCustom(
                                TrainedModelAllocationMetadata.NAME,
                                TrainedModelAllocationMetadata.Builder.empty()
                                    .addNewAllocation(
                                        model1,
                                        TrainedModelAllocation.Builder.empty(newParams(model1, 100)).addNewRoutingEntry(mlNode1)
                                    )
                                    .addNewAllocation(
                                        model2,
                                        TrainedModelAllocation.Builder.empty(newParams("model-2", 100))
                                            .addNewRoutingEntry(mlNode1)
                                            .addNewRoutingEntry(mlNode2)
                                    )
                                    .build()
                            )
                            .build()
                    )
                    .build()
            )
        ),
        is(false)
    );
}
/**
 * {@code setToStopping} must throw for an unknown model id, and must flip an existing
 * allocation's state from STARTED to STOPPING.
 */
public void testSetAllocationToStopping() {
    String modelId = "stopping-allocation";

    // With no allocation metadata present, stopping an unknown model is an error.
    ClusterState stateWithoutAllocation = ClusterState.builder(new ClusterName("testSetAllocationToStopping"))
        .metadata(Metadata.builder().build())
        .build();
    expectThrows(
        ResourceNotFoundException.class,
        () -> TrainedModelAllocationClusterService.setToStopping(stateWithoutAllocation, modelId)
    );

    // Seed a state containing a single allocation for the model; it starts out STARTED.
    ClusterState stateWithAllocation = ClusterState.builder(new ClusterName("testSetAllocationToStopping"))
        .metadata(
            Metadata.builder()
                .putCustom(
                    TrainedModelAllocationMetadata.NAME,
                    TrainedModelAllocationMetadata.Builder.empty()
                        .addNewAllocation(modelId, TrainedModelAllocation.Builder.empty(newParams(modelId, randomNonNegativeLong())))
                        .build()
                )
                .build()
        )
        .build();
    TrainedModelAllocationMetadata metadataBefore = TrainedModelAllocationMetadata.fromState(stateWithAllocation);
    assertThat(metadataBefore.getModelAllocation(modelId), is(not(nullValue())));
    assertThat(metadataBefore.getModelAllocation(modelId).getAllocationState(), equalTo(AllocationState.STARTED));

    // The service should rewrite the allocation's state to STOPPING.
    ClusterState updatedState = TrainedModelAllocationClusterService.setToStopping(stateWithAllocation, modelId);
    assertThat(
        TrainedModelAllocationMetadata.fromState(updatedState).getModelAllocation(modelId).getAllocationState(),
        equalTo(AllocationState.STOPPING)
    );
}
/**
 * Applies {@code mutationFunction} to a copy of {@code original} in which every model
 * allocation has been set to STOPPING, and asserts the resulting allocation metadata is
 * unchanged — i.e. stopping allocations are immune to the mutation under test.
 * No-op when the original state holds no allocations.
 */
private void assertThatStoppingAllocationPreventsMutation(
    Function<ClusterState, ClusterState> mutationFunction,
    ClusterState original
) {
    TrainedModelAllocationMetadata currentMetadata = TrainedModelAllocationMetadata.fromState(original);
    if (currentMetadata.modelAllocations().isEmpty()) {
        // Nothing allocated, so there is nothing the mutation could touch.
        return;
    }

    // Flip every allocation in the state to STOPPING.
    TrainedModelAllocationMetadata.Builder stoppingBuilder = TrainedModelAllocationMetadata.builder(original);
    for (String modelId : currentMetadata.modelAllocations().keySet()) {
        stoppingBuilder.getAllocation(modelId).stopAllocation();
    }
    TrainedModelAllocationMetadata stoppedMetadata = stoppingBuilder.build();

    ClusterState stateWithStoppingAllocations = ClusterState.builder(original)
        .metadata(Metadata.builder(original.metadata()).putCustom(TrainedModelAllocationMetadata.NAME, stoppedMetadata).build())
        .build();
    assertThat(
        "setting all allocations to stopping did not prevent mutation",
        TrainedModelAllocationMetadata.fromState(mutationFunction.apply(stateWithStoppingAllocations)),
        equalTo(stoppedMetadata)
    );
}
/** Builds the service under test from the suite's cluster-service and node-load-detector fixtures. */
private TrainedModelAllocationClusterService createClusterService() {
    TrainedModelAllocationClusterService service = new TrainedModelAllocationClusterService(
        Settings.EMPTY,
        clusterService,
        nodeLoadDetector
    );
    return service;
}
/** Convenience overload: builds a node pinned to the current version. */
private static DiscoveryNode buildNode(String name, boolean isML, long nativeMemory) {
    Version nodeVersion = Version.CURRENT;
    return buildNode(name, isML, nativeMemory, nodeVersion);
}
/**
 * Builds a test {@link DiscoveryNode} carrying the ML sizing attributes. ML nodes get
 * every role; non-ML nodes are plain data/master nodes.
 */
private static DiscoveryNode buildNode(String name, boolean isML, long nativeMemory, Version version) {
    Set<DiscoveryNodeRole> roles = isML
        ? DiscoveryNodeRole.roles()
        : Set.of(DiscoveryNodeRole.DATA_ROLE, DiscoveryNodeRole.MASTER_ROLE);
    return new DiscoveryNode(
        name,
        name,
        buildNewFakeTransportAddress(),
        // Attributes the allocation logic reads to size the native process.
        MapBuilder.<String, String>newMapBuilder()
            .put(MachineLearning.MACHINE_MEMORY_NODE_ATTR, String.valueOf(nativeMemory))
            .put(MachineLearning.MAX_JVM_SIZE_NODE_ATTR, String.valueOf(10))
            .put(MachineLearning.MAX_OPEN_JOBS_NODE_ATTR, String.valueOf(10))
            .map(),
        roles,
        version
    );
}
/** A routing entry in the STARTED state with an empty reason. */
private static RoutingStateAndReason started() {
    RoutingStateAndReason startedEntry = new RoutingStateAndReason(RoutingState.STARTED, "");
    return startedEntry;
}
/** Builds a node on an older (7.15.0) version, for version-compatibility cases. */
private static DiscoveryNode buildOldNode(String name, boolean isML, long nativeMemory) {
    Version oldVersion = Version.V_7_15_0;
    return buildNode(name, isML, nativeMemory, oldVersion);
}
/** Task parameters for deploying {@code modelId} with the given model size in bytes. */
private static StartTrainedModelDeploymentAction.TaskParams newParams(String modelId, long modelSize) {
    StartTrainedModelDeploymentAction.TaskParams params =
        new StartTrainedModelDeploymentAction.TaskParams(modelId, modelSize);
    return params;
}
/** Asserts that the routing entry for {@code nodeId} within {@code modelId}'s allocation is in the expected state. */
private static void assertNodeState(TrainedModelAllocationMetadata metadata, String modelId, String nodeId, RoutingState routingState) {
    assertThat(metadata.getModelAllocation(modelId).getNodeRoutingTable().get(nodeId).getState(), equalTo(routingState));
}
/** A REMOVE-type shutdown record for the given node, as the cluster would publish it. */
private static NodesShutdownMetadata shutdownMetadata(String nodeId) {
    SingleNodeShutdownMetadata nodeShutdown = SingleNodeShutdownMetadata.builder()
        .setType(SingleNodeShutdownMetadata.Type.REMOVE)
        .setStartedAtMillis(randomNonNegativeLong())
        .setReason("tests")
        .setNodeId(nodeId)
        .build();
    return new NodesShutdownMetadata(Collections.singletonMap(nodeId, nodeShutdown));
}
}
| Java |
#!/bin/bash
# Pre-pull a fixed set of Ansible-enabled Docker base images so later builds
# don't pay per-build pull latency. Failed pulls are deliberately non-fatal:
# the loop continues with the remaining images (original behavior).
declare -a IMAGES=(
    'ansible/ubuntu14.04-ansible:stable'
    'ansible/centos7-ansible:stable'
    'williamyeh/ansible:debian8-onbuild'
    'williamyeh/ansible:debian7-onbuild'
    'williamyeh/ansible:ubuntu14.04-onbuild'
    'williamyeh/ansible:ubuntu12.04-onbuild'
    'williamyeh/ansible:centos7-onbuild'
    'williamyeh/ansible:centos6-onbuild'
)
for image in "${IMAGES[@]}"; do
    # Quote expansions: unquoted $image would undergo word splitting and globbing.
    echo "$image"
    docker pull "$image"
done
docker images | sort | Java |
#pragma once

#include "base/worker_thread.hpp"
#include "ugc/storage.hpp"
#include "ugc/types.hpp"

#include <functional>

class Index;
struct FeatureID;

namespace ugc
{
// Facade over UGC (User Generated Content) lookup and local update storage.
// NOTE(review): the public Get*/Set* methods have matching private *Impl
// counterparts and the class owns a base::WorkerThread — presumably the public
// calls dispatch to the worker thread; confirm in the .cpp.
class Api
{
public:
  // Receives the UGC record for a feature.
  using UGCCallback = std::function<void(UGC const &)>;
  // Receives the locally stored UGC update for a feature.
  using UGCUpdateCallback = std::function<void(UGCUpdate const &)>;

  explicit Api(Index const & index, std::string const & filename);

  void GetUGC(FeatureID const & id, UGCCallback callback);
  void GetUGCUpdate(FeatureID const & id, UGCUpdateCallback callback);
  void SetUGCUpdate(FeatureID const & id, UGCUpdate const & ugc);

  // Fixture builders for tests.
  static UGC MakeTestUGC1(Time now = Clock::now());
  static UGC MakeTestUGC2(Time now = Clock::now());

private:
  // Worker-side counterparts of the public entry points.
  void GetUGCImpl(FeatureID const & id, UGCCallback callback);
  void GetUGCUpdateImpl(FeatureID const & id, UGCUpdateCallback callback);
  void SetUGCUpdateImpl(FeatureID const & id, UGCUpdate const & ugc);

  Index const & m_index;
  base::WorkerThread m_thread;
  Storage m_storage;
};
}  // namespace ugc
| Java |
/**
* JBoss, Home of Professional Open Source.
* Copyright 2014-2022 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.pnc.indyrepositorymanager;
import org.apache.commons.io.IOUtils;
import org.commonjava.indy.client.core.Indy;
import org.commonjava.indy.client.core.util.UrlUtils;
import org.commonjava.indy.model.core.Group;
import org.commonjava.indy.model.core.RemoteRepository;
import org.commonjava.indy.model.core.StoreKey;
import org.commonjava.indy.model.core.StoreType;
import org.jboss.pnc.enums.RepositoryType;
import org.jboss.pnc.indyrepositorymanager.fixture.TestBuildExecution;
import org.jboss.pnc.model.Artifact;
import org.jboss.pnc.spi.repositorymanager.BuildExecution;
import org.jboss.pnc.spi.repositorymanager.RepositoryManagerResult;
import org.jboss.pnc.spi.repositorymanager.model.RepositorySession;
import org.jboss.pnc.test.category.ContainerTest;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.commonjava.indy.pkg.maven.model.MavenPackageTypeDescriptor.MAVEN_PKG_KEY;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.hamcrest.CoreMatchers.nullValue;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.jboss.pnc.indyrepositorymanager.IndyRepositoryConstants.PUBLIC_GROUP_ID;
import static org.jboss.pnc.indyrepositorymanager.IndyRepositoryConstants.SHARED_IMPORTS_ID;
/**
 * Verifies that remote repositories matching the configured ignore patterns (here
 * {@code maven:.+:in.+}, which matches the "internal" repo) are treated as trusted:
 * artifacts downloaded through them are NOT promoted into the shared-imports hosted
 * repository, while downloads from non-matching ("external") repositories are.
 */
@Category(ContainerTest.class)
public class ExcludeInternalRepoByRegexTest extends AbstractImportTest {

    private static final String INTERNAL = "internal";
    private static final String EXTERNAL = "external";

    @Override
    protected List<String> getIgnoredRepoPatterns() {
        // Regex matching package:name pairs to ignore; matches the INTERNAL repo below.
        List<String> result = new ArrayList<>();
        result.add("maven:.+:in.+");
        return result;
    }

    @Test
    public void extractBuildArtifacts_ContainsTwoDownloads() throws Exception {
        // create a remote repo pointing at our server fixture's 'repo/test' directory.
        indy.stores()
            .create(
                new RemoteRepository(MAVEN_PKG_KEY, INTERNAL, server.formatUrl(INTERNAL)),
                "Creating internal test remote repo",
                RemoteRepository.class);
        indy.stores()
            .create(
                new RemoteRepository(MAVEN_PKG_KEY, EXTERNAL, server.formatUrl(EXTERNAL)),
                "Creating external test remote repo",
                RemoteRepository.class);
        // Make sure the public group proxies both remotes.
        StoreKey publicKey = new StoreKey(MAVEN_PKG_KEY, StoreType.group, PUBLIC_GROUP_ID);
        StoreKey internalKey = new StoreKey(MAVEN_PKG_KEY, StoreType.remote, INTERNAL);
        StoreKey externalKey = new StoreKey(MAVEN_PKG_KEY, StoreType.remote, EXTERNAL);
        Group publicGroup = indy.stores().load(publicKey, Group.class);
        if (publicGroup == null) {
            publicGroup = new Group(MAVEN_PKG_KEY, PUBLIC_GROUP_ID, internalKey, externalKey);
            indy.stores().create(publicGroup, "creating public group", Group.class);
        } else {
            publicGroup.setConstituents(Arrays.asList(internalKey, externalKey));
            indy.stores().update(publicGroup, "adding test remotes to public group");
        }
        String internalPath = "org/foo/internal/1.0/internal-1.0.pom";
        String externalPath = "org/foo/external/1.1/external-1.1.pom";
        String content = "This is a test " + System.currentTimeMillis();
        // setup the expectation that the remote repo pointing at this server will request this file...and define its
        // content.
        server.expect(server.formatUrl(INTERNAL, internalPath), 200, content);
        server.expect(server.formatUrl(EXTERNAL, externalPath), 200, content);
        // create a dummy non-chained build execution and repo session based on it
        BuildExecution execution = new TestBuildExecution();
        RepositorySession rc = driver.createBuildRepository(
            execution,
            accessToken,
            accessToken,
            RepositoryType.MAVEN,
            Collections.emptyMap(),
            false);
        assertThat(rc, notNullValue());
        String baseUrl = rc.getConnectionInfo().getDependencyUrl();
        // download the two files via the repo session's dependency URL, which will proxy the test http server
        // using the expectations above
        assertThat(download(UrlUtils.buildUrl(baseUrl, internalPath)), equalTo(content));
        assertThat(download(UrlUtils.buildUrl(baseUrl, externalPath)), equalTo(content));
        // extract the build artifacts, which should contain the two imported deps.
        // This will also trigger promoting imported artifacts into the shared-imports hosted repo
        RepositoryManagerResult repositoryManagerResult = rc.extractBuildArtifacts(true);
        List<Artifact> deps = repositoryManagerResult.getDependencies();
        System.out.println(deps);
        assertThat(deps, notNullValue());
        assertThat(deps.size(), equalTo(2));
        Indy indy = driver.getIndy(accessToken);
        StoreKey sharedImportsKey = new StoreKey(MAVEN_PKG_KEY, StoreType.hosted, SHARED_IMPORTS_ID);
        // check that the imports from external locations are available from shared-imports
        InputStream stream = indy.content().get(sharedImportsKey, externalPath);
        String downloaded = IOUtils.toString(stream, (String) null);
        assertThat(downloaded, equalTo(content));
        stream.close();
        // check that the imports from internal/trusted locations are NOT available from shared-imports
        stream = indy.content().get(sharedImportsKey, internalPath);
        assertThat(stream, nullValue());
    }
}
| Java |
// Create the Control namespace if no other script has defined it yet.
if(typeof(Control)=='undefined')
Control={};
Control.TextArea = Class.create();
// Wrapper around a <textarea> that provides a debounced 'change' notification and
// selection-manipulation helpers used by the BBCode toolbar. Requires Prototype
// ($, Class.create, Object.extend, observe, bindAsEventListener, $A/inject).
Object.extend(Control.TextArea.prototype, {
    // Quiet period (ms) after the last input before 'change' is fired.
    onChangeTimeoutLength: 500,
    element: false,
    onChangeTimeout: false,
    initialize: function(textarea) {
        this.element = $(textarea);
        // Watch every way text can enter the field.
        $(this.element).observe('keyup', this.doOnChange.bindAsEventListener(this));
        $(this.element).observe('paste', this.doOnChange.bindAsEventListener(this));
        $(this.element).observe('input', this.doOnChange.bindAsEventListener(this));
    },
    // Debounced change handler: (re)arms a timer and, once input goes quiet,
    // notifies 'change' observers with the current value (notify() is mixed in
    // by Object.Event when that library is present).
    doOnChange: function(event) {
        if (this.onChangeTimeout)
            window.clearTimeout(this.onChangeTimeout);
        this.onChangeTimeout = window.setTimeout(function() {
            if (this.notify)
                this.notify('change', this.getValue());
        }.bind(this), this.onChangeTimeoutLength);
    },
    getValue: function() {
        return this.element.value;
    },
    // Returns the selected text, or false when the browser supports neither
    // document.selection (legacy IE) nor setSelectionRange.
    getSelection: function() {
        if (!!document.selection)
            return document.selection.createRange().text;
        else if (!!this.element.setSelectionRange)
            return this.element.value.substring(this.element.selectionStart, this.element.selectionEnd);
        else
            return false;
    },
    // Replaces the current selection with `text`, restores the scroll position,
    // and triggers the debounced change notification.
    replaceSelection: function(text) {
        var scrollTop = this.element.scrollTop;  // preserve scroll position across the edit
        if (!!document.selection) {
            // Legacy IE: assigning TextRange.text replaces the selection in place.
            // (The original code followed this with `range -= old.length - text.length`,
            // which only reassigned the local variable to NaN — removed as dead code.)
            this.element.focus();
            var range = document.selection.createRange();
            range.text = text;
        } else if (!!this.element.setSelectionRange) {
            var selection_start = this.element.selectionStart;
            this.element.value = this.element.value.substring(0, selection_start) + text + this.element.value.substring(this.element.selectionEnd);
            // Place the caret directly after the inserted text.
            this.element.setSelectionRange(selection_start + text.length, selection_start + text.length);
        }
        this.doOnChange();
        this.element.focus();
        this.element.scrollTop = scrollTop;
    },
    wrapSelection: function(before, after) {
        this.replaceSelection(before + this.getSelection() + after);
    },
    insertBeforeSelection: function(text) {
        this.replaceSelection(text + this.getSelection());
    },
    insertAfterSelection: function(text) {
        this.replaceSelection(this.getSelection() + text);
    },
    // Reduces the selected lines through `callback` (an inject/fold producing an
    // array of lines) and replaces the selection with the joined result,
    // optionally wrapped in `before`/`after`.
    injectEachSelectedLine: function(callback, before, after) {
        this.replaceSelection((before || '') + $A(this.getSelection().split("\n")).inject([], callback).join("\n") + (after || ''));
    },
    insertBeforeEachSelectedLine: function(text, before, after) {
        this.injectEachSelectedLine(function(lines, line) {
            lines.push(text + line);
            return lines;
        }, before, after);
    }
});
// Mix Object.Event's observe/notify support into Control.TextArea when available.
if (typeof(Object.Event) != 'undefined') {
    Object.Event.extend(Control.TextArea);
}

Control.TextArea.BBCode = Class.create();
Object.extend(Control.TextArea.BBCode.prototype,{
textarea:false,
tooltip:false,
toolbar:false,
emotions:false,
wrapper:false,
controllers:false,
initialize:function(textarea){
this.textarea=new Control.TextArea(textarea);
this._initLayout();
this._initEmotions();
this._initToolbar();
},
hide:function(){
this.wrapper.parentNode.appendChild(this.textarea.element.remove());
this.wrapper.hide();
},
show:function(){
this.controllers.appendChild(this.textarea.element.remove());
this.wrapper.show();
},
_initLayout:function(){
this.wrapper=$(document.createElement('div'));
this.wrapper.id="editor_wrapper";
this.wrapper.className="clearfix";
this.textarea.element.parentNode.insertBefore(this.wrapper,this.textarea.element);
this.emotions=$(document.createElement('div'));
this.emotions.id="bbcode_emotions";
this.emotions.innerHTML="<h5>表情图标</h5>";
this.wrapper.appendChild(this.emotions);
this.controllers=$(document.createElement('div'));
this.controllers.id="bbcode_controllers";
this.wrapper.appendChild(this.controllers);
this.toolbar=$(document.createElement('div'));
this.toolbar.id="bbcode_toolbar";
this.controllers.appendChild(this.toolbar);
this.tooltip=$(document.createElement('div'));
this.tooltip.id="bbcode_tooltip";
this.tooltip.innerHTML="提示:选择您需要装饰的文字, 按上列按钮即可添加上相应的标签";
this.controllers.appendChild(this.tooltip);
this.controllers.appendChild(this.textarea.element.remove());
},
_initEmotions:function(){
this._addEmotion("biggrin",function(){this.insertAfterSelection(" :D ");});
this._addEmotion("smile",function(){this.insertAfterSelection(" :) ");});
this._addEmotion("sad",function(){this.insertAfterSelection(" :( ");});
this._addEmotion("surprised",function(){this.insertAfterSelection(" :o ");});
this._addEmotion("eek",function(){this.insertAfterSelection(" :shock: ");});
this._addEmotion("confused",function(){this.insertAfterSelection(" :? ");});
this._addEmotion("cool",function(){this.insertAfterSelection(" 8) ");});
this._addEmotion("lol",function(){this.insertAfterSelection(" :lol: ");});
this._addEmotion("mad",function(){this.insertAfterSelection(" :x ");});
this._addEmotion("razz",function(){this.insertAfterSelection(" :P ");});
this._addEmotion("redface",function(){this.insertAfterSelection(" :oops: ");});
this._addEmotion("cry",function(){this.insertAfterSelection(" :cry: ");});
this._addEmotion("evil",function(){this.insertAfterSelection(" :evil: ");});
this._addEmotion("twisted",function(){this.insertAfterSelection(" :twisted: ");});
this._addEmotion("rolleyes",function(){this.insertAfterSelection(" :roll: ");});
this._addEmotion("wink",function(){this.insertAfterSelection(" :wink: ");});
this._addEmotion("exclaim",function(){this.insertAfterSelection(" :!: ");});
this._addEmotion("question",function(){this.insertAfterSelection(" :?: ");});
this._addEmotion("idea",function(){this.insertAfterSelection(" :idea: ");});
this._addEmotion("arrow",function(){this.insertAfterSelection(" :arrow: ");});
},
_addEmotion:function(icon,callback){
var img=$(document.createElement('img'));
img.src="http://www.javaeye.com/images/smiles/icon_"+icon+".gif";
img.observe('click',callback.bindAsEventListener(this.textarea));
this.emotions.appendChild(img);
},
_initToolbar:function(){
this._addButton("B",function(){this.wrapSelection('[b]','[/b]');},function(){this.innerHTML='粗体: [b]文字[/b] (alt+b)';},{id:'button_bold'});
this._addButton("I",function(){this.wrapSelection('[i]','[/i]');},function(){this.innerHTML='斜体: [i]文字[/i] (alt+i)';},{id:'button_italic'});
this._addButton("U",function(){this.wrapSelection('[u]','[/u]');},function(){this.innerHTML='下划线: [u]文字[/u] (alt+u)';},{id:'button_underline'});
this._addButton("Quote",function(){this.wrapSelection('[quote]','[/quote]');},function(){this.innerHTML='引用文字: [quote]文字[/quote] 或者 [quote="javaeye"]文字[/quote] (alt+q)';});
this._addButton("Code",function(){this.wrapSelection('[code="java"]','[/code]');},function(){this.innerHTML='代码: [code="ruby"]...[/code] (支持java, ruby, js, xml, html, php, python, c, c++, c#, sql)';});
this._addButton("List",function(){this.insertBeforeEachSelectedLine('[*]','[list]\n','\n[/list]')},function(){this.innerHTML='列表: [list] [*]文字 [*]文字 [/list] 或者 顺序列表: [list=1] [*]文字 [*]文字 [/list]';});
this._addButton("Img",function(){this.wrapSelection('[img]','[/img]');},function(){this.innerHTML='插入图像: [img]http://image_url[/img] (alt+p)';});
this._addButton("URL",function(){this.wrapSelection('[url]','[/url]');},function(){this.innerHTML='插入URL: [url]http://url[/url] 或 [url=http://url]URL文字[/url] (alt+w)';});
this._addButton("Flash",function(){this.wrapSelection('[flash=200,200]','[/flash]');},function(){this.innerHTML='插入Flash: [flash=宽,高]http://your_flash.swf[/flash]';});
this._addButton("Table",function(){this.injectEachSelectedLine(function(lines,line){lines.push("|"+line+"|");return lines;},'[table]\n','\n[/table]');},function(){this.innerHTML='插入表格: [table]用换行和|来编辑格子[/table]';});
var color_select=[
"<br />字体颜色: ",
"<select id='select_color'>",
"<option value='black' style='color: black;'>标准</option>",
"<option value='darkred' style='color: darkred;'>深红</option>",
"<option value='red' style='color: red;'>红色</option>",
"<option value='orange' style='color: orange;'>橙色</option>",
"<option value='brown' style='color: brown;'>棕色</option>",
"<option value='yellow' style='color: yellow;'>黄色</option>",
"<option value='green' style='color: green;'>绿色</option>",
"<option value='olive' style='color: olive;'>橄榄</option>",
"<option value='cyan' style='color: cyan;'>青色</option>",
"<option value='blue' style='color: blue;'>蓝色</option>",
"<option value='darkblue' style='color: darkblue;'>深蓝</option>",
"<option value='indigo' style='color: indigo;'>靛蓝</option>",
"<option value='violet' style='color: violet;'>紫色</option>",
"<option value='gray' style='color: gray;'>灰色</option>",
"<option value='white' style='color: white;'>白色</option>",
"<option value='black' style='color: black;'>黑色</option>",
"</select>"
];
this.toolbar.insert(color_select.join(""));
$('select_color').observe('change',this._change_color.bindAsEventListener(this.textarea));
$('select_color').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="字体颜色: [color=red]文字[/color] 提示:您可以使用 color=#FF0000";});
var font_select=[
" 字体大小: ",
"<select id='select_font'>",
"<option value='0'>标准</option>",
"<option value='xx-small'>1 (xx-small)</option>",
"<option value='x-small'>2 (x-small)</option>",
"<option value='small'>3 (small)</option>",
"<option value='medium'>4 (medium)</option>",
"<option value='large'>5 (large)</option>",
"<option value='x-large'>6 (x-large)</option>",
"<option value='xx-large'>7 (xx-large)</option>",
"</select>"
];
this.toolbar.insert(font_select.join(""));
$('select_font').observe('change',this._change_font.bindAsEventListener(this.textarea));
$('select_font').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="字体大小: [size=x-small]小字体文字[/size]";});
var align_select=[
" 对齐: ",
"<select id='select_align'>",
"<option value='0'>标准</option>",
"<option value='left'>居左</option>",
"<option value='center'>居中</option>",
"<option value='right'>居右</option>",
"</select>"
]
this.toolbar.insert(align_select.join(""));
$('select_align').observe('change',this._change_align.bindAsEventListener(this.textarea));
$('select_align').observe('mouseover',function(){$("bbcode_tooltip").innerHTML="对齐: [align=center]文字[/align]";});
},
_addButton:function(value,callback,tooltip,attrs){
var input=$(document.createElement('input'));
input.type="button";
input.value=value;
input.observe('click',callback.bindAsEventListener(this.textarea));
input.observe('mouseover',tooltip.bindAsEventListener(this.tooltip));
Object.extend(input,attrs||{});
this.toolbar.appendChild(input);
},
_change_color:function(){
this.wrapSelection('[color='+$F('select_color')+']','[/color]');
$('select_color').selectedIndex=0;
},
_change_font:function(){
this.wrapSelection('[size='+$F('select_font')+']','[/size]');
$('select_font').selectedIndex=0;
},
_change_align:function(){
this.wrapSelection('[align='+$F('select_align')+']','[/align]');
$('select_align').selectedIndex=0;
}
});if(typeof(tinyMCE)!='undefined'){
tinyMCE.init({
plugins:"javaeye,media,table,emotions,contextmenu,fullscreen,inlinepopups",
mode:"none",
language:"zh",
theme:"advanced",
theme_advanced_buttons1:"formatselect,fontselect,fontsizeselect,separator,forecolor,backcolor,separator,bold,italic,underline,strikethrough,separator,bullist,numlist",
theme_advanced_buttons2:"undo,redo,cut,copy,paste,separator,justifyleft,justifycenter,justifyright,separator,outdent,indent,separator,link,unlink,image,media,emotions,table,separator,quote,code,separator,fullscreen",
theme_advanced_buttons3:"",
theme_advanced_toolbar_location:"top",
theme_advanced_toolbar_align:"left",
theme_advanced_fonts:"宋体=宋体;黑体=黑体;仿宋=仿宋;楷体=楷体;隶书=隶书;幼圆=幼圆;Arial=arial,helvetica,sans-serif;Comic Sans MS=comic sans ms,sans-serif;Courier New=courier new,courier;Tahoma=tahoma,arial,helvetica,sans-serif;Times New Roman=times new roman,times;Verdana=verdana,geneva;Webdings=webdings;Wingdings=wingdings,zapf dingbats",
convert_fonts_to_spans:true,
remove_trailing_nbsp:true,
remove_linebreaks:false,
width:"100%",
extended_valid_elements:"pre[name|class],object[classid|codebase|width|height|align],param[name|value],embed[quality|type|pluginspage|width|height|src|align|wmode]",
relative_urls:false,
content_css:"/javascripts/tinymce/plugins/javaeye/css/content.css",
save_callback:"removeBRInPre"
});
}
// tinyMCE save_callback: converts <br> tags inside <pre> blocks back into real
// newlines so saved code blocks keep their line structure. The element_id and
// body parameters are part of the tinyMCE callback signature and are unused.
function removeBRInPre(element_id, html, body) {
    var preBlock = /<pre([^>]*)>((?:.|\n)*?)<\/pre>/gi;
    return html.replace(preBlock, function(match, attrs, content) {
        var restored = content.replace(/<br\s*\/?>\n*/gi, '\n');
        return '<pre' + attrs + '>' + restored + '</pre>';
    });
}
// JavaEye post editor controller. Wraps a <textarea> and lets the author
// switch between a BBCode source editor and a TinyMCE rich-text editor,
// render a server-side preview, and autosave drafts every 90 seconds.
// Depends on Prototype.js ($, Class, Object.extend, Ajax.*) and TinyMCE.
Control.TextArea.Editor=Class.create();
Object.extend(Control.TextArea.Editor.prototype,{
// Control.TextArea.BBCode instance, created lazily on first bbcode switch.
bbcode_editor:false,
// Set to true once the TinyMCE control has been attached at least once.
rich_editor:false,
// Current editing mode: "bbcode" or "rich" (false before first switchMode).
mode:false,
// True while the preview pane is displayed instead of the editor.
in_preview:false,
// textarea: id of the backing textarea; mode: initial mode;
// autosave: truthy to enable the draft autosave machinery.
initialize:function(textarea,mode,autosave){
this.editor_bbcode_flag=$("editor_bbcode_flag");
this.textarea=textarea;
this.switchMode(mode);
if(autosave)this._initAutosave();
},
// Switch between "bbcode" and "rich" modes. When convert is truthy the
// current text is sent to the server for BBCode<->HTML conversion (after
// user confirmation, since the conversion may be lossy).
switchMode:function(mode,convert){
// Returning from preview to the mode we were already in: just restore
// the tab highlighting and swap the panes; no editor change is needed.
if(this.in_preview&&this.mode==mode){
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").removeClassName("activetab");
$("editor_tab_"+mode).addClassName("activetab");
$("editor_preview").hide();
$("editor_main").show();
this.in_preview=false;
return;
}
if(this.mode==mode)return;
if(convert){
var old_text=this.getValue();
if(old_text!=""){
// Confirmation prompt (Chinese): "Switching editor mode may lose
// formatting and content, are you sure?"
if(!confirm("切换编辑器模式可能导致格式和内容丢失,你确定吗?"))return;
$('editor_switch_spinner').show();
}
}
this.mode=mode;
// Update the tab strip only if the mode switcher exists on this page.
if($("editor_switch")){
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").removeClassName("activetab");
$("editor_tab_"+mode).addClassName("activetab");
$("editor_preview").hide();
$("editor_main").show();
this.in_preview=false;
}
if(this.mode=="rich"){
// Hidden form field telling the server which markup the body uses.
this.editor_bbcode_flag.value="false";
if(this.bbcode_editor)this.bbcode_editor.hide();
this.rich_editor=true;
tinyMCE.execCommand('mceAddControl',false,this.textarea);
}else{
this.editor_bbcode_flag.value="true";
if(this.rich_editor)tinyMCE.execCommand('mceRemoveControl',false,this.textarea);
// Reuse the existing BBCode editor if one was created earlier.
this.bbcode_editor?this.bbcode_editor.show():this.bbcode_editor=new Control.TextArea.BBCode(this.textarea);
}
// Convert the old content to the new markup on the server, then load the
// result back into the (now switched) editor.
if(convert&&old_text!=""){
new Ajax.Request(this.mode=="rich"?'/editor/bbcode2html':'/editor/html2bbcode',{
method:'post',
parameters:{text:old_text},
asynchronous:true,
onSuccess:function(transport){this.setValue(transport.responseText);$('editor_switch_spinner').hide();}.bind(this)
});
}
},
// Current editor content: BBCode source or TinyMCE HTML, per active mode.
getValue:function(){
return this.mode=="bbcode"?this.bbcode_editor.textarea.element.value:tinyMCE.activeEditor.getContent();
},
// Replace the content of whichever editor is active.
setValue:function(value){
if(this.mode=="bbcode"){
this.bbcode_editor.textarea.element.value=value;
}else{
tinyMCE.get(this.textarea).setContent(value);
}
},
// Show a server-rendered preview of the current content in its own pane.
preview:function(){
this.in_preview=true;
$('editor_switch_spinner').show();
$("editor_preview").show();
$("editor_main").hide();
$("editor_tab_bbcode").removeClassName("activetab");
$("editor_tab_rich").removeClassName("activetab");
$("editor_tab_preview").addClassName("activetab");
new Ajax.Updater("editor_preview","/editor/preview",{
parameters:{text:this.getValue(),mode:this.mode},
evalScripts:true,
onSuccess:function(){$('editor_switch_spinner').hide();}
});
},
// Insert an image reference using the active mode's markup.
insertImage:function(url){
if(this.mode=="bbcode"){
this.bbcode_editor.textarea.insertAfterSelection("\n[img]"+url+"[/img]\n");
}else{
tinyMCE.execCommand("mceInsertContent", false, "<br/><img src='"+url+"'/><br/> ");
}
},
// Ask the server whether a draft already exists for this page URL, then
// start the 90-second periodic autosave timer.
_initAutosave:function(){
this.autosave_url=window.location.href;
new Ajax.Request('/editor/check_autosave',{
method:'post',
parameters:{url:this.autosave_url},
asynchronous:true,
onSuccess:this._loadAutosave.bind(this)
});
setInterval(this._autosave.bind(this),90*1000);
},
// Response body is "nil" or a JS object literal describing the saved draft.
// NOTE(review): eval() of the response trusts the server completely; the
// draft notice below offers "restore" or "discard" links (text in Chinese).
_loadAutosave:function(transport){
var text=transport.responseText;
if(text!="nil"){
eval("this.auto_save = "+text);
$('editor_auto_save_update').update('<span style="color:red">您有一份自动保存于'+this.auto_save.updated_at+'的草稿,<a href="#" onclick=\'editor._setAutosave();return false;\'>恢复</a>还是<a href="#" onclick=\'editor._discardAutosave();return false;\'>丢弃</a>呢?</span>');
}
},
// Restore the saved draft into the editor, switching mode to match it.
_setAutosave:function(){
$("editor_auto_save_id").value=this.auto_save.id;
$('editor_auto_save_update').update("");
this.auto_save.bbcode?this.switchMode("bbcode"):this.switchMode("rich");
this.setValue(this.auto_save.body);
},
// Dismiss the draft prompt, keeping its id so later autosaves overwrite it.
_discardAutosave:function(){
$("editor_auto_save_id").value=this.auto_save.id;
$('editor_auto_save_update').update("");
},
// Periodic draft save; bodies shorter than 100 characters are skipped.
_autosave:function(){
var body=this.getValue();
if(body.length<100)return;
new Ajax.Request('/editor/autosave',{
method:'post',
parameters:{
url:this.autosave_url,
body:body,
bbcode:this.mode=="bbcode"
},
asynchronous:true,
onSuccess:function(transport){
// Server returns the draft id; show a "draft autosaved at <time>" notice.
$('editor_auto_save_id').value=transport.responseText;
$('editor_auto_save_update').update('<span style="color:red">JavaEye编辑器帮您自动保存草稿于:'+new Date().toLocaleString()+'</span>');
}
});
}
});
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_14) on Tue Aug 17 01:05:10 EDT 2010 -->
<TITLE>
org.apache.hadoop.hdfs.server.namenode.metrics (Hadoop-Hdfs 0.21.0 API)
</TITLE>
<META NAME="date" CONTENT="2010-08-17">
<LINK REL="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style">
</HEAD>
<BODY BGCOLOR="white">
<FONT size="+1" CLASS="FrameTitleFont">
<A HREF="../../../../../../../org/apache/hadoop/hdfs/server/namenode/metrics/package-summary.html" target="classFrame">org.apache.hadoop.hdfs.server.namenode.metrics</A></FONT>
<TABLE BORDER="0" WIDTH="100%" SUMMARY="">
<TR>
<TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont">
Interfaces</FONT>
<FONT CLASS="FrameItemFont">
<BR>
<A HREF="FSNamesystemMBean.html" title="interface in org.apache.hadoop.hdfs.server.namenode.metrics" target="classFrame"><I>FSNamesystemMBean</I></A></FONT></TD>
</TR>
</TABLE>
<TABLE BORDER="0" WIDTH="100%" SUMMARY="">
<TR>
<TD NOWRAP><FONT size="+1" CLASS="FrameHeadingFont">
Classes</FONT>
<FONT CLASS="FrameItemFont">
<BR>
<A HREF="FSNamesystemMetrics.html" title="class in org.apache.hadoop.hdfs.server.namenode.metrics" target="classFrame">FSNamesystemMetrics</A>
<BR>
<A HREF="NameNodeActivityMBean.html" title="class in org.apache.hadoop.hdfs.server.namenode.metrics" target="classFrame">NameNodeActivityMBean</A>
<BR>
<A HREF="NameNodeMetrics.html" title="class in org.apache.hadoop.hdfs.server.namenode.metrics" target="classFrame">NameNodeMetrics</A></FONT></TD>
</TR>
</TABLE>
</BODY>
</HTML>
| Java |
// Lucene version compatibility level 4.8.1
using Lucene.Net.Analysis.TokenAttributes;
using Lucene.Net.Analysis.Util;
namespace Lucene.Net.Analysis.Cjk
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/// <summary>
/// A <see cref="TokenFilter"/> that normalizes CJK width differences:
/// <list type="bullet">
/// <item><description>Folds fullwidth ASCII variants into the equivalent basic latin</description></item>
/// <item><description>Folds halfwidth Katakana variants into the equivalent kana</description></item>
/// </list>
/// <para>
/// NOTE: this filter can be viewed as a (practical) subset of NFKC/NFKD
/// Unicode normalization. See the normalization support in the ICU package
/// for full normalization.
/// </para>
/// </summary>
public sealed class CJKWidthFilter : TokenFilter
{
// Term text of the current token; IncrementToken mutates its buffer in place.
private ICharTermAttribute termAtt;
/// <summary>
/// halfwidth kana mappings, one entry per code point 0xFF65-0xFF9F
/// (index = code point - 0xFF65; 59 entries).
/// <para/>
/// note: the last two entries map the voiced/semi-voiced sound marks
/// 0xFF9E and 0xFF9F to the combining marks 0x3099 and 0x309A; these are
/// only used as a fallback when the mark cannot properly combine with a
/// preceding character into a composed form (see <see cref="Combine"/>).
/// </summary>
private static readonly char[] KANA_NORM = new char[] {
(char)0x30fb, (char)0x30f2, (char)0x30a1, (char)0x30a3, (char)0x30a5, (char)0x30a7, (char)0x30a9, (char)0x30e3, (char)0x30e5,
(char)0x30e7, (char)0x30c3, (char)0x30fc, (char)0x30a2, (char)0x30a4, (char)0x30a6, (char)0x30a8, (char)0x30aa, (char)0x30ab,
(char)0x30ad, (char)0x30af, (char)0x30b1, (char)0x30b3, (char)0x30b5, (char)0x30b7, (char)0x30b9, (char)0x30bb, (char)0x30bd,
(char)0x30bf, (char)0x30c1, (char)0x30c4, (char)0x30c6, (char)0x30c8, (char)0x30ca, (char)0x30cb, (char)0x30cc, (char)0x30cd,
(char)0x30ce, (char)0x30cf, (char)0x30d2, (char)0x30d5, (char)0x30d8, (char)0x30db, (char)0x30de, (char)0x30df, (char)0x30e0,
(char)0x30e1, (char)0x30e2, (char)0x30e4, (char)0x30e6, (char)0x30e8, (char)0x30e9, (char)0x30ea, (char)0x30eb, (char)0x30ec,
(char)0x30ed, (char)0x30ef, (char)0x30f3, (char)0x3099, (char)0x309A
};
public CJKWidthFilter(TokenStream input)
: base(input)
{
termAtt = AddAttribute<ICharTermAttribute>();
}
/// <summary>
/// Advances to the next token and rewrites its term text in place:
/// fullwidth ASCII is shifted down to basic latin, and halfwidth katakana
/// is replaced via <see cref="KANA_NORM"/>; voice marks that combine with
/// the preceding kana are folded in and deleted from the buffer.
/// </summary>
public override bool IncrementToken()
{
if (m_input.IncrementToken())
{
char[] text = termAtt.Buffer;
int length = termAtt.Length;
for (int i = 0; i < length; i++)
{
char ch = text[i];
if (ch >= 0xFF01 && ch <= 0xFF5E)
{
// Fullwidth ASCII variants: constant offset to the basic latin block.
text[i] = (char)(text[i] - 0xFEE0);
}
else if (ch >= 0xFF65 && ch <= 0xFF9F)
{
// Halfwidth Katakana variants. 0xFF9E/0xFF9F are the (semi-)voiced
// sound marks: try to merge them into the preceding kana first and,
// on success, remove the mark character from the buffer (i-- so the
// character shifted into this slot is re-examined).
if ((ch == 0xFF9E || ch == 0xFF9F) && i > 0 && Combine(text, i, ch))
{
length = StemmerUtil.Delete(text, i--, length);
}
else
{
text[i] = KANA_NORM[ch - 0xFF65];
}
}
}
termAtt.Length = length;
return true;
}
else
{
return false;
}
}
/// <summary>kana combining diffs, indexed by code point - 0x30A6 for the
/// range 0x30A6-0x30FD; a zero entry means "does not combine". </summary>
private static readonly sbyte[] KANA_COMBINE_VOICED = new sbyte[] {
78, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1,
0, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 0, 0, 1,
0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 8, 8, 8, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1
};
private static readonly sbyte[] KANA_COMBINE_HALF_VOICED = new sbyte[] {
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 2, 0, 0, 2,
0, 0, 2, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
};
/// <summary>
/// returns true if we successfully combined the voice mark </summary>
private static bool Combine(char[] text, int pos, char ch)
{
char prev = text[pos - 1];
if (prev >= 0x30A6 && prev <= 0x30FD)
{
// Add the (possibly zero) diff for the preceding kana; a zero diff
// leaves prev unchanged, so the equality test below reports failure
// and the caller falls back to the standalone combining mark.
text[pos - 1] += (char)((ch == 0xFF9F) ? KANA_COMBINE_HALF_VOICED[prev - 0x30A6] : KANA_COMBINE_VOICED[prev - 0x30A6]);
return text[pos - 1] != prev;
}
return false;
}
}
} | Java |
/*
* Copyright (c) 2010-2014 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.testing.longtest;
import com.evolveum.midpoint.common.LoggingConfigurationManager;
import com.evolveum.midpoint.common.ProfilingConfigurationManager;
import com.evolveum.midpoint.model.impl.sync.ReconciliationTaskHandler;
import com.evolveum.midpoint.model.test.AbstractModelIntegrationTest;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.schema.ResultHandler;
import com.evolveum.midpoint.schema.constants.MidPointConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.util.ObjectQueryUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.test.util.MidPointTestConstants;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.xml.ns._public.common.common_3.AssignmentPolicyEnforcementType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectTemplateType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.RoleType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemConfigurationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.SystemObjectsType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.mutable.MutableInt;
import org.opends.server.types.Entry;
import org.opends.server.types.LDIFImportConfig;
import org.opends.server.util.LDIFException;
import org.opends.server.util.LDIFReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.annotation.DirtiesContext.ClassMode;
import org.springframework.test.context.ContextConfiguration;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.IOException;
import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertEquals;
/**
* Mix of various tests for issues that are difficult to replicate using dummy resources.
*
* @author Radovan Semancik
*
*/
@ContextConfiguration(locations = {"classpath:ctx-longtest-test-main.xml"})
@DirtiesContext(classMode = ClassMode.AFTER_CLASS)
public class TestLdapComplex extends AbstractModelIntegrationTest {
public static final File TEST_DIR = new File(MidPointTestConstants.TEST_RESOURCES_DIR, "ldap-complex");
public static final File SYSTEM_CONFIGURATION_FILE = new File(COMMON_DIR, "system-configuration.xml");
public static final String SYSTEM_CONFIGURATION_OID = SystemObjectsType.SYSTEM_CONFIGURATION.value();
public static final File USER_TEMPLATE_FILE = new File(TEST_DIR, "user-template.xml");
protected static final File USER_ADMINISTRATOR_FILE = new File(COMMON_DIR, "user-administrator.xml");
protected static final String USER_ADMINISTRATOR_OID = "00000000-0000-0000-0000-000000000002";
protected static final String USER_ADMINISTRATOR_USERNAME = "administrator";
protected static final File ROLE_SUPERUSER_FILE = new File(COMMON_DIR, "role-superuser.xml");
protected static final String ROLE_SUPERUSER_OID = "00000000-0000-0000-0000-000000000004";
protected static final File ROLE_CAPTAIN_FILE = new File(TEST_DIR, "role-captain.xml");
protected static final File ROLE_JUDGE_FILE = new File(TEST_DIR, "role-judge.xml");
protected static final File ROLE_PIRATE_FILE = new File(TEST_DIR, "role-pirate.xml");
protected static final File ROLE_SAILOR_FILE = new File(TEST_DIR, "role-sailor.xml");
protected static final String ROLE_PIRATE_OID = "12345678-d34d-b33f-f00d-555555556603";
protected static final File ROLES_LDIF_FILE = new File(TEST_DIR, "roles.ldif");
protected static final File RESOURCE_OPENDJ_FILE = new File(COMMON_DIR, "resource-opendj-complex.xml");
protected static final String RESOURCE_OPENDJ_NAME = "Localhost OpenDJ";
protected static final String RESOURCE_OPENDJ_OID = "10000000-0000-0000-0000-000000000003";
protected static final String RESOURCE_OPENDJ_NAMESPACE = MidPointConstants.NS_RI;
// Make it at least 1501 so it will go over the 3000 entries size limit
private static final int NUM_LDAP_ENTRIES = 1000;
private static final String LDAP_GROUP_PIRATES_DN = "cn=Pirates,ou=groups,dc=example,dc=com";
protected ResourceType resourceOpenDjType;
protected PrismObject<ResourceType> resourceOpenDj;
@Autowired
private ReconciliationTaskHandler reconciliationTaskHandler;
@Override
protected void startResources() throws Exception {
openDJController.startCleanServer();
}
@AfterClass
public static void stopResources() throws Exception {
openDJController.stop();
}
@Override
public void initSystem(Task initTask, OperationResult initResult) throws Exception {
super.initSystem(initTask, initResult);
modelService.postInit(initResult);
// System Configuration
PrismObject<SystemConfigurationType> config;
try {
config = repoAddObjectFromFile(SYSTEM_CONFIGURATION_FILE, SystemConfigurationType.class, initResult);
} catch (ObjectAlreadyExistsException e) {
throw new ObjectAlreadyExistsException("System configuration already exists in repository;" +
"looks like the previous test haven't cleaned it up", e);
}
LoggingConfigurationManager.configure(
ProfilingConfigurationManager.checkSystemProfilingConfiguration(config),
config.asObjectable().getVersion(), initResult);
// administrator
PrismObject<UserType> userAdministrator = repoAddObjectFromFile(USER_ADMINISTRATOR_FILE, UserType.class, initResult);
repoAddObjectFromFile(ROLE_SUPERUSER_FILE, RoleType.class, initResult);
login(userAdministrator);
// Roles
repoAddObjectFromFile(ROLE_CAPTAIN_FILE, RoleType.class, initResult);
repoAddObjectFromFile(ROLE_JUDGE_FILE, RoleType.class, initResult);
repoAddObjectFromFile(ROLE_PIRATE_FILE, RoleType.class, initResult);
repoAddObjectFromFile(ROLE_SAILOR_FILE, RoleType.class, initResult);
// templates
repoAddObjectFromFile(USER_TEMPLATE_FILE, ObjectTemplateType.class, initResult);
// Resources
resourceOpenDj = importAndGetObjectFromFile(ResourceType.class, RESOURCE_OPENDJ_FILE, RESOURCE_OPENDJ_OID, initTask, initResult);
resourceOpenDjType = resourceOpenDj.asObjectable();
openDJController.setResource(resourceOpenDj);
assumeAssignmentPolicy(AssignmentPolicyEnforcementType.RELATIVE);
openDJController.addEntriesFromLdifFile(ROLES_LDIF_FILE.getPath());
display("initial LDAP content", openDJController.dumpEntries());
}
@Test
public void test100BigImport() throws Exception {
final String TEST_NAME = "test100BigImport";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
loadEntries("u");
Task task = taskManager.createTaskInstance(TestLdapComplex.class.getName() + "." + TEST_NAME);
task.setOwner(getUser(USER_ADMINISTRATOR_OID));
OperationResult result = task.getResult();
// WHEN
TestUtil.displayWhen(TEST_NAME);
//task.setExtensionPropertyValue(SchemaConstants.MODEL_EXTENSION_WORKER_THREADS, 2);
modelService.importFromResource(RESOURCE_OPENDJ_OID,
new QName(RESOURCE_OPENDJ_NAMESPACE, "AccountObjectClass"), task, result);
// THEN
TestUtil.displayThen(TEST_NAME);
OperationResult subresult = result.getLastSubresult();
TestUtil.assertInProgress("importAccountsFromResource result", subresult);
waitForTaskFinish(task, true, 20000 + NUM_LDAP_ENTRIES*2000);
// THEN
TestUtil.displayThen(TEST_NAME);
int userCount = modelService.countObjects(UserType.class, null, null, task, result);
display("Users", userCount);
assertEquals("Unexpected number of users", NUM_LDAP_ENTRIES+4, userCount);
assertUser("u1", task, result);
}
private void assertUser(String name, Task task, OperationResult result) throws com.evolveum.midpoint.util.exception.ObjectNotFoundException, com.evolveum.midpoint.util.exception.SchemaException, com.evolveum.midpoint.util.exception.SecurityViolationException, com.evolveum.midpoint.util.exception.CommunicationException, com.evolveum.midpoint.util.exception.ConfigurationException {
UserType user = findUserByUsername("u1").asObjectable();
display("user " + name, user.asPrismObject());
assertEquals("Wrong number of assignments", 4, user.getAssignment().size());
}
@Test(enabled = false)
public void test120BigReconciliation() throws Exception {
final String TEST_NAME = "test120BigReconciliation";
TestUtil.displayTestTile(this, TEST_NAME);
// GIVEN
Task task = taskManager.createTaskInstance(TestLdapComplex.class.getName() + "." + TEST_NAME);
task.setOwner(getUser(USER_ADMINISTRATOR_OID));
OperationResult result = task.getResult();
// WHEN
TestUtil.displayWhen(TEST_NAME);
//task.setExtensionPropertyValue(SchemaConstants.MODEL_EXTENSION_WORKER_THREADS, 2);
ResourceType resource = modelService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, task, result).asObjectable();
reconciliationTaskHandler.launch(resource,
new QName(RESOURCE_OPENDJ_NAMESPACE, "AccountObjectClass"), task, result);
// THEN
TestUtil.displayThen(TEST_NAME);
// TODO
// OperationResult subresult = result.getLastSubresult();
// TestUtil.assertInProgress("reconciliation launch result", subresult);
waitForTaskFinish(task, true, 20000 + NUM_LDAP_ENTRIES*2000);
// THEN
TestUtil.displayThen(TEST_NAME);
int userCount = modelService.countObjects(UserType.class, null, null, task, result);
display("Users", userCount);
assertEquals("Unexpected number of users", NUM_LDAP_ENTRIES+4, userCount);
assertUser("u1", task, result);
}
private void loadEntries(String prefix) throws LDIFException, IOException {
long ldapPopStart = System.currentTimeMillis();
for(int i=0; i < NUM_LDAP_ENTRIES; i++) {
String name = "user"+i;
Entry entry = createEntry(prefix+i, name);
openDJController.addEntry(entry);
}
long ldapPopEnd = System.currentTimeMillis();
display("Loaded "+NUM_LDAP_ENTRIES+" LDAP entries in "+((ldapPopEnd-ldapPopStart)/1000)+" seconds");
}
private Entry createEntry(String uid, String name) throws IOException, LDIFException {
StringBuilder sb = new StringBuilder();
String dn = "uid="+uid+","+openDJController.getSuffixPeople();
sb.append("dn: ").append(dn).append("\n");
sb.append("objectClass: inetOrgPerson\n");
sb.append("uid: ").append(uid).append("\n");
sb.append("cn: ").append(name).append("\n");
sb.append("sn: ").append(name).append("\n");
LDIFImportConfig importConfig = new LDIFImportConfig(IOUtils.toInputStream(sb.toString(), "utf-8"));
LDIFReader ldifReader = new LDIFReader(importConfig);
Entry ldifEntry = ldifReader.readEntry();
return ldifEntry;
}
private String toDn(String username) {
return "uid="+username+","+OPENDJ_PEOPLE_SUFFIX;
}
}
| Java |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/dynamodb/DynamoDB_EXPORTS.h>
#include <aws/dynamodb/model/AttributeValue.h>
#include <aws/dynamodb/model/ComparisonOperator.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <utility>
namespace Aws
{
namespace Utils
{
namespace Json
{
class JsonValue;
class JsonView;
} // namespace Json
} // namespace Utils
namespace DynamoDB
{
namespace Model
{
/**
* <p>Represents a condition to be compared with an attribute value. This condition
* can be used with <code>DeleteItem</code>, <code>PutItem</code>, or
* <code>UpdateItem</code> operations; if the comparison evaluates to true, the
* operation succeeds; if not, the operation fails. You can use
* <code>ExpectedAttributeValue</code> in one of two different ways:</p> <ul> <li>
* <p>Use <code>AttributeValueList</code> to specify one or more values to compare
* against an attribute. Use <code>ComparisonOperator</code> to specify how you
* want to perform the comparison. If the comparison evaluates to true, then the
* conditional operation succeeds.</p> </li> <li> <p>Use <code>Value</code> to
* specify a value that DynamoDB will compare against an attribute. If the values
* match, then <code>ExpectedAttributeValue</code> evaluates to true and the
* conditional operation succeeds. Optionally, you can also set <code>Exists</code>
* to false, indicating that you <i>do not</i> expect to find the attribute value
* in the table. In this case, the conditional operation succeeds only if the
* comparison evaluates to false.</p> </li> </ul> <p> <code>Value</code> and
* <code>Exists</code> are incompatible with <code>AttributeValueList</code> and
* <code>ComparisonOperator</code>. Note that if you use both sets of parameters at
* once, DynamoDB will return a <code>ValidationException</code>
* exception.</p><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/dynamodb-2012-08-10/ExpectedAttributeValue">AWS
* API Reference</a></p>
*/
class AWS_DYNAMODB_API ExpectedAttributeValue
{
public:
ExpectedAttributeValue();
ExpectedAttributeValue(Aws::Utils::Json::JsonView jsonValue);
ExpectedAttributeValue& operator=(Aws::Utils::Json::JsonView jsonValue);
Aws::Utils::Json::JsonValue Jsonize() const;
/**
* <p>Represents the data for the expected attribute.</p> <p>Each attribute value
* is described as a name-value pair. The name is the data type, and the value is
* the data itself.</p> <p>For more information, see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes">Data
* Types</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline const AttributeValue& GetValue() const{ return m_value; }
/**
* <p>Represents the data for the expected attribute.</p> <p>Each attribute value
* is described as a name-value pair. The name is the data type, and the value is
* the data itself.</p> <p>For more information, see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes">Data
* Types</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline bool ValueHasBeenSet() const { return m_valueHasBeenSet; }
/**
* <p>Represents the data for the expected attribute.</p> <p>Each attribute value
* is described as a name-value pair. The name is the data type, and the value is
* the data itself.</p> <p>For more information, see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes">Data
* Types</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline void SetValue(const AttributeValue& value) { m_valueHasBeenSet = true; m_value = value; }
/**
* <p>Represents the data for the expected attribute.</p> <p>Each attribute value
* is described as a name-value pair. The name is the data type, and the value is
* the data itself.</p> <p>For more information, see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes">Data
* Types</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline void SetValue(AttributeValue&& value) { m_valueHasBeenSet = true; m_value = std::move(value); }
/**
* <p>Represents the data for the expected attribute.</p> <p>Each attribute value
* is described as a name-value pair. The name is the data type, and the value is
* the data itself.</p> <p>For more information, see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes">Data
* Types</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline ExpectedAttributeValue& WithValue(const AttributeValue& value) { SetValue(value); return *this;}
/**
* <p>Represents the data for the expected attribute.</p> <p>Each attribute value
* is described as a name-value pair. The name is the data type, and the value is
* the data itself.</p> <p>For more information, see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/HowItWorks.NamingRulesDataTypes.html#HowItWorks.DataTypes">Data
* Types</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline ExpectedAttributeValue& WithValue(AttributeValue&& value) { SetValue(std::move(value)); return *this;}
/**
* <p>Causes DynamoDB to evaluate the value before attempting a conditional
* operation:</p> <ul> <li> <p>If <code>Exists</code> is <code>true</code>,
* DynamoDB will check to see if that attribute value already exists in the table.
* If it is found, then the operation succeeds. If it is not found, the operation
* fails with a <code>ConditionCheckFailedException</code>.</p> </li> <li> <p>If
* <code>Exists</code> is <code>false</code>, DynamoDB assumes that the attribute
* value does not exist in the table. If in fact the value does not exist, then the
* assumption is valid and the operation succeeds. If the value is found, despite
* the assumption that it does not exist, the operation fails with a
* <code>ConditionCheckFailedException</code>.</p> </li> </ul> <p>The default
* setting for <code>Exists</code> is <code>true</code>. If you supply a
* <code>Value</code> all by itself, DynamoDB assumes the attribute exists: You
* don't have to set <code>Exists</code> to <code>true</code>, because it is
* implied.</p> <p>DynamoDB returns a <code>ValidationException</code> if:</p> <ul>
* <li> <p> <code>Exists</code> is <code>true</code> but there is no
* <code>Value</code> to check. (You expect a value to exist, but don't specify
* what that value is.)</p> </li> <li> <p> <code>Exists</code> is
* <code>false</code> but you also provide a <code>Value</code>. (You cannot expect
* an attribute to have a value, while also expecting it not to exist.)</p> </li>
* </ul>
*/
inline bool GetExists() const{ return m_exists; }
/**
* <p>Causes DynamoDB to evaluate the value before attempting a conditional
* operation:</p> <ul> <li> <p>If <code>Exists</code> is <code>true</code>,
* DynamoDB will check to see if that attribute value already exists in the table.
* If it is found, then the operation succeeds. If it is not found, the operation
* fails with a <code>ConditionCheckFailedException</code>.</p> </li> <li> <p>If
* <code>Exists</code> is <code>false</code>, DynamoDB assumes that the attribute
* value does not exist in the table. If in fact the value does not exist, then the
* assumption is valid and the operation succeeds. If the value is found, despite
* the assumption that it does not exist, the operation fails with a
* <code>ConditionCheckFailedException</code>.</p> </li> </ul> <p>The default
* setting for <code>Exists</code> is <code>true</code>. If you supply a
* <code>Value</code> all by itself, DynamoDB assumes the attribute exists: You
* don't have to set <code>Exists</code> to <code>true</code>, because it is
* implied.</p> <p>DynamoDB returns a <code>ValidationException</code> if:</p> <ul>
* <li> <p> <code>Exists</code> is <code>true</code> but there is no
* <code>Value</code> to check. (You expect a value to exist, but don't specify
* what that value is.)</p> </li> <li> <p> <code>Exists</code> is
* <code>false</code> but you also provide a <code>Value</code>. (You cannot expect
* an attribute to have a value, while also expecting it not to exist.)</p> </li>
* </ul>
*/
inline bool ExistsHasBeenSet() const { return m_existsHasBeenSet; }
/**
* <p>Causes DynamoDB to evaluate the value before attempting a conditional
* operation:</p> <ul> <li> <p>If <code>Exists</code> is <code>true</code>,
* DynamoDB will check to see if that attribute value already exists in the table.
* If it is found, then the operation succeeds. If it is not found, the operation
* fails with a <code>ConditionCheckFailedException</code>.</p> </li> <li> <p>If
* <code>Exists</code> is <code>false</code>, DynamoDB assumes that the attribute
* value does not exist in the table. If in fact the value does not exist, then the
* assumption is valid and the operation succeeds. If the value is found, despite
* the assumption that it does not exist, the operation fails with a
* <code>ConditionCheckFailedException</code>.</p> </li> </ul> <p>The default
* setting for <code>Exists</code> is <code>true</code>. If you supply a
* <code>Value</code> all by itself, DynamoDB assumes the attribute exists: You
* don't have to set <code>Exists</code> to <code>true</code>, because it is
* implied.</p> <p>DynamoDB returns a <code>ValidationException</code> if:</p> <ul>
* <li> <p> <code>Exists</code> is <code>true</code> but there is no
* <code>Value</code> to check. (You expect a value to exist, but don't specify
* what that value is.)</p> </li> <li> <p> <code>Exists</code> is
* <code>false</code> but you also provide a <code>Value</code>. (You cannot expect
* an attribute to have a value, while also expecting it not to exist.)</p> </li>
* </ul>
*/
inline void SetExists(bool value) { m_existsHasBeenSet = true; m_exists = value; }
/**
* <p>Causes DynamoDB to evaluate the value before attempting a conditional
* operation:</p> <ul> <li> <p>If <code>Exists</code> is <code>true</code>,
* DynamoDB will check to see if that attribute value already exists in the table.
* If it is found, then the operation succeeds. If it is not found, the operation
* fails with a <code>ConditionCheckFailedException</code>.</p> </li> <li> <p>If
* <code>Exists</code> is <code>false</code>, DynamoDB assumes that the attribute
* value does not exist in the table. If in fact the value does not exist, then the
* assumption is valid and the operation succeeds. If the value is found, despite
* the assumption that it does not exist, the operation fails with a
* <code>ConditionCheckFailedException</code>.</p> </li> </ul> <p>The default
* setting for <code>Exists</code> is <code>true</code>. If you supply a
* <code>Value</code> all by itself, DynamoDB assumes the attribute exists: You
* don't have to set <code>Exists</code> to <code>true</code>, because it is
* implied.</p> <p>DynamoDB returns a <code>ValidationException</code> if:</p> <ul>
* <li> <p> <code>Exists</code> is <code>true</code> but there is no
* <code>Value</code> to check. (You expect a value to exist, but don't specify
* what that value is.)</p> </li> <li> <p> <code>Exists</code> is
* <code>false</code> but you also provide a <code>Value</code>. (You cannot expect
* an attribute to have a value, while also expecting it not to exist.)</p> </li>
* </ul>
*/
inline ExpectedAttributeValue& WithExists(bool value) { SetExists(value); return *this;}
/**
* <p>A comparator for evaluating attributes in the
* <code>AttributeValueList</code>. For example, equals, greater than, less than,
* etc.</p> <p>The following comparison operators are available:</p> <p> <code>EQ |
* NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH
* | IN | BETWEEN</code> </p> <p>The following are descriptions of each comparison
* operator.</p> <ul> <li> <p> <code>EQ</code> : Equal. <code>EQ</code> is
* supported for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, Binary, String Set, Number Set, or Binary Set.
* If an item contains an <code>AttributeValue</code> element of a different type
* than the one provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>NE</code> : Not equal. <code>NE</code> is supported
* for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String, Number, Binary, String Set, Number Set, or Binary Set. If an
* item contains an <code>AttributeValue</code> of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>LE</code> : Less than or equal. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>LT</code> : Less than. </p> <p> <code>AttributeValueList</code> can
* contain only one <code>AttributeValue</code> of type String, Number, or Binary
* (not a set type). If an item contains an <code>AttributeValue</code> element of
* a different type than the one provided in the request, the value does not match.
* For example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GE</code> : Greater than or equal.
* </p> <p> <code>AttributeValueList</code> can contain only one
* <code>AttributeValue</code> element of type String, Number, or Binary (not a set
* type). If an item contains an <code>AttributeValue</code> element of a different
* type than the one provided in the request, the value does not match. For
* example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GT</code> : Greater than. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>NOT_NULL</code> : The attribute exists. <code>NOT_NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the existence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using
* <code>NOT_NULL</code>, the result is a Boolean <code>true</code>. This result is
* because the attribute "<code>a</code>" exists; its data type is not relevant to
* the <code>NOT_NULL</code> comparison operator.</p> </li> <li> <p>
* <code>NULL</code> : The attribute does not exist. <code>NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the nonexistence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using <code>NULL</code>,
* the result is a Boolean <code>false</code>. This is because the attribute
* "<code>a</code>" exists; its data type is not relevant to the <code>NULL</code>
* comparison operator.</p> </li> <li> <p> <code>CONTAINS</code> : Checks
* for a subsequence, or value in a set.</p> <p> <code>AttributeValueList</code>
* can contain only one <code>AttributeValue</code> element of type String, Number,
* or Binary (not a set type). If the target attribute of the comparison is of type
* String, then the operator checks for a substring match. If the target attribute
* of the comparison is of type Binary, then the operator looks for a subsequence
* of the target that matches the input. If the target attribute of the comparison
* is a set ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the
* operator evaluates to true if it finds an exact match with any member of the
* set.</p> <p>CONTAINS is supported for lists: When evaluating "<code>a CONTAINS
* b</code>", "<code>a</code>" can be a list; however, "<code>b</code>" cannot be a
* set, a map, or a list.</p> </li> <li> <p> <code>NOT_CONTAINS</code> : Checks for
* absence of a subsequence, or absence of a value in a set.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If the target
* attribute of the comparison is a String, then the operator checks for the
* absence of a substring match. If the target attribute of the comparison is
* Binary, then the operator checks for the absence of a subsequence of the target
* that matches the input. If the target attribute of the comparison is a set
* ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the operator
* evaluates to true if it <i>does not</i> find an exact match with any member of
* the set.</p> <p>NOT_CONTAINS is supported for lists: When evaluating "<code>a
* NOT CONTAINS b</code>", "<code>a</code>" can be a list; however,
* "<code>b</code>" cannot be a set, a map, or a list.</p> </li> <li> <p>
* <code>BEGINS_WITH</code> : Checks for a prefix. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String or Binary (not a Number or a set type). The target attribute of
* the comparison must be of type String or Binary (not a Number or a set
* type).</p> <p/> </li> <li> <p> <code>IN</code> : Checks for matching elements in
* a list.</p> <p> <code>AttributeValueList</code> can contain one or more
* <code>AttributeValue</code> elements of type String, Number, or Binary. These
* attributes are compared against an existing attribute of an item. If any
* elements of the input are equal to the item attribute, the expression evaluates
* to true.</p> </li> <li> <p> <code>BETWEEN</code> : Greater than or equal to the
* first value, and less than or equal to the second value. </p> <p>
* <code>AttributeValueList</code> must contain two <code>AttributeValue</code>
* elements of the same type, either String, Number, or Binary (not a set type). A
* target attribute matches if the target value is greater than, or equal to, the
* first element and less than, or equal to, the second element. If an item
* contains an <code>AttributeValue</code> element of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not compare to <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2", "1"]}</code>
* </p> </li> </ul>
*/
inline const ComparisonOperator& GetComparisonOperator() const{ return m_comparisonOperator; }
/**
* <p>A comparator for evaluating attributes in the
* <code>AttributeValueList</code>. For example, equals, greater than, less than,
* etc.</p> <p>The following comparison operators are available:</p> <p> <code>EQ |
* NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH
* | IN | BETWEEN</code> </p> <p>The following are descriptions of each comparison
* operator.</p> <ul> <li> <p> <code>EQ</code> : Equal. <code>EQ</code> is
* supported for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, Binary, String Set, Number Set, or Binary Set.
* If an item contains an <code>AttributeValue</code> element of a different type
* than the one provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>NE</code> : Not equal. <code>NE</code> is supported
* for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String, Number, Binary, String Set, Number Set, or Binary Set. If an
* item contains an <code>AttributeValue</code> of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>LE</code> : Less than or equal. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>LT</code> : Less than. </p> <p> <code>AttributeValueList</code> can
* contain only one <code>AttributeValue</code> of type String, Number, or Binary
* (not a set type). If an item contains an <code>AttributeValue</code> element of
* a different type than the one provided in the request, the value does not match.
* For example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GE</code> : Greater than or equal.
* </p> <p> <code>AttributeValueList</code> can contain only one
* <code>AttributeValue</code> element of type String, Number, or Binary (not a set
* type). If an item contains an <code>AttributeValue</code> element of a different
* type than the one provided in the request, the value does not match. For
* example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GT</code> : Greater than. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>NOT_NULL</code> : The attribute exists. <code>NOT_NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the existence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using
* <code>NOT_NULL</code>, the result is a Boolean <code>true</code>. This result is
* because the attribute "<code>a</code>" exists; its data type is not relevant to
* the <code>NOT_NULL</code> comparison operator.</p> </li> <li> <p>
* <code>NULL</code> : The attribute does not exist. <code>NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the nonexistence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using <code>NULL</code>,
* the result is a Boolean <code>false</code>. This is because the attribute
* "<code>a</code>" exists; its data type is not relevant to the <code>NULL</code>
* comparison operator.</p> </li> <li> <p> <code>CONTAINS</code> : Checks
* for a subsequence, or value in a set.</p> <p> <code>AttributeValueList</code>
* can contain only one <code>AttributeValue</code> element of type String, Number,
* or Binary (not a set type). If the target attribute of the comparison is of type
* String, then the operator checks for a substring match. If the target attribute
* of the comparison is of type Binary, then the operator looks for a subsequence
* of the target that matches the input. If the target attribute of the comparison
* is a set ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the
* operator evaluates to true if it finds an exact match with any member of the
* set.</p> <p>CONTAINS is supported for lists: When evaluating "<code>a CONTAINS
* b</code>", "<code>a</code>" can be a list; however, "<code>b</code>" cannot be a
* set, a map, or a list.</p> </li> <li> <p> <code>NOT_CONTAINS</code> : Checks for
* absence of a subsequence, or absence of a value in a set.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If the target
* attribute of the comparison is a String, then the operator checks for the
* absence of a substring match. If the target attribute of the comparison is
* Binary, then the operator checks for the absence of a subsequence of the target
* that matches the input. If the target attribute of the comparison is a set
* ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the operator
* evaluates to true if it <i>does not</i> find an exact match with any member of
* the set.</p> <p>NOT_CONTAINS is supported for lists: When evaluating "<code>a
* NOT CONTAINS b</code>", "<code>a</code>" can be a list; however,
* "<code>b</code>" cannot be a set, a map, or a list.</p> </li> <li> <p>
* <code>BEGINS_WITH</code> : Checks for a prefix. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String or Binary (not a Number or a set type). The target attribute of
* the comparison must be of type String or Binary (not a Number or a set
* type).</p> <p/> </li> <li> <p> <code>IN</code> : Checks for matching elements in
* a list.</p> <p> <code>AttributeValueList</code> can contain one or more
* <code>AttributeValue</code> elements of type String, Number, or Binary. These
* attributes are compared against an existing attribute of an item. If any
* elements of the input are equal to the item attribute, the expression evaluates
* to true.</p> </li> <li> <p> <code>BETWEEN</code> : Greater than or equal to the
* first value, and less than or equal to the second value. </p> <p>
* <code>AttributeValueList</code> must contain two <code>AttributeValue</code>
* elements of the same type, either String, Number, or Binary (not a set type). A
* target attribute matches if the target value is greater than, or equal to, the
* first element and less than, or equal to, the second element. If an item
* contains an <code>AttributeValue</code> element of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not compare to <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2", "1"]}</code>
* </p> </li> </ul>
*/
inline bool ComparisonOperatorHasBeenSet() const { return m_comparisonOperatorHasBeenSet; }
/**
* <p>A comparator for evaluating attributes in the
* <code>AttributeValueList</code>. For example, equals, greater than, less than,
* etc.</p> <p>The following comparison operators are available:</p> <p> <code>EQ |
* NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH
* | IN | BETWEEN</code> </p> <p>The following are descriptions of each comparison
* operator.</p> <ul> <li> <p> <code>EQ</code> : Equal. <code>EQ</code> is
* supported for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, Binary, String Set, Number Set, or Binary Set.
* If an item contains an <code>AttributeValue</code> element of a different type
* than the one provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>NE</code> : Not equal. <code>NE</code> is supported
* for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String, Number, Binary, String Set, Number Set, or Binary Set. If an
* item contains an <code>AttributeValue</code> of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>LE</code> : Less than or equal. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>LT</code> : Less than. </p> <p> <code>AttributeValueList</code> can
* contain only one <code>AttributeValue</code> of type String, Number, or Binary
* (not a set type). If an item contains an <code>AttributeValue</code> element of
* a different type than the one provided in the request, the value does not match.
* For example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GE</code> : Greater than or equal.
* </p> <p> <code>AttributeValueList</code> can contain only one
* <code>AttributeValue</code> element of type String, Number, or Binary (not a set
* type). If an item contains an <code>AttributeValue</code> element of a different
* type than the one provided in the request, the value does not match. For
* example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GT</code> : Greater than. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>NOT_NULL</code> : The attribute exists. <code>NOT_NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the existence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using
* <code>NOT_NULL</code>, the result is a Boolean <code>true</code>. This result is
* because the attribute "<code>a</code>" exists; its data type is not relevant to
* the <code>NOT_NULL</code> comparison operator.</p> </li> <li> <p>
* <code>NULL</code> : The attribute does not exist. <code>NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the nonexistence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using <code>NULL</code>,
* the result is a Boolean <code>false</code>. This is because the attribute
* "<code>a</code>" exists; its data type is not relevant to the <code>NULL</code>
* comparison operator.</p> </li> <li> <p> <code>CONTAINS</code> : Checks
* for a subsequence, or value in a set.</p> <p> <code>AttributeValueList</code>
* can contain only one <code>AttributeValue</code> element of type String, Number,
* or Binary (not a set type). If the target attribute of the comparison is of type
* String, then the operator checks for a substring match. If the target attribute
* of the comparison is of type Binary, then the operator looks for a subsequence
* of the target that matches the input. If the target attribute of the comparison
* is a set ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the
* operator evaluates to true if it finds an exact match with any member of the
* set.</p> <p>CONTAINS is supported for lists: When evaluating "<code>a CONTAINS
* b</code>", "<code>a</code>" can be a list; however, "<code>b</code>" cannot be a
* set, a map, or a list.</p> </li> <li> <p> <code>NOT_CONTAINS</code> : Checks for
* absence of a subsequence, or absence of a value in a set.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If the target
* attribute of the comparison is a String, then the operator checks for the
* absence of a substring match. If the target attribute of the comparison is
* Binary, then the operator checks for the absence of a subsequence of the target
* that matches the input. If the target attribute of the comparison is a set
* ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the operator
* evaluates to true if it <i>does not</i> find an exact match with any member of
* the set.</p> <p>NOT_CONTAINS is supported for lists: When evaluating "<code>a
* NOT CONTAINS b</code>", "<code>a</code>" can be a list; however,
* "<code>b</code>" cannot be a set, a map, or a list.</p> </li> <li> <p>
* <code>BEGINS_WITH</code> : Checks for a prefix. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String or Binary (not a Number or a set type). The target attribute of
* the comparison must be of type String or Binary (not a Number or a set
* type).</p> <p/> </li> <li> <p> <code>IN</code> : Checks for matching elements in
* a list.</p> <p> <code>AttributeValueList</code> can contain one or more
* <code>AttributeValue</code> elements of type String, Number, or Binary. These
* attributes are compared against an existing attribute of an item. If any
* elements of the input are equal to the item attribute, the expression evaluates
* to true.</p> </li> <li> <p> <code>BETWEEN</code> : Greater than or equal to the
* first value, and less than or equal to the second value. </p> <p>
* <code>AttributeValueList</code> must contain two <code>AttributeValue</code>
* elements of the same type, either String, Number, or Binary (not a set type). A
* target attribute matches if the target value is greater than, or equal to, the
* first element and less than, or equal to, the second element. If an item
* contains an <code>AttributeValue</code> element of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not compare to <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2", "1"]}</code>
* </p> </li> </ul>
*/
inline void SetComparisonOperator(const ComparisonOperator& value) { m_comparisonOperatorHasBeenSet = true; m_comparisonOperator = value; }
/**
* <p>A comparator for evaluating attributes in the
* <code>AttributeValueList</code>. For example, equals, greater than, less than,
* etc.</p> <p>The following comparison operators are available:</p> <p> <code>EQ |
* NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH
* | IN | BETWEEN</code> </p> <p>The following are descriptions of each comparison
* operator.</p> <ul> <li> <p> <code>EQ</code> : Equal. <code>EQ</code> is
* supported for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, Binary, String Set, Number Set, or Binary Set.
* If an item contains an <code>AttributeValue</code> element of a different type
* than the one provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>NE</code> : Not equal. <code>NE</code> is supported
* for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String, Number, Binary, String Set, Number Set, or Binary Set. If an
* item contains an <code>AttributeValue</code> of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>LE</code> : Less than or equal. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>LT</code> : Less than. </p> <p> <code>AttributeValueList</code> can
* contain only one <code>AttributeValue</code> of type String, Number, or Binary
* (not a set type). If an item contains an <code>AttributeValue</code> element of
* a different type than the one provided in the request, the value does not match.
* For example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GE</code> : Greater than or equal.
* </p> <p> <code>AttributeValueList</code> can contain only one
* <code>AttributeValue</code> element of type String, Number, or Binary (not a set
* type). If an item contains an <code>AttributeValue</code> element of a different
* type than the one provided in the request, the value does not match. For
* example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GT</code> : Greater than. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>NOT_NULL</code> : The attribute exists. <code>NOT_NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the existence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using
* <code>NOT_NULL</code>, the result is a Boolean <code>true</code>. This result is
* because the attribute "<code>a</code>" exists; its data type is not relevant to
* the <code>NOT_NULL</code> comparison operator.</p> </li> <li> <p>
* <code>NULL</code> : The attribute does not exist. <code>NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the nonexistence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using <code>NULL</code>,
* the result is a Boolean <code>false</code>. This is because the attribute
* "<code>a</code>" exists; its data type is not relevant to the <code>NULL</code>
* comparison operator.</p> </li> <li> <p> <code>CONTAINS</code> : Checks
* for a subsequence, or value in a set.</p> <p> <code>AttributeValueList</code>
* can contain only one <code>AttributeValue</code> element of type String, Number,
* or Binary (not a set type). If the target attribute of the comparison is of type
* String, then the operator checks for a substring match. If the target attribute
* of the comparison is of type Binary, then the operator looks for a subsequence
* of the target that matches the input. If the target attribute of the comparison
* is a set ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the
* operator evaluates to true if it finds an exact match with any member of the
* set.</p> <p>CONTAINS is supported for lists: When evaluating "<code>a CONTAINS
* b</code>", "<code>a</code>" can be a list; however, "<code>b</code>" cannot be a
* set, a map, or a list.</p> </li> <li> <p> <code>NOT_CONTAINS</code> : Checks for
* absence of a subsequence, or absence of a value in a set.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If the target
* attribute of the comparison is a String, then the operator checks for the
* absence of a substring match. If the target attribute of the comparison is
* Binary, then the operator checks for the absence of a subsequence of the target
* that matches the input. If the target attribute of the comparison is a set
* ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the operator
* evaluates to true if it <i>does not</i> find an exact match with any member of
* the set.</p> <p>NOT_CONTAINS is supported for lists: When evaluating "<code>a
* NOT CONTAINS b</code>", "<code>a</code>" can be a list; however,
* "<code>b</code>" cannot be a set, a map, or a list.</p> </li> <li> <p>
* <code>BEGINS_WITH</code> : Checks for a prefix. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String or Binary (not a Number or a set type). The target attribute of
* the comparison must be of type String or Binary (not a Number or a set
* type).</p> <p/> </li> <li> <p> <code>IN</code> : Checks for matching elements in
* a list.</p> <p> <code>AttributeValueList</code> can contain one or more
* <code>AttributeValue</code> elements of type String, Number, or Binary. These
* attributes are compared against an existing attribute of an item. If any
* elements of the input are equal to the item attribute, the expression evaluates
* to true.</p> </li> <li> <p> <code>BETWEEN</code> : Greater than or equal to the
* first value, and less than or equal to the second value. </p> <p>
* <code>AttributeValueList</code> must contain two <code>AttributeValue</code>
* elements of the same type, either String, Number, or Binary (not a set type). A
* target attribute matches if the target value is greater than, or equal to, the
* first element and less than, or equal to, the second element. If an item
* contains an <code>AttributeValue</code> element of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not compare to <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2", "1"]}</code>
* </p> </li> </ul>
*/
inline void SetComparisonOperator(ComparisonOperator&& value) { m_comparisonOperatorHasBeenSet = true; m_comparisonOperator = std::move(value); }
/**
* <p>A comparator for evaluating attributes in the
* <code>AttributeValueList</code>. For example, equals, greater than, less than,
* etc.</p> <p>The following comparison operators are available:</p> <p> <code>EQ |
* NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH
* | IN | BETWEEN</code> </p> <p>The following are descriptions of each comparison
* operator.</p> <ul> <li> <p> <code>EQ</code> : Equal. <code>EQ</code> is
* supported for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, Binary, String Set, Number Set, or Binary Set.
* If an item contains an <code>AttributeValue</code> element of a different type
* than the one provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>NE</code> : Not equal. <code>NE</code> is supported
* for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String, Number, Binary, String Set, Number Set, or Binary Set. If an
* item contains an <code>AttributeValue</code> of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>LE</code> : Less than or equal. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>LT</code> : Less than. </p> <p> <code>AttributeValueList</code> can
* contain only one <code>AttributeValue</code> of type String, Number, or Binary
* (not a set type). If an item contains an <code>AttributeValue</code> element of
* a different type than the one provided in the request, the value does not match.
* For example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GE</code> : Greater than or equal.
* </p> <p> <code>AttributeValueList</code> can contain only one
* <code>AttributeValue</code> element of type String, Number, or Binary (not a set
* type). If an item contains an <code>AttributeValue</code> element of a different
* type than the one provided in the request, the value does not match. For
* example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GT</code> : Greater than. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>NOT_NULL</code> : The attribute exists. <code>NOT_NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the existence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using
* <code>NOT_NULL</code>, the result is a Boolean <code>true</code>. This result is
* because the attribute "<code>a</code>" exists; its data type is not relevant to
* the <code>NOT_NULL</code> comparison operator.</p> </li> <li> <p>
* <code>NULL</code> : The attribute does not exist. <code>NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the nonexistence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using <code>NULL</code>,
* the result is a Boolean <code>false</code>. This is because the attribute
* "<code>a</code>" exists; its data type is not relevant to the <code>NULL</code>
* comparison operator.</p> </li> <li> <p> <code>CONTAINS</code> : Checks
* for a subsequence, or value in a set.</p> <p> <code>AttributeValueList</code>
* can contain only one <code>AttributeValue</code> element of type String, Number,
* or Binary (not a set type). If the target attribute of the comparison is of type
* String, then the operator checks for a substring match. If the target attribute
* of the comparison is of type Binary, then the operator looks for a subsequence
* of the target that matches the input. If the target attribute of the comparison
* is a set ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the
* operator evaluates to true if it finds an exact match with any member of the
* set.</p> <p>CONTAINS is supported for lists: When evaluating "<code>a CONTAINS
* b</code>", "<code>a</code>" can be a list; however, "<code>b</code>" cannot be a
* set, a map, or a list.</p> </li> <li> <p> <code>NOT_CONTAINS</code> : Checks for
* absence of a subsequence, or absence of a value in a set.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If the target
* attribute of the comparison is a String, then the operator checks for the
* absence of a substring match. If the target attribute of the comparison is
* Binary, then the operator checks for the absence of a subsequence of the target
* that matches the input. If the target attribute of the comparison is a set
* ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the operator
* evaluates to true if it <i>does not</i> find an exact match with any member of
* the set.</p> <p>NOT_CONTAINS is supported for lists: When evaluating "<code>a
* NOT CONTAINS b</code>", "<code>a</code>" can be a list; however,
* "<code>b</code>" cannot be a set, a map, or a list.</p> </li> <li> <p>
* <code>BEGINS_WITH</code> : Checks for a prefix. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String or Binary (not a Number or a set type). The target attribute of
* the comparison must be of type String or Binary (not a Number or a set
* type).</p> <p/> </li> <li> <p> <code>IN</code> : Checks for matching elements in
* a list.</p> <p> <code>AttributeValueList</code> can contain one or more
* <code>AttributeValue</code> elements of type String, Number, or Binary. These
* attributes are compared against an existing attribute of an item. If any
* elements of the input are equal to the item attribute, the expression evaluates
* to true.</p> </li> <li> <p> <code>BETWEEN</code> : Greater than or equal to the
* first value, and less than or equal to the second value. </p> <p>
* <code>AttributeValueList</code> must contain two <code>AttributeValue</code>
* elements of the same type, either String, Number, or Binary (not a set type). A
* target attribute matches if the target value is greater than, or equal to, the
* first element and less than, or equal to, the second element. If an item
* contains an <code>AttributeValue</code> element of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not compare to <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2", "1"]}</code>
* </p> </li> </ul>
*/
inline ExpectedAttributeValue& WithComparisonOperator(const ComparisonOperator& value) { SetComparisonOperator(value); return *this;}
/**
* <p>A comparator for evaluating attributes in the
* <code>AttributeValueList</code>. For example, equals, greater than, less than,
* etc.</p> <p>The following comparison operators are available:</p> <p> <code>EQ |
* NE | LE | LT | GE | GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH
* | IN | BETWEEN</code> </p> <p>The following are descriptions of each comparison
* operator.</p> <ul> <li> <p> <code>EQ</code> : Equal. <code>EQ</code> is
* supported for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, Binary, String Set, Number Set, or Binary Set.
* If an item contains an <code>AttributeValue</code> element of a different type
* than the one provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>NE</code> : Not equal. <code>NE</code> is supported
* for all data types, including lists and maps.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String, Number, Binary, String Set, Number Set, or Binary Set. If an
* item contains an <code>AttributeValue</code> of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not equal <code>{"NS":["6", "2", "1"]}</code>.</p>
* <p/> </li> <li> <p> <code>LE</code> : Less than or equal. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>LT</code> : Less than. </p> <p> <code>AttributeValueList</code> can
* contain only one <code>AttributeValue</code> of type String, Number, or Binary
* (not a set type). If an item contains an <code>AttributeValue</code> element of
* a different type than the one provided in the request, the value does not match.
* For example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GE</code> : Greater than or equal.
* </p> <p> <code>AttributeValueList</code> can contain only one
* <code>AttributeValue</code> element of type String, Number, or Binary (not a set
* type). If an item contains an <code>AttributeValue</code> element of a different
* type than the one provided in the request, the value does not match. For
* example, <code>{"S":"6"}</code> does not equal <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2",
* "1"]}</code>.</p> <p/> </li> <li> <p> <code>GT</code> : Greater than. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If an item contains
* an <code>AttributeValue</code> element of a different type than the one provided
* in the request, the value does not match. For example, <code>{"S":"6"}</code>
* does not equal <code>{"N":"6"}</code>. Also, <code>{"N":"6"}</code> does not
* compare to <code>{"NS":["6", "2", "1"]}</code>.</p> <p/> </li> <li> <p>
* <code>NOT_NULL</code> : The attribute exists. <code>NOT_NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the existence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using
* <code>NOT_NULL</code>, the result is a Boolean <code>true</code>. This result is
* because the attribute "<code>a</code>" exists; its data type is not relevant to
* the <code>NOT_NULL</code> comparison operator.</p> </li> <li> <p>
* <code>NULL</code> : The attribute does not exist. <code>NULL</code> is supported
* for all data types, including lists and maps.</p> <p>This operator tests
* for the nonexistence of an attribute, not its data type. If the data type of
* attribute "<code>a</code>" is null, and you evaluate it using <code>NULL</code>,
* the result is a Boolean <code>false</code>. This is because the attribute
* "<code>a</code>" exists; its data type is not relevant to the <code>NULL</code>
* comparison operator.</p> </li> <li> <p> <code>CONTAINS</code> : Checks
* for a subsequence, or value in a set.</p> <p> <code>AttributeValueList</code>
* can contain only one <code>AttributeValue</code> element of type String, Number,
* or Binary (not a set type). If the target attribute of the comparison is of type
* String, then the operator checks for a substring match. If the target attribute
* of the comparison is of type Binary, then the operator looks for a subsequence
* of the target that matches the input. If the target attribute of the comparison
* is a set ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the
* operator evaluates to true if it finds an exact match with any member of the
* set.</p> <p>CONTAINS is supported for lists: When evaluating "<code>a CONTAINS
* b</code>", "<code>a</code>" can be a list; however, "<code>b</code>" cannot be a
* set, a map, or a list.</p> </li> <li> <p> <code>NOT_CONTAINS</code> : Checks for
* absence of a subsequence, or absence of a value in a set.</p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* element of type String, Number, or Binary (not a set type). If the target
* attribute of the comparison is a String, then the operator checks for the
* absence of a substring match. If the target attribute of the comparison is
* Binary, then the operator checks for the absence of a subsequence of the target
* that matches the input. If the target attribute of the comparison is a set
* ("<code>SS</code>", "<code>NS</code>", or "<code>BS</code>"), then the operator
* evaluates to true if it <i>does not</i> find an exact match with any member of
* the set.</p> <p>NOT_CONTAINS is supported for lists: When evaluating "<code>a
* NOT CONTAINS b</code>", "<code>a</code>" can be a list; however,
* "<code>b</code>" cannot be a set, a map, or a list.</p> </li> <li> <p>
* <code>BEGINS_WITH</code> : Checks for a prefix. </p> <p>
* <code>AttributeValueList</code> can contain only one <code>AttributeValue</code>
* of type String or Binary (not a Number or a set type). The target attribute of
* the comparison must be of type String or Binary (not a Number or a set
* type).</p> <p/> </li> <li> <p> <code>IN</code> : Checks for matching elements in
* a list.</p> <p> <code>AttributeValueList</code> can contain one or more
* <code>AttributeValue</code> elements of type String, Number, or Binary. These
* attributes are compared against an existing attribute of an item. If any
* elements of the input are equal to the item attribute, the expression evaluates
* to true.</p> </li> <li> <p> <code>BETWEEN</code> : Greater than or equal to the
* first value, and less than or equal to the second value. </p> <p>
* <code>AttributeValueList</code> must contain two <code>AttributeValue</code>
* elements of the same type, either String, Number, or Binary (not a set type). A
* target attribute matches if the target value is greater than, or equal to, the
* first element and less than, or equal to, the second element. If an item
* contains an <code>AttributeValue</code> element of a different type than the one
* provided in the request, the value does not match. For example,
* <code>{"S":"6"}</code> does not compare to <code>{"N":"6"}</code>. Also,
* <code>{"N":"6"}</code> does not compare to <code>{"NS":["6", "2", "1"]}</code>
* </p> </li> </ul>
*/
inline ExpectedAttributeValue& WithComparisonOperator(ComparisonOperator&& value) { SetComparisonOperator(std::move(value)); return *this;}
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline const Aws::Vector<AttributeValue>& GetAttributeValueList() const{ return m_attributeValueList; }
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline bool AttributeValueListHasBeenSet() const { return m_attributeValueListHasBeenSet; }
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline void SetAttributeValueList(const Aws::Vector<AttributeValue>& value) { m_attributeValueListHasBeenSet = true; m_attributeValueList = value; }
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline void SetAttributeValueList(Aws::Vector<AttributeValue>&& value) { m_attributeValueListHasBeenSet = true; m_attributeValueList = std::move(value); }
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline ExpectedAttributeValue& WithAttributeValueList(const Aws::Vector<AttributeValue>& value) { SetAttributeValueList(value); return *this;}
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline ExpectedAttributeValue& WithAttributeValueList(Aws::Vector<AttributeValue>&& value) { SetAttributeValueList(std::move(value)); return *this;}
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline ExpectedAttributeValue& AddAttributeValueList(const AttributeValue& value) { m_attributeValueListHasBeenSet = true; m_attributeValueList.push_back(value); return *this; }
/**
* <p>One or more values to evaluate against the supplied attribute. The number of
* values in the list depends on the <code>ComparisonOperator</code> being
* used.</p> <p>For type Number, value comparisons are numeric.</p> <p>String value
* comparisons for greater than, equals, or less than are based on ASCII character
* code values. For example, <code>a</code> is greater than <code>A</code>, and
* <code>a</code> is greater than <code>B</code>. For a list of code values, see <a
* href="http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters">http://en.wikipedia.org/wiki/ASCII#ASCII_printable_characters</a>.</p>
* <p>For Binary, DynamoDB treats each byte of the binary data as unsigned when it
* compares binary values.</p> <p>For information on specifying data types in JSON,
* see <a
* href="https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/DataFormat.html">JSON
* Data Format</a> in the <i>Amazon DynamoDB Developer Guide</i>.</p>
*/
inline ExpectedAttributeValue& AddAttributeValueList(AttributeValue&& value) { m_attributeValueListHasBeenSet = true; m_attributeValueList.push_back(std::move(value)); return *this; }
  private:
    // NOTE(review): presumably the legacy "Expected" single value the attribute
    // must hold — its accessors are above this chunk; confirm there.
    AttributeValue m_value;
    bool m_valueHasBeenSet;
    // NOTE(review): presumably the legacy existence flag (attribute expected to
    // exist or not) — confirm against the Exists accessors above this chunk.
    bool m_exists;
    bool m_existsHasBeenSet;
    // Comparator applied against m_attributeValueList (EQ | NE | LE | LT | GE |
    // GT | NOT_NULL | NULL | CONTAINS | NOT_CONTAINS | BEGINS_WITH | IN | BETWEEN).
    ComparisonOperator m_comparisonOperator;
    bool m_comparisonOperatorHasBeenSet;
    // Operand values for m_comparisonOperator; the required element count
    // depends on the operator (e.g. one for EQ, two for BETWEEN).
    Aws::Vector<AttributeValue> m_attributeValueList;
    bool m_attributeValueListHasBeenSet;
};
} // namespace Model
} // namespace DynamoDB
} // namespace Aws
| Java |
/*
* Copyright (C) 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#ifndef GAPIR_RESOURCE_LOADER_H
#define GAPIR_RESOURCE_LOADER_H
#include "replay_service.h"
#include "resource.h"

#include <cstring>  // memcpy (used by PassThroughResourceLoader::load)
#include <memory>
#include <stdint.h>
namespace gapir {
// ResourceLoader is an interface which can load a list of resources, in
// order, to the specified location.
// TODO(qining): Change the load() or fetch() interface to accept a callback
// function to process the fetched data, then we won't need two methods anymore.
class ResourceLoader {
 public:
  virtual ~ResourceLoader() {}
  // Loads count resources from the provider and writes them, in-order, to
  // target. If the net size of all the resources exceeds targetSize, then
  // false is returned.
  virtual bool load(const Resource* resources, size_t count, void* target,
                    size_t targetSize) = 0;
  // Fetch queries the specified resources and returns a
  // ReplayService::Resources instance which contains the resources data.
  // May return nullptr on failure (see PassThroughResourceLoader below for
  // one implementation's failure conditions).
  virtual std::unique_ptr<ReplayService::Resources> fetch(
      const Resource* resources, size_t count) = 0;
};
// PassThroughResourceLoader implements the ResourceLoader interface. It pulls
// resources from a ReplayService instance for every resource loading request.
class PassThroughResourceLoader : public ResourceLoader {
public:
static std::unique_ptr<PassThroughResourceLoader> create(ReplayService* srv) {
return std::unique_ptr<PassThroughResourceLoader>(
new PassThroughResourceLoader(srv));
}
// fetch returns the resources instance fetched from
// PassThroughResourceLoader's ReplayService, does not load it to anywhere.
std::unique_ptr<ReplayService::Resources> fetch(const Resource* resources,
size_t count) override {
if (resources == nullptr || count == 0) {
return nullptr;
}
if (mSrv == nullptr) {
return nullptr;
}
return mSrv->getResources(resources, count);
}
// Request all of the requested resources from the ServerConnection with a
// single GET request then loads the data to the target location.
bool load(const Resource* resources, size_t count, void* target,
size_t size) override {
if (count == 0) {
return true;
}
size_t requestSize = 0;
for (size_t i = 0; i < count; i++) {
requestSize += resources[i].getSize();
}
if (requestSize > size) {
return false; // not enough space.
}
auto res = fetch(resources, count);
if (res == nullptr) {
return false;
}
if (res->size() != requestSize) {
return false; // unexpected resource size.
}
memcpy(target, res->data(), res->size());
return true;
}
private:
PassThroughResourceLoader(ReplayService* srv) : mSrv(srv) {}
ReplayService* mSrv;
};
} // namespace gapir
#endif // GAPIR_RESOURCE_LOADER_H
| Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.spark.sql.catalyst.json
import java.nio.charset.{Charset, StandardCharsets}
import java.time.ZoneId
import java.util.Locale
import com.fasterxml.jackson.core.{JsonFactory, JsonFactoryBuilder}
import com.fasterxml.jackson.core.json.JsonReadFeature
import org.apache.spark.internal.Logging
import org.apache.spark.sql.catalyst.util._
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy
/**
* Options for parsing JSON data into Spark SQL rows.
*
* Most of these map directly to Jackson's internal options, specified in [[JsonReadFeature]].
*/
private[sql] class JSONOptions(
    @transient val parameters: CaseInsensitiveMap[String],
    defaultTimeZoneId: String,
    defaultColumnNameOfCorruptRecord: String)
  extends Logging with Serializable {

  // Convenience constructor wrapping a plain Map in a CaseInsensitiveMap so
  // option names are matched case-insensitively.
  def this(
    parameters: Map[String, String],
    defaultTimeZoneId: String,
    defaultColumnNameOfCorruptRecord: String = "") = {
    this(
      CaseInsensitiveMap(parameters),
      defaultTimeZoneId,
      defaultColumnNameOfCorruptRecord)
  }

  // Fraction of the input used for schema inference; defaults to the whole input.
  val samplingRatio =
    parameters.get("samplingRatio").map(_.toDouble).getOrElse(1.0)
  // If true, primitive values are inferred as StringType during schema inference.
  val primitivesAsString =
    parameters.get("primitivesAsString").map(_.toBoolean).getOrElse(false)
  // If true, prefer DecimalType over floating point during schema inference.
  val prefersDecimal =
    parameters.get("prefersDecimal").map(_.toBoolean).getOrElse(false)
  // The following allow* flags map one-to-one onto Jackson JsonReadFeatures;
  // they are applied in buildJsonFactory() below.
  val allowComments =
    parameters.get("allowComments").map(_.toBoolean).getOrElse(false)
  val allowUnquotedFieldNames =
    parameters.get("allowUnquotedFieldNames").map(_.toBoolean).getOrElse(false)
  val allowSingleQuotes =
    parameters.get("allowSingleQuotes").map(_.toBoolean).getOrElse(true)
  val allowNumericLeadingZeros =
    parameters.get("allowNumericLeadingZeros").map(_.toBoolean).getOrElse(false)
  val allowNonNumericNumbers =
    parameters.get("allowNonNumericNumbers").map(_.toBoolean).getOrElse(true)
  val allowBackslashEscapingAnyCharacter =
    parameters.get("allowBackslashEscapingAnyCharacter").map(_.toBoolean).getOrElse(false)
  private val allowUnquotedControlChars =
    parameters.get("allowUnquotedControlChars").map(_.toBoolean).getOrElse(false)
  // Codec class name used when writing JSON output, resolved via CompressionCodecs.
  val compressionCodec = parameters.get("compression").map(CompressionCodecs.getCodecClassName)
  // How malformed records are handled: PERMISSIVE (default), DROPMALFORMED or FAILFAST.
  val parseMode: ParseMode =
    parameters.get("mode").map(ParseMode.fromString).getOrElse(PermissiveMode)
  // Column that receives the raw text of records that failed to parse.
  val columnNameOfCorruptRecord =
    parameters.getOrElse("columnNameOfCorruptRecord", defaultColumnNameOfCorruptRecord)
  // Whether to ignore a column of all null values or empty array/struct during
  // schema inference.
  val dropFieldIfAllNull = parameters.get("dropFieldIfAllNull").map(_.toBoolean).getOrElse(false)
  // Whether to omit null fields when generating JSON.
  val ignoreNullFields = parameters.get("ignoreNullFields").map(_.toBoolean)
    .getOrElse(SQLConf.get.jsonGeneratorIgnoreNullFields)
  // A language tag in IETF BCP 47 format, used for locale-sensitive parsing.
  val locale: Locale = parameters.get("locale").map(Locale.forLanguageTag).getOrElse(Locale.US)
  // Time zone used for date/timestamp parsing; falls back to the session default.
  val zoneId: ZoneId = DateTimeUtils.getZoneId(
    parameters.getOrElse(DateTimeUtils.TIMEZONE_OPTION, defaultTimeZoneId))
  val dateFormat: String = parameters.getOrElse("dateFormat", DateFormatter.defaultPattern)
  // Default timestamp pattern differs under the legacy time parser policy:
  // the legacy pattern has mandatory millis/offset sections instead of optional ones.
  val timestampFormat: String = parameters.getOrElse("timestampFormat",
    if (SQLConf.get.legacyTimeParserPolicy == LegacyBehaviorPolicy.LEGACY) {
      s"${DateFormatter.defaultPattern}'T'HH:mm:ss.SSSXXX"
    } else {
      s"${DateFormatter.defaultPattern}'T'HH:mm:ss[.SSS][XXX]"
    })
  // Whether one JSON record may span multiple lines.
  val multiLine = parameters.get("multiLine").map(_.toBoolean).getOrElse(false)

  /**
   * A string between two consecutive JSON records.
   */
  val lineSeparator: Option[String] = parameters.get("lineSep").map { sep =>
    require(sep.nonEmpty, "'lineSep' cannot be an empty string.")
    sep
  }

  // Hook for subclasses to validate the charset name; the base (write-side)
  // class accepts any value. See JSONOptionsInRead for the read-side checks.
  protected def checkedEncoding(enc: String): String = enc

  /**
   * Standard encoding (charset) name. For example UTF-8, UTF-16LE and UTF-32BE.
   * If the encoding is not specified (None) in read, it will be detected automatically
   * when the multiLine option is set to `true`. If encoding is not specified in write,
   * UTF-8 is used by default.
   */
  val encoding: Option[String] = parameters.get("encoding")
    .orElse(parameters.get("charset")).map(checkedEncoding)

  // Record separator as raw bytes in the configured (or UTF-8) encoding.
  val lineSeparatorInRead: Option[Array[Byte]] = lineSeparator.map { lineSep =>
    lineSep.getBytes(encoding.getOrElse(StandardCharsets.UTF_8.name()))
  }
  val lineSeparatorInWrite: String = lineSeparator.getOrElse("\n")

  /**
   * Generating JSON strings in pretty representation if the parameter is enabled.
   */
  val pretty: Boolean = parameters.get("pretty").map(_.toBoolean).getOrElse(false)

  /**
   * Enables inferring of TimestampType from strings matched to the timestamp pattern
   * defined by the timestampFormat option.
   */
  val inferTimestamp: Boolean = parameters.get("inferTimestamp").map(_.toBoolean).getOrElse(true)

  /** Build a Jackson [[JsonFactory]] using JSON options. */
  def buildJsonFactory(): JsonFactory = {
    new JsonFactoryBuilder()
      .configure(JsonReadFeature.ALLOW_JAVA_COMMENTS, allowComments)
      .configure(JsonReadFeature.ALLOW_UNQUOTED_FIELD_NAMES, allowUnquotedFieldNames)
      .configure(JsonReadFeature.ALLOW_SINGLE_QUOTES, allowSingleQuotes)
      .configure(JsonReadFeature.ALLOW_LEADING_ZEROS_FOR_NUMBERS, allowNumericLeadingZeros)
      .configure(JsonReadFeature.ALLOW_NON_NUMERIC_NUMBERS, allowNonNumericNumbers)
      .configure(
        JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER,
        allowBackslashEscapingAnyCharacter)
      .configure(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS, allowUnquotedControlChars)
      .build()
  }
}
private[sql] class JSONOptionsInRead(
    @transient override val parameters: CaseInsensitiveMap[String],
    defaultTimeZoneId: String,
    defaultColumnNameOfCorruptRecord: String)
  extends JSONOptions(parameters, defaultTimeZoneId, defaultColumnNameOfCorruptRecord) {

  // Convenience constructor mirroring the one on JSONOptions.
  def this(
    parameters: Map[String, String],
    defaultTimeZoneId: String,
    defaultColumnNameOfCorruptRecord: String = "") = {
    this(
      CaseInsensitiveMap(parameters),
      defaultTimeZoneId,
      defaultColumnNameOfCorruptRecord)
  }

  // Read-side charset validation: rejects blacklisted charsets unless
  // multiLine is enabled, and requires an explicit lineSep for non-UTF-8
  // charsets in per-line mode (line splitting cannot autodetect them).
  protected override def checkedEncoding(enc: String): String = {
    val isBlacklisted = JSONOptionsInRead.blacklist.contains(Charset.forName(enc))
    require(multiLine || !isBlacklisted,
      s"""The ${enc} encoding must not be included in the blacklist when multiLine is disabled:
         |Blacklist: ${JSONOptionsInRead.blacklist.mkString(", ")}""".stripMargin)
    val isLineSepRequired =
      multiLine || Charset.forName(enc) == StandardCharsets.UTF_8 || lineSeparator.nonEmpty
    require(isLineSepRequired, s"The lineSep option must be specified for the $enc encoding")
    enc
  }
}
private[sql] object JSONOptionsInRead {
  // The following encodings are not supported in per-line mode (multiLine is
  // false) because they cause problems when reading files with a BOM, which is
  // supposed to be present in files with such encodings. After splitting input
  // files by lines, only the first line will carry the BOM, which makes the
  // remaining lines unreadable. Besides that, the lineSep option would have to
  // contain the BOM in such encodings, and a BOM can never appear between lines.
  val blacklist = Seq(
    Charset.forName("UTF-16"),
    Charset.forName("UTF-32")
  )
}
| Java |
<?php
namespace Topxia\Service\User\Dao;
/**
 * DAO interface for persisting user "fortune" change logs
 * (presumably the site's virtual-currency ledger — confirm against callers).
 */
interface UserFortuneLogDao
{
    /**
     * Inserts one fortune log record.
     *
     * @param array $log column => value map for the new row; the expected
     *                   keys are defined by the implementation/schema.
     */
    public function addLog(array $log);
}
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_14) on Tue Aug 17 01:07:09 EDT 2010 -->
<TITLE>
Uses of Class org.apache.hadoop.mapred.lib.aggregate.LongValueSum (Hadoop-Mapred 0.21.0 API)
</TITLE>
<META NAME="date" CONTENT="2010-08-17">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.apache.hadoop.mapred.lib.aggregate.LongValueSum (Hadoop-Mapred 0.21.0 API)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/hadoop/mapred/lib/aggregate/LongValueSum.html" title="class in org.apache.hadoop.mapred.lib.aggregate"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/hadoop/mapred/lib/aggregate//class-useLongValueSum.html" target="_top"><B>FRAMES</B></A>
<A HREF="LongValueSum.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.apache.hadoop.mapred.lib.aggregate.LongValueSum</B></H2>
</CENTER>
No usage of org.apache.hadoop.mapred.lib.aggregate.LongValueSum
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../org/apache/hadoop/mapred/lib/aggregate/LongValueSum.html" title="class in org.apache.hadoop.mapred.lib.aggregate"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../../../../index.html?org/apache/hadoop/mapred/lib/aggregate//class-useLongValueSum.html" target="_top"><B>FRAMES</B></A>
<A HREF="LongValueSum.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
Copyright © 2009 The Apache Software Foundation
</BODY>
</HTML>
| Java |
/* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#define C_LUCY_PROXIMITYMATCHER
#define C_LUCY_POSTING
#define C_LUCY_SCOREPOSTING
#include "Lucy/Util/ToolSet.h"
#include "LucyX/Search/ProximityMatcher.h"
#include "Lucy/Index/Posting/ScorePosting.h"
#include "Lucy/Index/PostingList.h"
#include "Lucy/Index/Similarity.h"
#include "Lucy/Search/Compiler.h"
ProximityMatcher*
ProximityMatcher_new(Similarity *sim, Vector *plists, Compiler *compiler,
                     uint32_t within) {
    // Allocate the object, then delegate all real setup to init().
    ProximityMatcher *matcher
        = (ProximityMatcher*)Class_Make_Obj(PROXIMITYMATCHER);
    return ProximityMatcher_init(matcher, sim, plists, compiler, within);
}
// Initialize a ProximityMatcher.  Takes one PostingList per query term;
// `within` is the positional slop threshold used later by
// Calc_Proximity_Freq (within == 1 means exact phrase match).
ProximityMatcher*
ProximityMatcher_init(ProximityMatcher *self, Similarity *similarity,
                      Vector *plists, Compiler *compiler, uint32_t within) {
    Matcher_init((Matcher*)self);
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    // Init.
    ivars->anchor_set = BB_new(0);
    ivars->proximity_freq = 0.0;
    ivars->proximity_boost = 0.0;
    ivars->first_time = true;
    ivars->more = true;
    ivars->within = within;
    // Extract PostingLists out of Vector into local C array for quick access.
    ivars->num_elements = Vec_Get_Size(plists);
    ivars->plists = (PostingList**)MALLOCATE(
                        ivars->num_elements * sizeof(PostingList*));
    for (size_t i = 0; i < ivars->num_elements; i++) {
        PostingList *const plist
            = (PostingList*)CERTIFY(Vec_Fetch(plists, i), POSTINGLIST);
        // NOTE(review): CERTIFY presumably also throws on NULL, which would
        // make this branch unreachable -- confirm against the CERTIFY contract.
        if (plist == NULL) {
            THROW(ERR, "Missing element %u32", i);
        }
        ivars->plists[i] = (PostingList*)INCREF(plist);
    }
    // Assign.  The matcher takes its own refcounts on the collaborators.
    ivars->sim = (Similarity*)INCREF(similarity);
    ivars->compiler = (Compiler*)INCREF(compiler);
    ivars->weight = Compiler_Get_Weight(compiler);
    return self;
}
// Release all refcounts taken in init() and free the PostingList array.
void
ProximityMatcher_Destroy_IMP(ProximityMatcher *self) {
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    // plists may be NULL if the object is destroyed before init() ran --
    // defensive check before walking the array.
    if (ivars->plists) {
        for (size_t i = 0; i < ivars->num_elements; i++) {
            DECREF(ivars->plists[i]);
        }
        FREEMEM(ivars->plists);
    }
    DECREF(ivars->sim);
    DECREF(ivars->anchor_set);
    DECREF(ivars->compiler);
    SUPER_DESTROY(self, PROXIMITYMATCHER);
}
// Move to the next matching document, or return 0 when exhausted.
int32_t
ProximityMatcher_Next_IMP(ProximityMatcher *self) {
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    // The first call starts the scan from doc ID 1.
    if (ivars->first_time) {
        return ProximityMatcher_Advance(self, 1);
    }
    // Once any PostingList is exhausted, stay exhausted.
    if (!ivars->more) {
        return 0;
    }
    // Otherwise seek past the current doc of the first term's PostingList.
    const int32_t next_target = PList_Get_Doc_ID(ivars->plists[0]) + 1;
    return ProximityMatcher_Advance(self, next_target);
}
// Advance to the first doc ID >= target in which every term's PostingList
// lands on the same document AND the positions satisfy the proximity test.
// Returns the matching doc ID and sets ivars->doc_id/proximity_freq, or
// returns 0 (with ivars->more = false) when any PostingList is exhausted.
int32_t
ProximityMatcher_Advance_IMP(ProximityMatcher *self, int32_t target) {
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    PostingList **const plists = ivars->plists;
    const uint32_t num_elements = ivars->num_elements;
    int32_t highest = 0;
    // Reset match variables to indicate no match. New values will be
    // assigned if a match succeeds.
    ivars->proximity_freq = 0.0;
    ivars->doc_id = 0;
    // Find the lowest possible matching doc ID greater than the current doc
    // ID. If any one of the PostingLists is exhausted, we're done.
    if (ivars->first_time) {
        ivars->first_time = false;
        // On the first call to Advance(), advance all PostingLists.
        for (size_t i = 0, max = ivars->num_elements; i < max; i++) {
            int32_t candidate = PList_Advance(plists[i], target);
            if (!candidate) {
                ivars->more = false;
                return 0;
            }
            else if (candidate > highest) {
                // Remember the highest doc ID so far.
                highest = candidate;
            }
        }
    }
    else {
        // On subsequent iters, advance only one PostingList. Its new doc ID
        // becomes the minimum target which all the others must move up to.
        highest = PList_Advance(plists[0], target);
        if (highest == 0) {
            ivars->more = false;
            return 0;
        }
    }
    // Find a doc which contains all the terms.
    while (1) {
        bool agreement = true;
        // Scoot all posting lists up to at least the current minimum.
        for (uint32_t i = 0; i < num_elements; i++) {
            PostingList *const plist = plists[i];
            int32_t candidate = PList_Get_Doc_ID(plist);
            // Is this PostingList already beyond the minimum? Then raise the
            // bar for everyone else.
            if (highest < candidate) { highest = candidate; }
            if (target < highest) { target = highest; }
            // Scoot this posting list up.
            if (candidate < target) {
                candidate = PList_Advance(plist, target);
                // If this PostingList is exhausted, we're done.
                if (candidate == 0) {
                    ivars->more = false;
                    return 0;
                }
                // After calling PList_Advance(), we are guaranteed to be
                // either at or beyond the minimum, so we can assign without
                // checking and the minimum will either go up or stay the
                // same.
                highest = candidate;
            }
        }
        // See whether all the PostingLists have managed to converge on a
        // single doc ID.
        for (uint32_t i = 0; i < num_elements; i++) {
            const int32_t candidate = PList_Get_Doc_ID(plists[i]);
            if (candidate != highest) { agreement = false; }
        }
        // If we've found a doc with all terms in it, see if they form a
        // phrase.
        if (agreement && highest >= target) {
            ivars->proximity_freq = ProximityMatcher_Calc_Proximity_Freq(self);
            if (ivars->proximity_freq == 0.0) {
                // No phrase. Move on to another doc.
                target += 1;
            }
            else {
                // Success!
                ivars->doc_id = highest;
                return highest;
            }
        }
    }
}
// Winnow the anchor set against one term's position list.  `offset` is the
// term's index within the phrase (the expected positional gap from each
// anchor) and `within` is the allowed slop.  Surviving anchors are compacted
// to the front of the anchors array in place; returns how many survive.
static CFISH_INLINE uint32_t
SI_winnow_anchors(uint32_t *anchors_start, const uint32_t *const anchors_end,
                  const uint32_t *candidates, const uint32_t *const candidates_end,
                  uint32_t offset, uint32_t within) {
    uint32_t *anchors = anchors_start;
    uint32_t *anchors_found = anchors_start;
    uint32_t target_anchor;
    uint32_t target_candidate;
    // Safety check, so there's no chance of a bad dereference.
    if (anchors_start == anchors_end || candidates == candidates_end) {
        return 0;
    }
    /* This function is a loop that finds terms that can continue a phrase.
     * It overwrites the anchors in place, and returns the number remaining.
     * The basic algorithm is to alternately increment the candidates' pointer
     * until it is at or beyond its target position, and then increment the
     * anchors' pointer until it is at or beyond its target.  The non-standard
     * form is to avoid unnecessary comparisons.  This loop has not been
     * tested for speed, but glancing at the object code produced (objdump -S)
     * it appears to be significantly faster than the nested loop alternative.
     * But given the vagaries of modern processors, it merits actual
     * testing.*/
    SPIN_CANDIDATES:
    target_candidate = *anchors + offset;
    while (*candidates < target_candidate) {
        if (++candidates == candidates_end) { goto DONE; }
    }
    // NOTE: the loop above guarantees *candidates >= target_candidate, so
    // this unsigned subtraction cannot wrap around.
    if ((*candidates - target_candidate) < within) { goto MATCH; }
    goto SPIN_ANCHORS;
    SPIN_ANCHORS:
    target_anchor = *candidates - offset;
    while (*anchors < target_anchor) {
        if (++anchors == anchors_end) { goto DONE; }
    };
    if (*anchors == target_anchor) { goto MATCH; }
    goto SPIN_CANDIDATES;
    MATCH:
    // Keep this anchor: compact it toward the front of the array.
    *anchors_found++ = *anchors;
    if (++anchors == anchors_end) { goto DONE; }
    goto SPIN_CANDIDATES;
    DONE:
    // Return number of anchors remaining.
    return anchors_found - anchors_start;
}
// Count how many phrase (or near-phrase) occurrences exist in the current
// document.  Returns 0.0 when the terms co-occur but never line up.
float
ProximityMatcher_Calc_Proximity_Freq_IMP(ProximityMatcher *self) {
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    PostingList **const plists = ivars->plists;
    /* Create an overwriteable "anchor set" from the first posting.
     *
     * Each "anchor" is a position, measured in tokens, corresponding to a
     * term which might start a phrase.  We start off with an "anchor set"
     * comprised of all positions at which the first term in the phrase occurs
     * in the field.
     *
     * There can never be more proximity matches than instances of this first
     * term.  There may be fewer however, which we will determine by seeing
     * whether all the other terms line up at subsequent position slots.
     *
     * Every time we eliminate an anchor from the anchor set, we splice it out
     * of the array.  So if we begin with an anchor set of (15, 51, 72) and we
     * discover that matches occur at the first and last instances of the
     * first term but not the middle one, the final array will be (15, 72).
     *
     * The number of elements in the anchor set when we are finished winnowing
     * is our proximity freq.
     */
    ScorePosting *posting = (ScorePosting*)PList_Get_Posting(plists[0]);
    ScorePostingIVARS *const post_ivars = ScorePost_IVARS(posting);
    uint32_t anchors_remaining = post_ivars->freq;
    if (!anchors_remaining) { return 0.0f; }
    // Copy the first term's positions into the reusable anchor_set buffer so
    // SI_winnow_anchors() can overwrite them in place.
    size_t amount = anchors_remaining * sizeof(uint32_t);
    uint32_t *anchors_start = (uint32_t*)BB_Grow(ivars->anchor_set, amount);
    uint32_t *anchors_end = anchors_start + anchors_remaining;
    memcpy(anchors_start, post_ivars->prox, amount);
    // Match the positions of other terms against the anchor set.
    for (uint32_t i = 1, max = ivars->num_elements; i < max; i++) {
        // Get the array of positions for the next term.  Unlike the anchor
        // set (which is a copy), these won't be overwritten.
        ScorePosting *next_post = (ScorePosting*)PList_Get_Posting(plists[i]);
        ScorePostingIVARS *const next_post_ivars = ScorePost_IVARS(next_post);
        uint32_t *candidates_start = next_post_ivars->prox;
        uint32_t *candidates_end = candidates_start + next_post_ivars->freq;
        // Splice out anchors that don't match the next term.  Bail out if
        // we've eliminated all possible anchors.
        if (ivars->within == 1) { // exact phrase match
            anchors_remaining = SI_winnow_anchors(anchors_start, anchors_end,
                                                  candidates_start,
                                                  candidates_end, i, 1);
        }
        else { // fuzzy-phrase match
            anchors_remaining = SI_winnow_anchors(anchors_start, anchors_end,
                                                  candidates_start,
                                                  candidates_end, i,
                                                  ivars->within);
        }
        if (!anchors_remaining) { return 0.0f; }
        // Adjust end for number of anchors that remain.
        anchors_end = anchors_start + anchors_remaining;
    }
    // The number of anchors left is the proximity freq.
    return (float)anchors_remaining;
}
// Accessor for the doc ID of the current match (0 before the first match).
int32_t
ProximityMatcher_Get_Doc_ID_IMP(ProximityMatcher *self) {
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    return ivars->doc_id;
}
// Score the current match: tf(proximity_freq) scaled by both the query
// weight and the weight stored on the first term's posting.
float
ProximityMatcher_Score_IMP(ProximityMatcher *self) {
    ProximityMatcherIVARS *const ivars = ProximityMatcher_IVARS(self);
    ScorePosting *first_post
        = (ScorePosting*)PList_Get_Posting(ivars->plists[0]);
    const float tf_score = Sim_TF(ivars->sim, ivars->proximity_freq);
    return tf_score * ivars->weight * ScorePost_IVARS(first_post)->weight;
}
| Java |
////////////////////////////////////////////////////////////////////////////////
//
// TYPHOON FRAMEWORK
// Copyright 2015, Typhoon Framework Contributors
// All Rights Reserved.
//
// NOTICE: The authors permit you to use, modify, and distribute this file
// in accordance with the terms of the license agreement accompanying it.
//
////////////////////////////////////////////////////////////////////////////////
#import <Foundation/Foundation.h>
#import "PFWeatherClient.h"
#import "PFThemeable.h"
@protocol PFCityDao;
@protocol PFWeatherClient;
@class PFRootViewController;
// View controller for adding a city.  The Typhoon container injects the
// collaborators below; the IBOutlet views are wired up in Interface Builder.
// Implements PFThemeable so its appearance follows the injected PFTheme.
@interface PFAddCityViewController : UIViewController <UITextFieldDelegate, PFThemeable>
#pragma mark - Typhoon injected properties
// Persistence for the user's saved cities.
@property(nonatomic, strong) id <PFCityDao> cityDao;
// Weather back-end client.  NOTE(review): presumably used to validate or
// pre-fetch the entered city -- confirm in the implementation file.
@property(nonatomic, strong) id <PFWeatherClient> weatherClient;
// Visual theme applied via the PFThemeable protocol.
@property(nonatomic, strong) PFTheme* theme;
// Parent controller, injected so this screen can navigate back/notify it.
@property(nonatomic, strong) PFRootViewController* rootViewController;
#pragma mark - Interface Builder injected properties
// Text field where the user types the city name.
@property(nonatomic, weak) IBOutlet UITextField* nameOfCityToAdd;
// Label for validation feedback on the entered name.
@property(nonatomic, weak) IBOutlet UILabel* validationMessage;
// Activity indicator shown while work is in progress.
@property(nonatomic, weak) IBOutlet UIActivityIndicatorView* spinner;
@end
/*
* Copyright 2008-2010 WorldWide Conferencing, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.liftweb {
package jpademo {
package model {
// Closed set of book genres.  Each Value carries a persistence key plus a
// display description; the two-argument Value(...) factory comes from the
// Enumv trait mixed in here, not from scala.Enumeration itself.
object Genre extends Enumeration with Enumv {
  val Mystery = Value("Mystery", "Mystery")
  val SciFi = Value("SciFi", "SciFi")
  val Classic = Value("Classic", "Classic")
  val Childrens = Value("Childrens", "Childrens")
  val Horror = Value("Horror", "Horror")
  val Poetry = Value("Poetry", "Poetry")
  // Fallback value; NOTE(review): presumably used when a stored string
  // matches no known genre -- confirm against the Enumv/EnumvType machinery.
  val unknown = Value("Unknown", "Unknown genre")
}
// JPA custom type mapping the Genre enumeration to a database column.
class GenreType extends EnumvType(Genre) {}
}
}
}
| Java |
import os
from segments import Segment, theme
from utils import colors, glyphs
class CurrentDir(Segment):
    """Prompt segment showing the current working directory.

    The user's home directory prefix is abbreviated to ``~``.
    """

    bg = colors.background(theme.CURRENTDIR_BG)
    fg = colors.foreground(theme.CURRENTDIR_FG)

    def init(self, cwd):
        home = os.path.expanduser('~')
        # Only abbreviate when home is a true path prefix of cwd.  The
        # previous ``cwd.replace(home, '~')`` also rewrote accidental
        # occurrences of the home path in the middle of the string.
        if cwd == home or cwd.startswith(home + os.sep):
            self.text = '~' + cwd[len(home):]
        else:
            self.text = cwd
class ReadOnly(Segment):
    """Segment that flags a working directory we cannot write to."""

    bg = colors.background(theme.READONLY_BG)
    fg = colors.foreground(theme.READONLY_FG)

    def init(self, cwd):
        # Padded lock glyph; hidden again below if the directory is writable.
        self.text = ' ' + glyphs.WRITE_ONLY + ' '
        can_write = os.access(cwd, os.W_OK)
        if can_write:
            self.active = False
class Venv(Segment):
    """Segment showing the name of the active Python virtualenv, if any."""

    bg = colors.background(theme.VENV_BG)
    fg = colors.foreground(theme.VENV_FG)

    def init(self):
        env_path = os.environ.get('VIRTUAL_ENV')
        # Deactivate only when the variable is missing entirely; an empty
        # string still counts as "set" (matches the original behaviour).
        if env_path is None:
            self.active = False
            return
        self.text = glyphs.VIRTUAL_ENV + ' ' + os.path.basename(env_path)
<?php
/**
* Copyright 2017 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Google\Cloud\BigQuery\Tests\Snippet;
use Google\Cloud\BigQuery\BigQueryClient;
use Google\Cloud\BigQuery\ExtractJobConfiguration;
use Google\Cloud\Core\Testing\Snippet\SnippetTestCase;
use Google\Cloud\Core\Testing\TestHelpers;
/**
* @group bigquery
*/
class ExtractJobConfigurationTest extends SnippetTestCase
{
    const PROJECT_ID = 'my_project';
    const DATASET_ID = 'my_dataset';
    const TABLE_ID = 'my_table';
    const MODEL_ID = 'my_model';
    const JOB_ID = '123';

    /** @var ExtractJobConfiguration fixture rebuilt before every test */
    private $config;

    /**
     * Builds a fresh ExtractJobConfiguration with a fixed job reference.
     */
    public function setUp()
    {
        $this->config = new ExtractJobConfiguration(
            self::PROJECT_ID,
            ['jobReference' => ['jobId' => self::JOB_ID]],
            null
        );
    }

    /**
     * The class-level snippet should produce an ExtractJobConfiguration.
     */
    public function testClass()
    {
        $snippet = $this->snippetFromClass(ExtractJobConfiguration::class);
        $res = $snippet->invoke('extractJobConfig');
        $this->assertInstanceOf(ExtractJobConfiguration::class, $res->returnVal());
    }

    /**
     * Each fluent setter snippet should write the expected value into the
     * job's configuration.extract array under the key named after the method.
     *
     * @dataProvider setterDataProvider
     */
    public function testSetters($method, $expected, $bq = null)
    {
        $snippet = $this->snippetFromMethod(ExtractJobConfiguration::class, $method);
        $snippet->addLocal('extractJobConfig', $this->config);
        if ($bq) {
            $snippet->addLocal('bigQuery', $bq);
        }
        $actual = $snippet->invoke('extractJobConfig')
            ->returnVal()
            ->toArray()['configuration']['extract'][$method];
        $this->assertEquals($expected, $actual);
    }

    /**
     * Cases: [setter name, expected config value, optional BigQueryClient
     * stub for setters that resolve table/model references].
     */
    public function setterDataProvider()
    {
        $bq = TestHelpers::stub(BigQueryClient::class, [
            ['projectId' => self::PROJECT_ID]
        ]);
        return [
            [
                'compression',
                'GZIP'
            ],
            [
                'destinationFormat',
                'NEWLINE_DELIMITED_JSON'
            ],
            [
                'destinationUris',
                ['gs://my_bucket/destination.csv']
            ],
            [
                'fieldDelimiter',
                ','
            ],
            [
                'printHeader',
                false
            ],
            [
                'sourceTable',
                [
                    'projectId' => self::PROJECT_ID,
                    'datasetId' => self::DATASET_ID,
                    'tableId' => self::TABLE_ID
                ],
                $bq
            ],
            [
                'sourceModel',
                [
                    'projectId' => self::PROJECT_ID,
                    'datasetId' => self::DATASET_ID,
                    'modelId' => self::MODEL_ID
                ],
                $bq
            ],
            [
                'useAvroLogicalTypes',
                true
            ]
        ];
    }
}
| Java |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.pivotal.gemfirexd.internal.impl.services.cache;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.SQLState;
import com.pivotal.gemfirexd.internal.iapi.services.cache.Cacheable;
import com.pivotal.gemfirexd.internal.iapi.services.cache.CacheableFactory;
/**
* An extension to {@link ConcurrentCache} for GemFireXD that sets the identity
* on a {@link CacheEntry} before inserting into the cache. This is to avoid
* deadlock scenario with DDL read-write locks:
*
* distributed write lock (other VM) -> local write lock -> cache hit with
* existing entry -> {@link CacheEntry#waitUntilIdentityIsSet()}
*
* cache miss -> cache put -> {@link Cacheable#setIdentity(Object)} -> read from
* SYSTABLES -> local read lock
*
* See bug #40683 for more details.
*
* Currently this is only used for <code>TDCacheble</code>s while for other
* {@link Cacheable}s the normal {@link ConcurrentCache} is used.
*
* @see ConcurrentCache
*
* @author swale
*/
final class GfxdConcurrentCache extends ConcurrentCache {

  /**
   * Creates a new cache manager.
   *
   * @param holderFactory
   *          factory which creates <code>Cacheable</code>s
   * @param name
   *          the name of the cache
   * @param initialSize
   *          the initial capacity of the cache
   * @param maxSize
   *          maximum number of elements in the cache
   */
  GfxdConcurrentCache(CacheableFactory holderFactory, String name,
      int initialSize, int maxSize) {
    super(holderFactory, name, initialSize, maxSize);
  }

  // Overrides of ConcurrentCache

  /**
   * Find an object in the cache. If it is not present, add it to the cache. The
   * returned object is kept until <code>release()</code> is called.
   *
   * Unlike the base implementation, the identity is set on the Cacheable
   * BEFORE the entry is published to the cache map (via putIfAbsent), so
   * other threads never observe an entry whose identity is still pending —
   * this avoids the read-write lock deadlock described in the class comment.
   *
   * @param key
   *          identity of the object to find
   * @return the cached object, or <code>null</code> if it cannot be found
   */
  @Override
  public Cacheable find(Object key) throws StandardException {
    if (stopped) {
      return null;
    }
    Cacheable item;
    CacheEntry entry = cache.get(key);
    // Loop until we either return an existing valid entry or succeed in
    // publishing a freshly-created one.
    while (true) {
      if (entry != null) {
        // Found an entry in the cache, lock it.
        entry.lock();
        if (entry.isValid()) {
          try {
            // Entry is still valid. Return it.
            item = entry.getCacheable();
            // The object is already cached. Increase the use count and
            // return it.
            entry.keep(true);
            return item;
          } finally {
            entry.unlock();
          }
        }
        else {
          // This entry has been removed from the cache while we were
          // waiting for the lock. Unlock it and try again.
          entry.unlock();
          entry = cache.get(key);
        }
      }
      else {
        // NOTE(review): the 'true' flag presumably marks the entry's
        // identity as pre-set — confirm against CacheEntry's constructor.
        entry = new CacheEntry(true);
        // Lock the entry before it's inserted in free slot.
        entry.lock();
        try {
          // The object is not cached. Insert the entry into a free
          // slot and retrieve a reusable Cacheable.
          item = insertIntoFreeSlot(key, entry);
        } finally {
          entry.unlock();
        }
        // Set the identity without holding the lock on the entry. If we
        // hold the lock, we may run into a deadlock if the user code in
        // setIdentity() re-enters the buffer manager.
        Cacheable itemWithIdentity = item.setIdentity(key);
        if (itemWithIdentity != null) {
          entry.setCacheable(itemWithIdentity);
          // add the entry to cache
          CacheEntry oldEntry = cache.putIfAbsent(key, entry);
          if (oldEntry != null) {
            // Someone inserted the entry while we created a new
            // one. Retry with the entry currently in the cache.
            entry = oldEntry;
          }
          else {
            // We successfully inserted a new entry.
            return itemWithIdentity;
          }
        }
        else {
          // setIdentity() could not resolve the key; nothing was published.
          return null;
        }
      }
    }
  }

  /**
   * Create an object in the cache. The object is kept until
   * <code>release()</code> is called.
   *
   * As in {@link #find(Object)}, the identity is created before the entry is
   * published to the cache map.
   *
   * @param key
   *          identity of the object to create
   * @param createParameter
   *          parameters passed to <code>Cacheable.createIdentity()</code>
   * @return a reference to the cached object, or <code>null</code> if the
   *         object cannot be created
   * @exception StandardException
   *              if the object is already in the cache, or if some other error
   *              occurs
   * @see Cacheable#createIdentity(Object,Object)
   */
  @Override
  public Cacheable create(Object key, Object createParameter)
      throws StandardException {
    if (stopped) {
      return null;
    }
    Cacheable item;
    CacheEntry entry = new CacheEntry(true);
    // Lock the entry before it's inserted in free slot.
    entry.lock();
    try {
      // The object is not cached. Insert the entry into a free
      // slot and retrieve a reusable Cacheable.
      item = insertIntoFreeSlot(key, entry);
    } finally {
      entry.unlock();
    }
    // Create the identity without holding the lock on the entry.
    // Otherwise, we may run into a deadlock if the user code in
    // createIdentity() re-enters the buffer manager.
    Cacheable itemWithIdentity = item.createIdentity(key, createParameter);
    if (itemWithIdentity != null) {
      entry.setCacheable(itemWithIdentity);
      if (cache.putIfAbsent(key, entry) != null) {
        // We can't create the object if it's already in the cache.
        throw StandardException.newException(SQLState.OBJECT_EXISTS_IN_CACHE,
            name, key);
      }
    }
    return itemWithIdentity;
  }
}
| Java |
<?xml version="1.0" encoding="iso-8859-1"?>
<!DOCTYPE html
PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
<title>satisfy (Spec::Matchers)</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1" />
<link rel="stylesheet" href="../../.././rdoc-style.css" type="text/css" media="screen" />
</head>
<body class="standalone-code">
<pre><span class="ruby-comment cmt"># File lib/spec/matchers/satisfy.rb, line 43</span>
<span class="ruby-keyword kw">def</span> <span class="ruby-identifier">satisfy</span>(<span class="ruby-operator">&</span><span class="ruby-identifier">block</span>)
<span class="ruby-constant">Matchers</span><span class="ruby-operator">::</span><span class="ruby-constant">Satisfy</span>.<span class="ruby-identifier">new</span>(<span class="ruby-operator">&</span><span class="ruby-identifier">block</span>)
<span class="ruby-keyword kw">end</span></pre>
</body>
</html> | Java |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.StandardJavaFileManager;
import javax.tools.ToolProvider;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.io.ByteStreams;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.Path;
import org.apache.pig.ExecType;
import org.apache.pig.PigServer;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
/**
* Ensure classes from a registered jar are available in the UDFContext.
* Please see PIG-2532 for additional details.
*/
/**
 * Ensure classes from a registered jar are available in the UDFContext.
 * Please see PIG-2532 for additional details.
 */
public class TestRegisteredJarVisibility {
    private static final Log LOG = LogFactory.getLog(TestRegisteredJarVisibility.class);
    private static final String JAR_FILE_NAME = "test-foo-loader.jar";
    private static final String PACKAGE_NAME = "org.apache.pig.test";
    // Actual data is not important. Reusing an existing input file.
    private static final File INPUT_FILE = new File("test/data/pigunit/top_queries_input_data.txt");

    private static MiniCluster cluster;
    private static File jarFile;

    /**
     * Compiles the loader/schema test sources, packages them into a jar that
     * is NOT on the test classpath, and starts the mini cluster.
     */
    @BeforeClass()
    public static void setUp() throws IOException {
        String testResourcesDir = "test/resources/" + PACKAGE_NAME.replace(".", "/");
        String testBuildDataDir = "build/test/data";
        // Create the test data directory if needed; fail fast if it could not
        // be created rather than with an obscure FileNotFoundException later.
        File testDataDir = new File(testBuildDataDir,
                TestRegisteredJarVisibility.class.getCanonicalName());
        testDataDir.mkdirs();
        Assert.assertTrue("Could not create " + testDataDir, testDataDir.isDirectory());
        jarFile = new File(testDataDir, JAR_FILE_NAME);
        File[] javaFiles = new File[]{
                new File(testResourcesDir, "RegisteredJarVisibilityLoader.java"),
                new File(testResourcesDir, "RegisteredJarVisibilitySchema.java")};
        List<File> classFiles = compile(javaFiles);
        // Canonical class name to class file
        Map<String, File> filesToJar = Maps.newHashMap();
        for (File classFile : classFiles) {
            filesToJar.put(
                    PACKAGE_NAME + "." + classFile.getName().replace(".class", ""),
                    classFile);
        }
        jar(filesToJar);
        cluster = MiniCluster.buildCluster();
    }

    @AfterClass()
    public static void tearDown() {
        cluster.shutDown();
    }

    /**
     * Sanity check: the schema class must NOT be loadable from the test
     * classpath, otherwise the visibility test below proves nothing.
     */
    @Test()
    public void testRegisteredJarVisibilitySchemaNotOnClasspath() {
        boolean exceptionThrown = false;
        try {
            Class.forName("org.apache.pig.test.FooSchema");
        } catch (ClassNotFoundException e) {
            exceptionThrown = true;
        }
        Assert.assertTrue(exceptionThrown);
    }

    /**
     * Registers the jar in a Pig script and verifies the loader (and the
     * schema class it references) is visible when the script runs.
     */
    @Test()
    public void testRegisteredJarVisibility() throws IOException {
        cluster.getFileSystem().copyFromLocalFile(
                new Path("file://" + INPUT_FILE.getAbsolutePath()), new Path(INPUT_FILE.getName()));
        PigServer pigServer = new PigServer(ExecType.MAPREDUCE, cluster.getProperties());
        String query = "register " + jarFile.getAbsolutePath() + ";\n"
                + "a = load '" + INPUT_FILE.getName()
                + "' using org.apache.pig.test.RegisteredJarVisibilityLoader();";
        LOG.info("Running pig script:\n" + query);
        pigServer.registerScript(new ByteArrayInputStream(query.getBytes()));
        pigServer.openIterator("a");
        pigServer.shutdown();
    }

    /**
     * Compile the given sources with the platform compiler.
     *
     * @param javaFiles sources to compile
     * @return the generated .class files (scheduled for deletion on exit)
     */
    private static List<File> compile(File[] javaFiles) {
        LOG.info("Compiling: " + Arrays.asList(javaFiles));
        JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();
        StandardJavaFileManager fileManager = compiler.getStandardFileManager(null, null, null);
        Iterable<? extends JavaFileObject> compilationUnits =
                fileManager.getJavaFileObjects(javaFiles);
        JavaCompiler.CompilationTask task =
                compiler.getTask(null, fileManager, null, null, null, compilationUnits);
        // CompilationTask.call() returns false on compilation failure; the
        // result was previously ignored, which deferred the error to a
        // confusing "class file missing" assertion below.
        Assert.assertTrue("Compilation failed for: " + Arrays.asList(javaFiles), task.call());
        List<File> classFiles = Lists.newArrayList();
        for (File javaFile : javaFiles) {
            File classFile = new File(javaFile.getAbsolutePath().replace(".java", ".class"));
            classFile.deleteOnExit();
            Assert.assertTrue(classFile.exists());
            classFiles.add(classFile);
            LOG.info("Created " + classFile.getAbsolutePath());
        }
        return classFiles;
    }

    /**
     * Create a jar file containing the generated classes.
     *
     * @param filesToJar map of canonical class name to class file
     * @throws IOException on error
     */
    private static void jar(Map<String, File> filesToJar) throws IOException {
        LOG.info("Creating jar file containing: " + filesToJar);
        JarOutputStream jos = new JarOutputStream(new FileOutputStream(jarFile.getAbsolutePath()));
        try {
            for (Map.Entry<String, File> entry : filesToJar.entrySet()) {
                String zipEntryName = entry.getKey().replace(".", "/") + ".class";
                LOG.info("Adding " + zipEntryName + " to " + jarFile.getAbsolutePath());
                jos.putNextEntry(new ZipEntry(zipEntryName));
                InputStream classInputStream = new FileInputStream(entry.getValue().getAbsolutePath());
                try {
                    ByteStreams.copy(classInputStream, jos);
                } finally {
                    classInputStream.close();
                }
            }
        } finally {
            jos.close();
        }
        Assert.assertTrue(jarFile.exists());
        LOG.info("Created " + jarFile.getAbsolutePath());
    }
}
| Java |
package com.kodcu.service.extension.chart;
import com.kodcu.controller.ApplicationController;
import com.kodcu.other.Current;
import com.kodcu.service.ThreadService;
import javafx.scene.chart.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
/**
* Created by usta on 31.03.2015.
*/
@Component("area-bean")
public class AreaChartBuilderService extends XYChartBuilderService {
private final ThreadService threadService;
private final Current current;
private final ApplicationController controller;
@Autowired
public AreaChartBuilderService(ThreadService threadService, Current current, ApplicationController controller) {
super(threadService, current, controller);
this.threadService = threadService;
this.current = current;
this.controller = controller;
}
@Override
protected XYChart<Number, Number> createXYChart() {
final NumberAxis xAxis = new NumberAxis();
final NumberAxis yAxis = new NumberAxis();
final XYChart<Number, Number> lineChart = new AreaChart<Number, Number>(xAxis, yAxis);
return lineChart;
}
}
| Java |
package com.taobao.zeus.broadcast.alarm;
import java.net.InetAddress;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Properties;
import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.NoSuchProviderException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import com.taobao.zeus.model.LogDescriptor;
import com.taobao.zeus.store.UserManager;
import com.taobao.zeus.store.mysql.MysqlLogManager;
import com.taobao.zeus.store.mysql.persistence.ZeusUser;
import com.taobao.zeus.util.Environment;
public class MailAlarm extends AbstractZeusAlarm {

    private static Logger log = LoggerFactory.getLogger(MailAlarm.class);

    @Autowired
    private UserManager userManager;
    @Autowired
    private MysqlLogManager zeusLogManager;

    private static String host = Environment.getHost();// mail server host
    private static String port = Environment.getPort();// SMTP port
    private static String from = Environment.getSendFrom();// sender address
    private static String user = Environment.getUsername();// SMTP account name
    private static String password = Environment.getPassword();// SMTP password

    /**
     * Resolve the given user ids to e-mail addresses and deliver the alarm.
     *
     * Addresses are taken from each user's e-mail field; a ';'-separated list
     * is split into individual addresses, and anything without an '@' is
     * dropped. Nothing is sent when no valid address remains.
     *
     * @param jobId   id of the job that triggered the alarm (for logging)
     * @param users   user ids to notify
     * @param title   mail subject
     * @param content mail body; "&lt;br/&gt;" markers are converted to CRLF
     */
    @Override
    public void alarm(String jobId, List<String> users, String title, String content)
            throws Exception {
        List<ZeusUser> userList = userManager.findListByUidByOrder(users);
        if (userList == null || userList.isEmpty()) {
            return;
        }
        List<String> emails = new ArrayList<String>();
        // Renamed from 'user' to avoid shadowing the static SMTP account field.
        for (ZeusUser zeusUser : userList) {
            String userEmail = zeusUser.getEmail();
            if (userEmail == null || userEmail.isEmpty() || !userEmail.contains("@")) {
                continue;
            }
            if (userEmail.contains(";")) {
                for (String address : userEmail.split(";")) {
                    if (address.contains("@")) {
                        emails.add(address);
                    }
                }
            } else {
                emails.add(userEmail);
            }
        }
        if (emails.size() > 0) {
            // The stored content uses HTML line breaks; mails are plain text.
            content = content.replace("<br/>", "\r\n");
            sendEmail(jobId, emails, title, content);
        }
    }

    /**
     * Send one plain-text mail to the given addresses via SMTP.
     *
     * Failures are logged, never propagated, so a broken mail server cannot
     * break alarm processing.
     *
     * @param jobId   id of the job that triggered the alarm (for logging)
     * @param emails  recipient addresses
     * @param subject mail subject
     * @param body    mail body
     */
    public void sendEmail(String jobId, List<String> emails, String subject,
            String body) {
        Transport transport = null;
        try {
            log.info( "jobId: " + jobId +" begin to send the email!");
            Properties props = new Properties();
            props.put("mail.smtp.host", host);
            props.put("mail.smtp.port", port);
            props.put("mail.smtp.auth", "true");
            // Session.getInstance creates a session for exactly these
            // properties; getDefaultInstance would return a JVM-wide cached
            // session that may have been created with different settings.
            Session session = Session.getInstance(props, null);
            transport = session.getTransport("smtp");
            transport.connect(host, user, password);
            MimeMessage msg = new MimeMessage(session);
            msg.setSentDate(new Date());
            InternetAddress fromAddress = new InternetAddress(from);
            msg.setFrom(fromAddress);
            InternetAddress[] toAddress = new InternetAddress[emails.size()];
            for (int i = 0; i < emails.size(); i++) {
                toAddress[i] = new InternetAddress(emails.get(i));
            }
            msg.setRecipients(Message.RecipientType.TO, toAddress);
            msg.setSubject(subject, "UTF-8");
            msg.setText(body, "UTF-8");
            msg.saveChanges();
            transport.sendMessage(msg, msg.getAllRecipients());
            log.info("jobId: " + jobId + " send email: " + emails + "; from: " + from + " subject: "
                    + subject + ", send success!");
        } catch (NoSuchProviderException e) {
            log.error("jobId: " + jobId + " fail to send the mail. ", e);
        } catch (MessagingException e) {
            log.error("jobId: " + jobId + " fail to send the mail. ", e);
        } catch (Exception e) {
            log.error("jobId: " + jobId + " fail to send the mail. ", e);
        } finally {
            // The SMTP connection was previously leaked on every call.
            if (transport != null) {
                try {
                    transport.close();
                } catch (MessagingException e) {
                    log.error("jobId: " + jobId + " fail to close the transport. ", e);
                }
            }
        }
    }
}
| Java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.