Dataset schema (one row per source file; nullable columns marked):

| column | dtype | range |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 1 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 239 |
| max_stars_repo_name | string | length 5 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | sequence | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k (nullable) |
| max_stars_repo_stars_event_min_datetime | string | length 24 (nullable) |
| max_stars_repo_stars_event_max_datetime | string | length 24 (nullable) |
| max_issues_repo_path | string | length 3 to 239 |
| max_issues_repo_name | string | length 5 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | sequence | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k (nullable) |
| max_issues_repo_issues_event_min_datetime | string | length 24 (nullable) |
| max_issues_repo_issues_event_max_datetime | string | length 24 (nullable) |
| max_forks_repo_path | string | length 3 to 239 |
| max_forks_repo_name | string | length 5 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | sequence | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k (nullable) |
| max_forks_repo_forks_event_min_datetime | string | length 24 (nullable) |
| max_forks_repo_forks_event_max_datetime | string | length 24 (nullable) |
| content | string | length 1 to 1.03M |
| avg_line_length | float64 | 1 to 958k |
| max_line_length | int64 | 1 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
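The rows below are raw exports in this schema, pipe-separated, with each file's `content` inlined. As an orientation aid, here is a minimal sketch of how rows with this schema could be inspected with the Hugging Face `datasets` library; the dataset identifier is a placeholder assumption, not the actual source of this dump.

```python
from datasets import load_dataset

# "org/code-corpus" is a hypothetical placeholder; substitute the real
# dataset identifier this dump was exported from.
ds = load_dataset("org/code-corpus", split="train", streaming=True)

for row in ds.take(2):  # peek at a couple of rows without downloading everything
    print(row["hexsha"], row["size"], row["max_stars_repo_name"], row["max_stars_count"])
    print(row["content"][:120])  # first characters of the stored source file
```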
4a1de5b026019452c5fa5d9e4252d98c94c55795 | 8,629 | py | Python | cpgames/modules/core/flappybird/flappybird.py | Wasabii88/Games | 33262ca1958207a24e57e3532feded7e275b1dd1 | ["MIT"] | 1 | 2022-02-27T10:33:41.000Z | 2022-02-27T10:33:41.000Z | cpgames/modules/core/flappybird/flappybird.py | beiwei365/Games | f6499f378802d3212a08aeca761191b58714b7f0 | ["MIT"] | null | null | null | cpgames/modules/core/flappybird/flappybird.py | beiwei365/Games | f6499f378802d3212a08aeca761191b58714b7f0 | ["MIT"] | null | null | null |
'''
Function:
    Flappy Bird mini-game
Author:
    Charles
WeChat official account:
    Charles的皮卡丘
'''
import os
import random
import pygame
from ...utils import QuitGame
from ..base import PygameBaseGame
from .modules import GameEndIterface, GameStartInterface, Bird, Pipe
'''Configuration class'''
class Config():
    # Root directory
    rootdir = os.path.split(os.path.abspath(__file__))[0]
    # FPS
    FPS = 60
    # Screen size
    SCREENSIZE = (288, 512)
    # Gap between pipes
    PIPE_GAP_SIZE = 100
    # Window title
    TITLE = 'Flappy Bird —— Charles的皮卡丘'
    # Game sound paths
SOUND_PATHS_DICT = {
'die': os.path.join(rootdir, 'resources/audios/die.wav'),
'hit': os.path.join(rootdir, 'resources/audios/hit.wav'),
'point': os.path.join(rootdir, 'resources/audios/point.wav'),
'swoosh': os.path.join(rootdir, 'resources/audios/swoosh.wav'),
'wing': os.path.join(rootdir, 'resources/audios/wing.wav'),
}
    # Game image paths
IMAGE_PATHS_DICT = {
'number': {
'0': os.path.join(rootdir, 'resources/images/0.png'), '1': os.path.join(rootdir, 'resources/images/1.png'),
'2': os.path.join(rootdir, 'resources/images/2.png'), '3': os.path.join(rootdir, 'resources/images/3.png'),
'4': os.path.join(rootdir, 'resources/images/4.png'), '5': os.path.join(rootdir, 'resources/images/5.png'),
'6': os.path.join(rootdir, 'resources/images/6.png'), '7': os.path.join(rootdir, 'resources/images/7.png'),
'8': os.path.join(rootdir, 'resources/images/8.png'), '9': os.path.join(rootdir, 'resources/images/9.png'),
},
'bird': {
'red': {
'up': os.path.join(rootdir, 'resources/images/redbird-upflap.png'),
'mid': os.path.join(rootdir, 'resources/images/redbird-midflap.png'),
'down': os.path.join(rootdir, 'resources/images/redbird-downflap.png')
},
'blue': {
'up': os.path.join(rootdir, 'resources/images/bluebird-upflap.png'),
'mid': os.path.join(rootdir, 'resources/images/bluebird-midflap.png'),
'down': os.path.join(rootdir, 'resources/images/bluebird-downflap.png')
},
'yellow': {
'up': os.path.join(rootdir, 'resources/images/yellowbird-upflap.png'),
'mid': os.path.join(rootdir, 'resources/images/yellowbird-midflap.png'),
'down': os.path.join(rootdir, 'resources/images/yellowbird-downflap.png')
},
},
'background': {
'day': os.path.join(rootdir, 'resources/images/background-day.png'),
'night': os.path.join(rootdir, 'resources/images/background-night.png'),
},
'pipe': {
'green': os.path.join(rootdir, 'resources/images/pipe-green.png'),
'red': os.path.join(rootdir, 'resources/images/pipe-red.png'),
},
'others': {
'gameover': os.path.join(rootdir, 'resources/images/gameover.png'),
'message': os.path.join(rootdir, 'resources/images/message.png'),
'base': os.path.join(rootdir, 'resources/images/base.png'),
},
}
'''Flappy Bird mini-game'''
class FlappyBirdGame(PygameBaseGame):
game_type = 'flappybird'
def __init__(self, **kwargs):
self.cfg = Config
super(FlappyBirdGame, self).__init__(config=self.cfg, **kwargs)
    '''Run the game'''
def run(self):
while True:
            # Initialization
screen, resource_loader, cfg = self.screen, self.resource_loader, self.cfg
            # Define game resources
            # --Audio
sounds = resource_loader.sounds
            # --Number images
number_images = resource_loader.images['number']
for key in number_images: number_images[key] = number_images[key].convert_alpha()
            # --Pipe images
pipe_images = dict()
pipe_images['bottom'] = random.choice(list(resource_loader.images['pipe'].values())).convert_alpha()
pipe_images['top'] = pygame.transform.rotate(pipe_images['bottom'], 180)
            # --Bird images
bird_images = random.choice(list(resource_loader.images['bird'].values()))
for key in bird_images: bird_images[key] = bird_images[key].convert_alpha()
            # --Background image
backgroud_image = random.choice(list(resource_loader.images['background'].values())).convert_alpha()
            # --Other images
other_images = resource_loader.images['others']
for key in other_images: other_images[key] = other_images[key].convert_alpha()
            # Game start screen
game_start_info = GameStartInterface(screen, sounds, bird_images, other_images, backgroud_image, cfg)
            # Enter the main game
score = 0
bird_pos, base_pos, bird_idx = list(game_start_info.values())
base_diff_bg = other_images['base'].get_width() - backgroud_image.get_width()
clock = pygame.time.Clock()
            # --Pipe sprites
pipe_sprites = pygame.sprite.Group()
for i in range(2):
pipe_pos = Pipe.randomPipe(cfg, pipe_images.get('top'))
pipe_sprites.add(Pipe(image=pipe_images.get('top'), position=(cfg.SCREENSIZE[0]+200+i*cfg.SCREENSIZE[0]/2, pipe_pos.get('top')[-1])))
pipe_sprites.add(Pipe(image=pipe_images.get('bottom'), position=(cfg.SCREENSIZE[0]+200+i*cfg.SCREENSIZE[0]/2, pipe_pos.get('bottom')[-1])))
            # --Bird sprite
bird = Bird(images=bird_images, idx=bird_idx, position=bird_pos)
            # --Whether a new pipe should be added
            is_add_pipe = True
            # --Whether the game is still running
            is_game_running = True
while is_game_running:
for event in pygame.event.get():
if event.type == pygame.QUIT or (event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE):
QuitGame()
elif event.type == pygame.KEYDOWN:
if event.key == pygame.K_SPACE or event.key == pygame.K_UP:
bird.setFlapped()
sounds['wing'].play()
                # --Collision detection
for pipe in pipe_sprites:
if pygame.sprite.collide_mask(bird, pipe):
sounds['hit'].play()
is_game_running = False
                # --Update the bird
boundary_values = [0, base_pos[-1]]
is_dead = bird.update(boundary_values, float(clock.tick(cfg.FPS))/1000.)
if is_dead:
sounds['hit'].play()
is_game_running = False
                # --Scroll the base so the bird appears to fly forward
base_pos[0] = -((-base_pos[0] + 4) % base_diff_bg)
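                # (wraps the ground's x-offset modulo the extra width the base
                # image has beyond the background, scrolling 4 px per frame)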
                # --Scroll the pipes so the bird appears to fly forward
flag = False
for pipe in pipe_sprites:
pipe.rect.left -= 4
if pipe.rect.centerx < bird.rect.centerx and not pipe.used_for_score:
pipe.used_for_score = True
score += 0.5
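                        # each pipe pair is two sprites (top + bottom), so 0.5
                        # per sprite gives +1 per pair; the '.5' check below
                        # fires the point sound once per pair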
if '.5' in str(score):
sounds['point'].play()
if pipe.rect.left < 5 and pipe.rect.left > 0 and is_add_pipe:
pipe_pos = Pipe.randomPipe(cfg, pipe_images.get('top'))
pipe_sprites.add(Pipe(image=pipe_images.get('top'), position=pipe_pos.get('top')))
pipe_sprites.add(Pipe(image=pipe_images.get('bottom'), position=pipe_pos.get('bottom')))
is_add_pipe = False
elif pipe.rect.right < 0:
pipe_sprites.remove(pipe)
flag = True
if flag: is_add_pipe = True
                # --Blit the required elements onto the screen
screen.blit(backgroud_image, (0, 0))
pipe_sprites.draw(screen)
screen.blit(other_images['base'], base_pos)
self.showScore(cfg, screen, score, number_images)
bird.draw(screen)
pygame.display.update()
clock.tick(cfg.FPS)
GameEndIterface(screen, sounds, self.showScore, score, number_images, bird, pipe_sprites, backgroud_image, other_images, base_pos, cfg)
    '''Display the current score'''
@staticmethod
def showScore(cfg, screen, score, number_images):
digits = list(str(int(score)))
width = 0
for d in digits:
width += number_images.get(d).get_width()
offset = (cfg.SCREENSIZE[0] - width) / 2
for d in digits:
screen.blit(number_images.get(d), (offset, cfg.SCREENSIZE[1] * 0.1))
            offset += number_images.get(d).get_width()
 | 46.643243 | 155 | 0.554178 |
4a1de71dcd740ea68c0b556dce82fc3b88c4a6f5 | 662 | py | Python | third_party/cyw30739_sdk/btp_reader.py | carol-apple/connectedhomeip | b1d40eb423ba5c2f4bbe15ff42a2b5d1b78ba2ce | ["Apache-2.0"] | 1 | 2022-02-22T02:02:10.000Z | 2022-02-22T02:02:10.000Z | third_party/cyw30739_sdk/btp_reader.py | carol-apple/connectedhomeip | b1d40eb423ba5c2f4bbe15ff42a2b5d1b78ba2ce | ["Apache-2.0"] | null | null | null | third_party/cyw30739_sdk/btp_reader.py | carol-apple/connectedhomeip | b1d40eb423ba5c2f4bbe15ff42a2b5d1b78ba2ce | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python
import json
import sys
def main():
btp_file = sys.argv[1]
items = {}
with open(btp_file) as btp:
for line in btp:
item = line.strip().split("=")
if len(item) == 2:
key = item[0].strip()
value = item[1].strip()
items[key] = value
items["XIP_DS_OFFSET"] = "0x0001e000"
items["XIP_LEN"] = "0x{:08x}".format(
int(items["ConfigDS2Location"], 16)
- int(items["ConfigDSLocation"], 16)
- int(items["XIP_DS_OFFSET"], 16)
)
print(json.dumps(items))
return 0
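# Example (sketch, hypothetical values): for a .btp file containing
#   ConfigDSLocation = 0x00500000
#   ConfigDS2Location = 0x00580000
# the emitted JSON would include "XIP_LEN": "0x00062000", i.e.
# 0x00580000 - 0x00500000 - 0x0001e000, alongside the parsed pairs.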
if __name__ == "__main__":
sys.exit(main())
| 20.6875 | 44 | 0.522659 |
4a1de7c04ee4bad3a93f90c2c06b81bde823b9b1 | 802 | py | Python | bin/Python27/Lib/site-packages/numpy/linalg/tests/test_deprecations.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | ["MIT"] | null | null | null | bin/Python27/Lib/site-packages/numpy/linalg/tests/test_deprecations.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | ["MIT"] | null | null | null | bin/Python27/Lib/site-packages/numpy/linalg/tests/test_deprecations.py | lefevre-fraser/openmeta-mms | 08f3115e76498df1f8d70641d71f5c52cab4ce5f | ["MIT"] | 1 | 2020-05-07T11:04:14.000Z | 2020-05-07T11:04:14.000Z |
"""Test deprecation and future warnings.
"""
from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.testing import assert_warns, run_module_suite
def test_qr_mode_full_future_warning():
"""Check mode='full' FutureWarning.
In numpy 1.8 the mode options 'full' and 'economic' in linalg.qr were
deprecated. The release date will probably be sometime in the summer
of 2013.
"""
a = np.eye(2)
assert_warns(DeprecationWarning, np.linalg.qr, a, mode='full')
assert_warns(DeprecationWarning, np.linalg.qr, a, mode='f')
assert_warns(DeprecationWarning, np.linalg.qr, a, mode='economic')
assert_warns(DeprecationWarning, np.linalg.qr, a, mode='e')
if __name__ == "__main__":
run_module_suite()
| 29.703704 | 74 | 0.704489 |
4a1de98b863672b08e5c8675021aed9a3b396b45 | 19,081 | py | Python | keras/activations.py | slowy07/keras | d3688b72924a4235598f0f80038de8c897f44799 | ["Apache-2.0"] | 1 | 2021-07-21T15:54:12.000Z | 2021-07-21T15:54:12.000Z | keras/activations.py | slowy07/keras | d3688b72924a4235598f0f80038de8c897f44799 | ["Apache-2.0"] | null | null | null | keras/activations.py | slowy07/keras | d3688b72924a4235598f0f80038de8c897f44799 | ["Apache-2.0"] | null | null | null |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Built-in activation functions."""
import tensorflow.compat.v2 as tf
from keras import backend
from keras.layers import advanced_activations
from keras.utils.generic_utils import deserialize_keras_object
from keras.utils.generic_utils import serialize_keras_object
from tensorflow.python.util.tf_export import keras_export
# b/123041942
# In TF 2.x, if the `tf.nn.softmax` is used as an activation function in Keras
# layers, it gets serialized as 'softmax_v2' instead of 'softmax' as the
# internal method name is returned in serialization. This results in errors in
# model exporting and loading as Keras can't find any activation function with
# the name of `softmax_v2`.
# This dict maps the activation function name from its v2 version to its
# canonical name.
_TF_ACTIVATIONS_V2 = {
'softmax_v2': 'softmax',
}
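# For illustration, a sketch of the naming mismatch the map above works
# around (per the comment block: on TF 2.x builds the exported function is
# literally named `softmax_v2`):
#
#   >>> tf.nn.softmax.__name__
#   'softmax_v2'
#   >>> _TF_ACTIVATIONS_V2[tf.nn.softmax.__name__]
#   'softmax'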
@keras_export('keras.activations.softmax')
@tf.__internal__.dispatch.add_dispatch_support
def softmax(x, axis=-1):
"""Softmax converts a vector of values to a probability distribution.
The elements of the output vector are in range (0, 1) and sum to 1.
Each vector is handled independently. The `axis` argument sets which axis
of the input the function is applied along.
Softmax is often used as the activation for the last
layer of a classification network because the result could be interpreted as
a probability distribution.
The softmax of each vector x is computed as
`exp(x) / tf.reduce_sum(exp(x))`.
The input values in are the log-odds of the resulting probability.
Args:
x : Input tensor.
axis: Integer, axis along which the softmax normalization is applied.
Returns:
Tensor, output of softmax transformation (all values are non-negative
and sum to 1).
Examples:
**Example 1: standalone usage**
>>> inputs = tf.random.normal(shape=(32, 10))
>>> outputs = tf.keras.activations.softmax(inputs)
>>> tf.reduce_sum(outputs[0, :]) # Each sample in the batch now sums to 1
<tf.Tensor: shape=(), dtype=float32, numpy=1.0000001>
**Example 2: usage in a `Dense` layer**
>>> layer = tf.keras.layers.Dense(32, activation=tf.keras.activations.softmax)
"""
if x.shape.rank > 1:
if isinstance(axis, int):
output = tf.nn.softmax(x, axis=axis)
else:
# nn.softmax does not support tuple axis.
e = tf.exp(x - tf.reduce_max(x, axis=axis, keepdims=True))
s = tf.reduce_sum(e, axis=axis, keepdims=True)
output = e / s
else:
raise ValueError('Cannot apply softmax to a tensor that is 1D. '
'Received input: %s' % (x,))
# Cache the logits to use for crossentropy loss.
output._keras_logits = x # pylint: disable=protected-access
return output
@keras_export('keras.activations.elu')
@tf.__internal__.dispatch.add_dispatch_support
def elu(x, alpha=1.0):
"""Exponential Linear Unit.
The exponential linear unit (ELU) with `alpha > 0` is:
`x` if `x > 0` and
`alpha * (exp(x) - 1)` if `x < 0`
The ELU hyperparameter `alpha` controls the value to which an
ELU saturates for negative net inputs. ELUs diminish the
vanishing gradient effect.
ELUs have negative values which pushes the mean of the activations
closer to zero.
Mean activations that are closer to zero enable faster learning as they
bring the gradient closer to the natural gradient.
ELUs saturate to a negative value when the argument gets smaller.
Saturation means a small derivative which decreases the variation
and the information that is propagated to the next layer.
Example Usage:
>>> import tensorflow as tf
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Conv2D(32, (3, 3), activation='elu',
... input_shape=(28, 28, 1)))
>>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
>>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
>>> model.add(tf.keras.layers.MaxPooling2D((2, 2)))
>>> model.add(tf.keras.layers.Conv2D(64, (3, 3), activation='elu'))
<tensorflow.python.keras.engine.sequential.Sequential object ...>
Args:
x: Input tensor.
alpha: A scalar, slope of negative section. `alpha` controls the value to
which an ELU saturates for negative net inputs.
Returns:
The exponential linear unit (ELU) activation function: `x` if `x > 0` and
`alpha * (exp(x) - 1)` if `x < 0`.
Reference:
[Fast and Accurate Deep Network Learning by Exponential Linear Units
(ELUs) (Clevert et al, 2016)](https://arxiv.org/abs/1511.07289)
"""
return backend.elu(x, alpha)
@keras_export('keras.activations.selu')
@tf.__internal__.dispatch.add_dispatch_support
def selu(x):
"""Scaled Exponential Linear Unit (SELU).
The Scaled Exponential Linear Unit (SELU) activation function is defined as:
- `if x > 0: return scale * x`
- `if x < 0: return scale * alpha * (exp(x) - 1)`
where `alpha` and `scale` are pre-defined constants
(`alpha=1.67326324` and `scale=1.05070098`).
Basically, the SELU activation function multiplies `scale` (> 1) with the
output of the `tf.keras.activations.elu` function to ensure a slope larger
than one for positive inputs.
The values of `alpha` and `scale` are
chosen so that the mean and variance of the inputs are preserved
between two consecutive layers as long as the weights are initialized
correctly (see `tf.keras.initializers.LecunNormal` initializer)
and the number of input units is "large enough"
(see reference paper for more information).
Example Usage:
>>> num_classes = 10 # 10-class problem
>>> model = tf.keras.Sequential()
>>> model.add(tf.keras.layers.Dense(64, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(32, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(16, kernel_initializer='lecun_normal',
... activation='selu'))
>>> model.add(tf.keras.layers.Dense(num_classes, activation='softmax'))
Args:
x: A tensor or variable to compute the activation function for.
Returns:
The scaled exponential unit activation: `scale * elu(x, alpha)`.
Notes:
- To be used together with the
`tf.keras.initializers.LecunNormal` initializer.
- To be used together with the dropout variant
`tf.keras.layers.AlphaDropout` (not regular dropout).
References:
- [Klambauer et al., 2017](https://arxiv.org/abs/1706.02515)
"""
return tf.nn.selu(x)
@keras_export('keras.activations.softplus')
@tf.__internal__.dispatch.add_dispatch_support
def softplus(x):
"""Softplus activation function, `softplus(x) = log(exp(x) + 1)`.
Example Usage:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.softplus(a)
>>> b.numpy()
array([2.0611537e-09, 3.1326166e-01, 6.9314718e-01, 1.3132616e+00,
2.0000000e+01], dtype=float32)
Args:
x: Input tensor.
Returns:
The softplus activation: `log(exp(x) + 1)`.
"""
return tf.math.softplus(x)
@keras_export('keras.activations.softsign')
@tf.__internal__.dispatch.add_dispatch_support
def softsign(x):
"""Softsign activation function, `softsign(x) = x / (abs(x) + 1)`.
Example Usage:
>>> a = tf.constant([-1.0, 0.0, 1.0], dtype = tf.float32)
>>> b = tf.keras.activations.softsign(a)
>>> b.numpy()
array([-0.5, 0. , 0.5], dtype=float32)
Args:
x: Input tensor.
Returns:
The softsign activation: `x / (abs(x) + 1)`.
"""
return tf.math.softsign(x)
@keras_export('keras.activations.swish')
@tf.__internal__.dispatch.add_dispatch_support
def swish(x):
"""Swish activation function, `swish(x) = x * sigmoid(x)`.
Swish activation function which returns `x*sigmoid(x)`.
It is a smooth, non-monotonic function that consistently matches
or outperforms ReLU on deep networks, it is unbounded above and
bounded below.
Example Usage:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.swish(a)
>>> b.numpy()
array([-4.1223075e-08, -2.6894143e-01, 0.0000000e+00, 7.3105860e-01,
2.0000000e+01], dtype=float32)
Args:
x: Input tensor.
Returns:
The swish activation applied to `x` (see reference paper for details).
Reference:
- [Ramachandran et al., 2017](https://arxiv.org/abs/1710.05941)
"""
return tf.nn.silu(x)
@keras_export('keras.activations.relu')
@tf.__internal__.dispatch.add_dispatch_support
def relu(x, alpha=0., max_value=None, threshold=0.):
"""Applies the rectified linear unit activation function.
With default values, this returns the standard ReLU activation:
`max(x, 0)`, the element-wise maximum of 0 and the input tensor.
Modifying default parameters allows you to use non-zero thresholds,
change the max value of the activation,
and to use a non-zero multiple of the input for values below the threshold.
For example:
>>> foo = tf.constant([-10, -5, 0.0, 5, 10], dtype = tf.float32)
>>> tf.keras.activations.relu(foo).numpy()
array([ 0., 0., 0., 5., 10.], dtype=float32)
>>> tf.keras.activations.relu(foo, alpha=0.5).numpy()
array([-5. , -2.5, 0. , 5. , 10. ], dtype=float32)
>>> tf.keras.activations.relu(foo, max_value=5.).numpy()
array([0., 0., 0., 5., 5.], dtype=float32)
>>> tf.keras.activations.relu(foo, threshold=5.).numpy()
array([-0., -0., 0., 0., 10.], dtype=float32)
Args:
x: Input `tensor` or `variable`.
alpha: A `float` that governs the slope for values lower than the
threshold.
max_value: A `float` that sets the saturation threshold (the largest value
the function will return).
threshold: A `float` giving the threshold value of the activation function
below which values will be damped or set to zero.
Returns:
A `Tensor` representing the input tensor,
transformed by the relu activation function.
Tensor will be of the same shape and dtype of input `x`.
"""
return backend.relu(x, alpha=alpha, max_value=max_value, threshold=threshold)
@keras_export('keras.activations.gelu', v1=[])
@tf.__internal__.dispatch.add_dispatch_support
def gelu(x, approximate=False):
"""Applies the Gaussian error linear unit (GELU) activation function.
Gaussian error linear unit (GELU) computes
`x * P(X <= x)`, where `P(X) ~ N(0, 1)`.
The (GELU) nonlinearity weights inputs by their value, rather than gates
inputs by their sign as in ReLU.
For example:
>>> x = tf.constant([-3.0, -1.0, 0.0, 1.0, 3.0], dtype=tf.float32)
>>> y = tf.keras.activations.gelu(x)
>>> y.numpy()
array([-0.00404951, -0.15865529, 0. , 0.8413447 , 2.9959507 ],
dtype=float32)
>>> y = tf.keras.activations.gelu(x, approximate=True)
>>> y.numpy()
array([-0.00363752, -0.15880796, 0. , 0.841192 , 2.9963627 ],
dtype=float32)
Args:
x: Input tensor.
approximate: A `bool`, whether to enable approximation.
Returns:
The gaussian error linear activation:
`0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3)))`
if `approximate` is `True` or
`x * P(X <= x) = 0.5 * x * (1 + erf(x / sqrt(2)))`,
where `P(X) ~ N(0, 1)`,
if `approximate` is `False`.
Reference:
- [Gaussian Error Linear Units (GELUs)](https://arxiv.org/abs/1606.08415)
"""
return tf.nn.gelu(x, approximate)
@keras_export('keras.activations.tanh')
@tf.__internal__.dispatch.add_dispatch_support
def tanh(x):
"""Hyperbolic tangent activation function.
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.tanh(a)
>>> b.numpy()
array([-0.9950547, -0.7615942, 0., 0.7615942, 0.9950547], dtype=float32)
Args:
x: Input tensor.
Returns:
Tensor of same shape and dtype of input `x`, with tanh activation:
`tanh(x) = sinh(x)/cosh(x) = ((exp(x) - exp(-x))/(exp(x) + exp(-x)))`.
"""
return tf.tanh(x)
@keras_export('keras.activations.sigmoid')
@tf.__internal__.dispatch.add_dispatch_support
def sigmoid(x):
"""Sigmoid activation function, `sigmoid(x) = 1 / (1 + exp(-x))`.
Applies the sigmoid activation function. For small values (<-5),
`sigmoid` returns a value close to zero, and for large values (>5)
the result of the function gets close to 1.
Sigmoid is equivalent to a 2-element Softmax, where the second element is
assumed to be zero. The sigmoid function always returns a value between
0 and 1.
For example:
>>> a = tf.constant([-20, -1.0, 0.0, 1.0, 20], dtype = tf.float32)
>>> b = tf.keras.activations.sigmoid(a)
>>> b.numpy()
array([2.0611537e-09, 2.6894143e-01, 5.0000000e-01, 7.3105860e-01,
1.0000000e+00], dtype=float32)
Args:
x: Input tensor.
Returns:
Tensor with the sigmoid activation: `1 / (1 + exp(-x))`.
"""
output = tf.sigmoid(x)
# Cache the logits to use for crossentropy loss.
output._keras_logits = x # pylint: disable=protected-access
return output
@keras_export('keras.activations.exponential')
@tf.__internal__.dispatch.add_dispatch_support
def exponential(x):
"""Exponential activation function.
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.exponential(a)
>>> b.numpy()
array([0.04978707, 0.36787945, 1., 2.7182817 , 20.085537], dtype=float32)
Args:
x: Input tensor.
Returns:
Tensor with exponential activation: `exp(x)`.
"""
return tf.exp(x)
@keras_export('keras.activations.hard_sigmoid')
@tf.__internal__.dispatch.add_dispatch_support
def hard_sigmoid(x):
"""Hard sigmoid activation function.
A faster approximation of the sigmoid activation.
Piecewise linear approximation of the sigmoid function.
Ref: 'https://en.wikipedia.org/wiki/Hard_sigmoid'
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.hard_sigmoid(a)
>>> b.numpy()
array([0. , 0.3, 0.5, 0.7, 1. ], dtype=float32)
Args:
x: Input tensor.
Returns:
The hard sigmoid activation, defined as:
- `if x < -2.5: return 0`
- `if x > 2.5: return 1`
- `if -2.5 <= x <= 2.5: return 0.2 * x + 0.5`
"""
return backend.hard_sigmoid(x)
@keras_export('keras.activations.linear')
@tf.__internal__.dispatch.add_dispatch_support
def linear(x):
"""Linear activation function (pass-through).
For example:
>>> a = tf.constant([-3.0,-1.0, 0.0,1.0,3.0], dtype = tf.float32)
>>> b = tf.keras.activations.linear(a)
>>> b.numpy()
array([-3., -1., 0., 1., 3.], dtype=float32)
Args:
x: Input tensor.
Returns:
The input, unmodified.
"""
return x
@keras_export('keras.activations.serialize')
@tf.__internal__.dispatch.add_dispatch_support
def serialize(activation):
"""Returns the string identifier of an activation function.
Args:
activation : Function object.
Returns:
String denoting the name attribute of the input function
For example:
>>> tf.keras.activations.serialize(tf.keras.activations.tanh)
'tanh'
>>> tf.keras.activations.serialize(tf.keras.activations.sigmoid)
'sigmoid'
>>> tf.keras.activations.serialize('abcd')
Traceback (most recent call last):
...
ValueError: ('Cannot serialize', 'abcd')
Raises:
ValueError: The input function is not a valid one.
"""
if (hasattr(activation, '__name__') and
activation.__name__ in _TF_ACTIVATIONS_V2):
return _TF_ACTIVATIONS_V2[activation.__name__]
return serialize_keras_object(activation)
# Add additional globals so that deserialize can find these common activation
# functions
leaky_relu = tf.nn.leaky_relu
log_softmax = tf.nn.log_softmax
relu6 = tf.nn.relu6
silu = tf.nn.silu
@keras_export('keras.activations.deserialize')
@tf.__internal__.dispatch.add_dispatch_support
def deserialize(name, custom_objects=None):
"""Returns activation function given a string identifier.
Args:
name: The name of the activation function.
custom_objects: Optional `{function_name: function_obj}`
dictionary listing user-provided activation functions.
Returns:
Corresponding activation function.
For example:
>>> tf.keras.activations.deserialize('linear')
<function linear at 0x1239596a8>
>>> tf.keras.activations.deserialize('sigmoid')
<function sigmoid at 0x123959510>
>>> tf.keras.activations.deserialize('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function:abcd
Raises:
ValueError: `Unknown activation function` if the input string does not
denote any defined Tensorflow activation function.
"""
globs = globals()
# only replace missing activations
advanced_activations_globs = advanced_activations.get_globals()
for key, val in advanced_activations_globs.items():
if key not in globs:
globs[key] = val
return deserialize_keras_object(
name,
module_objects=globs,
custom_objects=custom_objects,
printable_module_name='activation function')
@keras_export('keras.activations.get')
@tf.__internal__.dispatch.add_dispatch_support
def get(identifier):
"""Returns function.
Args:
identifier: Function or string
Returns:
Function corresponding to the input string or input function.
For example:
>>> tf.keras.activations.get('softmax')
<function softmax at 0x1222a3d90>
>>> tf.keras.activations.get(tf.keras.activations.softmax)
<function softmax at 0x1222a3d90>
>>> tf.keras.activations.get(None)
<function linear at 0x1239596a8>
>>> tf.keras.activations.get(abs)
<built-in function abs>
>>> tf.keras.activations.get('abcd')
Traceback (most recent call last):
...
ValueError: Unknown activation function:abcd
Raises:
ValueError: Input is an unknown function or string, i.e., the input does
not denote any defined function.
"""
if identifier is None:
return linear
if isinstance(identifier, str):
identifier = str(identifier)
return deserialize(identifier)
elif isinstance(identifier, dict):
return deserialize(identifier)
elif callable(identifier):
return identifier
else:
raise TypeError(
'Could not interpret activation function identifier: {}'.format(
identifier))
| 31.538843 | 80 | 0.68052 |
4a1de9bbdfbab437a671f76fc1b03b0a1264b20d | 5,203 | py | Python | scanpy/tools/_tsne_fix.py | aertslab/scanpy | da0ffd787681a5df98d523f93374853481fac8b8 | ["BSD-3-Clause"] | null | null | null | scanpy/tools/_tsne_fix.py | aertslab/scanpy | da0ffd787681a5df98d523f93374853481fac8b8 | ["BSD-3-Clause"] | null | null | null | scanpy/tools/_tsne_fix.py | aertslab/scanpy | da0ffd787681a5df98d523f93374853481fac8b8 | ["BSD-3-Clause"] | 1 | 2022-03-21T15:28:17.000Z | 2022-03-21T15:28:17.000Z |
# Author: David DeTomaso (https://github.com/deto)
"""\
Fix for sklearn.tsne gradient descent.
This module fixes it by patching the original function with this
modified version. Patch is only applied for versions earlier than 0.19.
Code courtesy of David DeTomaso; available from
https://github.com/YosefLab/FastProject/blob/stable/FastProject/_tsne_fix.py
"""
from typing import Callable, Tuple, Optional, Mapping, Sequence
import numpy as np
from scipy import linalg
import sklearn
def _gradient_descent(
    objective: Callable[..., Tuple[float, np.ndarray]],
p0: np.ndarray,
it: int,
n_iter: int,
    objective_error: Optional[Callable[..., float]] = None,
n_iter_check: int = 1,
n_iter_without_progress: int = 50,
momentum: float = 0.5,
learning_rate: float = 1000.0,
min_gain: float = 0.01,
min_grad_norm: float = 1e-7,
min_error_diff: float = 1e-7,
verbose: int = 0,
args: Optional[Sequence] = None,
kwargs: Optional[Mapping] = None,
) -> Tuple[np.ndarray, float, int]:
"""\
Batch gradient descent with momentum and individual gains.
Parameters
----------
objective
Should return a tuple of cost and gradient for a given parameter
vector. When expensive to compute, the cost can optionally
be None and can be computed every n_iter_check steps using
the objective_error function.
p0
Initial parameter vector. shape (n_params,)
it
Current number of iterations (this function will be called more than
once during the optimization).
n_iter
Maximum number of gradient descent iterations.
objective_error
Should return error for a given parameter vector.
n_iter_check
Number of iterations before evaluating the global error. If the error
is sufficiently low, we abort the optimization.
n_iter_without_progress
Maximum number of iterations without progress before we abort the
optimization.
momentum
The momentum generates a weight for previous gradients that decays
        exponentially; must lie within (0.0, 1.0).
learning_rate
The learning rate should be extremely high for t-SNE! Values in the
range [100.0, 1000.0] are common.
min_gain
Minimum individual gain for each parameter.
min_grad_norm
If the gradient norm is below this threshold, the optimization will
be aborted.
min_error_diff
If the absolute difference of two successive cost function values
is below this threshold, the optimization will be aborted.
verbose
Verbosity level.
args
Arguments to pass to objective function.
kwargs
Keyword arguments to pass to objective function.
Returns
-------
p
Optimum parameters. shape (n_params,)
error
Optimum.
i
Last iteration.
"""
if args is None:
args = []
if kwargs is None:
kwargs = {}
p = p0.copy().ravel()
update = np.zeros_like(p)
gains = np.ones_like(p)
    error = np.finfo(np.float64).max  # np.float was removed in NumPy 1.24
    best_error = np.finfo(np.float64).max
best_iter = 0
for i in range(it, n_iter):
new_error, grad = objective(p, *args, **kwargs)
grad_norm = linalg.norm(grad)
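        # Gain adaptation in the delta-bar-delta style (a reading of the code,
        # offered as a sketch): where the new gradient opposes the last update,
        # i.e. successive gradients agree in sign, that parameter's gain grows
        # additively; where they disagree it shrinks geometrically.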
inc = update * grad < 0.0
dec = np.invert(inc)
gains[inc] += 0.2
gains[dec] *= 0.8
np.clip(gains, min_gain, np.inf, out=gains)
grad *= gains
update = momentum * update - learning_rate * grad
p += update
if (i + 1) % n_iter_check == 0:
if new_error is None:
new_error = objective_error(p, *args)
error_diff = np.abs(new_error - error)
error = new_error
if verbose >= 2:
m = "[t-SNE] Iteration %d: error = %.7f, gradient norm = %.7f"
print(m % (i + 1, error, grad_norm))
if error < best_error:
best_error = error
best_iter = i
elif i - best_iter > n_iter_without_progress:
if verbose >= 2:
print("[t-SNE] Iteration %d: did not make any progress "
"during the last %d episodes. Finished."
% (i + 1, n_iter_without_progress))
break
if grad_norm <= min_grad_norm:
if verbose >= 2:
print("[t-SNE] Iteration %d: gradient norm %f. Finished."
% (i + 1, grad_norm))
break
if error_diff <= min_error_diff:
if verbose >= 2:
m = "[t-SNE] Iteration %d: error difference %f. Finished."
print(m % (i + 1, error_diff))
break
if new_error is not None:
error = new_error
return p, error, i
sk_ver = []
for c in sklearn.__version__.split("."):
try:
ic = int(c)
sk_ver.append(ic)
except ValueError:
pass
sk_ver = tuple(sk_ver)
if sk_ver < (0, 19, 0):
from sklearn.manifold import t_sne
t_sne._gradient_descent = _gradient_descent
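# Minimal usage sketch (an assumption about intended use, not in the original
# file): importing this module before running t-SNE applies the patch as a
# side effect on scikit-learn < 0.19 and is a no-op on newer versions.
#
#   import scanpy.tools._tsne_fix  # noqa: F401  (patch side effect only)
#   from sklearn.manifold import TSNE
#   embedding = TSNE(n_components=2).fit_transform(data)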
| 31.920245 | 78 | 0.601576 |
4a1dea19f5ab73bc0e4ed6791daa6b7d02d56e12 | 21,640 | py | Python | payment_lib_examples/weixin-python/virtual_environement/lib/python3.4/site-packages/Cryptodome/Util/RFC1751.py | cuhk-mobitec/S3KVetter | 9ae79a242afbe6edae27c17065a88feca2896cf6 | ["Apache-2.0"] | 2 | 2022-01-21T12:51:19.000Z | 2022-01-21T12:51:30.000Z | payment_lib_examples/weixin-python/virtual_environement/lib/python3.4/site-packages/Cryptodome/Util/RFC1751.py | cuhk-mobitec/S3KVetter | 9ae79a242afbe6edae27c17065a88feca2896cf6 | ["Apache-2.0"] | 3 | 2021-03-10T03:52:20.000Z | 2021-10-06T09:50:47.000Z | payment_lib_examples/weixin-python/virtual_environement/lib/python3.4/site-packages/Cryptodome/Util/RFC1751.py | cuhk-mobitec/S3KVetter | 9ae79a242afbe6edae27c17065a88feca2896cf6 | ["Apache-2.0"] | 1 | 2019-12-30T08:22:13.000Z | 2019-12-30T08:22:13.000Z |
# rfc1751.py : Converts between 128-bit strings and a human-readable
# sequence of words, as defined in RFC1751: "A Convention for
# Human-Readable 128-bit Keys", by Daniel L. McDonald.
#
# Part of the Python Cryptography Toolkit
#
# Written by Andrew M. Kuchling and others
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
from __future__ import print_function
import binascii
from Cryptodome.Util.py3compat import *
binary={0:'0000', 1:'0001', 2:'0010', 3:'0011', 4:'0100', 5:'0101',
6:'0110', 7:'0111', 8:'1000', 9:'1001', 10:'1010', 11:'1011',
12:'1100', 13:'1101', 14:'1110', 15:'1111'}
def _key2bin(s):
"Convert a key into a string of binary digits"
kl=map(lambda x: bord(x), s)
kl=map(lambda x: binary[x>>4]+binary[x&15], kl)
return ''.join(kl)
def _extract(key, start, length):
"""Extract a bitstring(2.x)/bytestring(2.x) from a string of binary digits, and return its
numeric value."""
result = 0
for y in key[start:start+length]:
result = result * 2 + ord(y) - 48
return result
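# Worked example (sketch): _key2bin(b'\x01') returns '00000001', and
# _extract('00000001', 4, 4) returns 1, the low nibble read back as an integer.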
def key_to_english (key):
"""Transform an arbitrary key into a string containing English words.
Example::
>>> from Cryptodome.Util.RFC1751 import key_to_english
>>> key_to_english(b'66666666')
'RAM LOIS GOAD CREW CARE HIT'
Args:
key (byte string):
The key to convert. Its length must be a multiple of 8.
Return:
A string of English words.
"""
english=''
for index in range(0, len(key), 8): # Loop over 8-byte subkeys
subkey=key[index:index+8]
# Compute the parity of the key
skbin=_key2bin(subkey) ; p=0
for i in range(0, 64, 2): p=p+_extract(skbin, i, 2)
# Append parity bits to the subkey
skbin=_key2bin(subkey+bchr((p<<6) & 255))
for i in range(0, 64, 11):
english=english+wordlist[_extract(skbin, i, 11)]+' '
return english.strip()
def english_to_key (s):
"""Transform a string into a corresponding key.
Example::
>>> from Cryptodome.Util.RFC1751 import english_to_key
>>> english_to_key('RAM LOIS GOAD CREW CARE HIT')
b'66666666'
Args:
s (string): the string with the words separated by whitespace;
the number of words must be a multiple of 6.
Return:
A byte string.
"""
L=s.upper().split() ; key=b''
for index in range(0, len(L), 6):
sublist=L[index:index+6] ; char=9*[0] ; bits=0
for i in sublist:
index = wordlist.index(i)
shift = (8-(bits+11)%8) %8
y = index << shift
cl, cc, cr = (y>>16), (y>>8)&0xff, y & 0xff
if (shift>5):
char[bits>>3] = char[bits>>3] | cl
char[(bits>>3)+1] = char[(bits>>3)+1] | cc
char[(bits>>3)+2] = char[(bits>>3)+2] | cr
elif shift>-3:
char[bits>>3] = char[bits>>3] | cc
char[(bits>>3)+1] = char[(bits>>3)+1] | cr
else: char[bits>>3] = char[bits>>3] | cr
bits=bits+11
subkey = b''
for y in char:
subkey = subkey + bchr(y)
# Check the parity of the resulting key
skbin=_key2bin(subkey)
p=0
for i in range(0, 64, 2): p=p+_extract(skbin, i, 2)
if (p&3) != _extract(skbin, 64, 2):
raise ValueError("Parity error in resulting key")
key=key+subkey[0:8]
return key
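# Encoding layout, as implemented above: each 8-byte subkey gains a 2-bit
# checksum (the sum of its 32 two-bit groups, modulo 4), and the resulting
# 66 bits are split into six 11-bit indices into the 2048-entry word list
# below.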
wordlist=[ "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD",
"AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY", "AN", "ANA",
"AND", "ANN", "ANT", "ANY", "APE", "APS", "APT", "ARC", "ARE", "ARK",
"ARM", "ART", "AS", "ASH", "ASK", "AT", "ATE", "AUG", "AUK", "AVE",
"AWE", "AWK", "AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM",
"BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG", "BEN", "BET",
"BEY", "BIB", "BID", "BIG", "BIN", "BIT", "BOB", "BOG", "BON", "BOO",
"BOP", "BOW", "BOY", "BUB", "BUD", "BUG", "BUM", "BUN", "BUS", "BUT",
"BUY", "BY", "BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT",
"CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT", "COW", "COY",
"CRY", "CUB", "CUE", "CUP", "CUR", "CUT", "DAB", "DAD", "DAM", "DAN",
"DAR", "DAY", "DEE", "DEL", "DEN", "DES", "DEW", "DID", "DIE", "DIG",
"DIN", "DIP", "DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB",
"DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL", "EGG", "EGO",
"ELI", "ELK", "ELM", "ELY", "EM", "END", "EST", "ETC", "EVA", "EVE",
"EWE", "EYE", "FAD", "FAN", "FAR", "FAT", "FAY", "FED", "FEE", "FEW",
"FIB", "FIG", "FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR",
"FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL", "GAM", "GAP",
"GAS", "GAY", "GEE", "GEL", "GEM", "GET", "GIG", "GIL", "GIN", "GO",
"GOT", "GUM", "GUN", "GUS", "GUT", "GUY", "GYM", "GYP", "HA", "HAD",
"HAL", "HAM", "HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM",
"HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP", "HIS", "HIT",
"HO", "HOB", "HOC", "HOE", "HOG", "HOP", "HOT", "HOW", "HUB", "HUE",
"HUG", "HUH", "HUM", "HUT", "I", "ICY", "IDA", "IF", "IKE", "ILL",
"INK", "INN", "IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT",
"ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW", "JAY", "JET",
"JIG", "JIM", "JO", "JOB", "JOE", "JOG", "JOT", "JOY", "JUG", "JUT",
"KAY", "KEG", "KEN", "KEY", "KID", "KIM", "KIN", "KIT", "LA", "LAB",
"LAC", "LAD", "LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE",
"LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN", "LIP", "LIT",
"LO", "LOB", "LOG", "LOP", "LOS", "LOT", "LOU", "LOW", "LOY", "LUG",
"LYE", "MA", "MAC", "MAD", "MAE", "MAN", "MAO", "MAP", "MAT", "MAW",
"MAY", "ME", "MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT",
"MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW", "MUD", "MUG",
"MUM", "MY", "NAB", "NAG", "NAN", "NAP", "NAT", "NAY", "NE", "NED",
"NEE", "NET", "NEW", "NIB", "NIL", "NIP", "NIT", "NO", "NOB", "NOD",
"NON", "NOR", "NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF",
"OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT", "OH", "OIL",
"OK", "OLD", "ON", "ONE", "OR", "ORB", "ORE", "ORR", "OS", "OTT",
"OUR", "OUT", "OVA", "OW", "OWE", "OWL", "OWN", "OX", "PA", "PAD",
"PAL", "PAM", "PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG",
"PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE", "PIN", "PIT",
"PLY", "PO", "POD", "POE", "POP", "POT", "POW", "PRO", "PRY", "PUB",
"PUG", "PUN", "PUP", "PUT", "QUO", "RAG", "RAM", "RAN", "RAP", "RAT",
"RAW", "RAY", "REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM",
"RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW", "ROY", "RUB",
"RUE", "RUG", "RUM", "RUN", "RYE", "SAC", "SAD", "SAG", "SAL", "SAM",
"SAN", "SAP", "SAT", "SAW", "SAY", "SEA", "SEC", "SEE", "SEN", "SET",
"SEW", "SHE", "SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY",
"SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY", "SPA", "SPY",
"SUB", "SUD", "SUE", "SUM", "SUN", "SUP", "TAB", "TAD", "TAG", "TAN",
"TAP", "TAR", "TEA", "TED", "TEE", "TEN", "THE", "THY", "TIC", "TIE",
"TIM", "TIN", "TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP",
"TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO", "UN", "UP",
"US", "USE", "VAN", "VAT", "VET", "VIE", "WAD", "WAG", "WAR", "WAS",
"WAY", "WE", "WEB", "WED", "WEE", "WET", "WHO", "WHY", "WIN", "WIT",
"WOK", "WON", "WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE",
"YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE", "ABUT",
"ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM", "ADDS",
"ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA", "AIDE",
"AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA", "ALIA",
"ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA", "AMEN",
"AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY", "ANEW",
"ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH", "AREA",
"ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS", "ATOM",
"AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON", "AVOW",
"AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE", "BAIL",
"BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL", "BALM",
"BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE", "BARK",
"BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE", "BATH",
"BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR", "BEAT",
"BEAU", "BECK", "BEEF", "BEEN", "BEER",
"BEET", "BELA", "BELL", "BELT", "BEND", "BENT", "BERG", "BERN",
"BERT", "BESS", "BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE",
"BIEN", "BILE", "BILK", "BILL", "BIND", "BING", "BIRD", "BITE",
"BITS", "BLAB", "BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT",
"BLOW", "BLUE", "BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK",
"BODE", "BODY", "BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT",
"BOMB", "BONA", "BOND", "BONE", "BONG", "BONN", "BONY", "BOOK",
"BOOM", "BOON", "BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS",
"BOTH", "BOUT", "BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN",
"BRAY", "BRED", "BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD",
"BUFF", "BULB", "BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG",
"BURL", "BURN", "BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST",
"BUSY", "BYTE", "CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF",
"CALL", "CALM", "CAME", "CANE", "CANT", "CARD", "CARE", "CARL",
"CARR", "CART", "CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL",
"CELL", "CENT", "CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF",
"CHEN", "CHEW", "CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG",
"CHUM", "CITE", "CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY",
"CLOD", "CLOG", "CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA",
"COCK", "COCO", "CODA", "CODE", "CODY", "COED", "COIL", "COIN",
"COKE", "COLA", "COLD", "COLT", "COMA", "COMB", "COME", "COOK",
"COOL", "COON", "COOT", "CORD", "CORE", "CORK", "CORN", "COST",
"COVE", "COWL", "CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB",
"CROW", "CRUD", "CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY",
"CURB", "CURD", "CURE", "CURL", "CURT", "CUTS", "DADE", "DALE",
"DAME", "DANA", "DANE", "DANG", "DANK", "DARE", "DARK", "DARN",
"DART", "DASH", "DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS",
"DEAD", "DEAF", "DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED",
"DEEM", "DEER", "DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK",
"DIAL", "DICE", "DIED", "DIET", "DIME", "DINE", "DING", "DINT",
"DIRE", "DIRT", "DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES",
"DOLE", "DOLL", "DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA",
"DOSE", "DOTE", "DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG",
"DRAM", "DRAW", "DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK",
"DUCT", "DUEL", "DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK",
"DUSK", "DUST", "DUTY", "EACH", "EARL", "EARN", "EASE", "EAST",
"EASY", "EBEN", "ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT",
"EDNA", "EGAN", "ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT",
"EMMA", "ENDS", "ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED",
"FACE", "FACT", "FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL",
"FAME", "FANG", "FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT",
"FEED", "FEEL", "FEET", "FELL", "FELT", "FEND", "FERN", "FEST",
"FEUD", "FIEF", "FIGS", "FILE", "FILL", "FILM", "FIND", "FINE",
"FINK", "FIRE", "FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE",
"FLAG", "FLAK", "FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW",
"FLIT", "FLOC", "FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM",
"FOGY", "FOIL", "FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL",
"FOOT", "FORD", "FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL",
"FOUR", "FOWL", "FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY",
"FROG", "FROM", "FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY",
"FUSE", "FUSS", "GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA",
"GALE", "GALL", "GALT", "GAME", "GANG", "GARB", "GARY", "GASH",
"GATE", "GAUL", "GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE",
"GENT", "GERM", "GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT",
"GINA", "GIRD", "GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN",
"GLIB", "GLOB", "GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD",
"GOAL", "GOAT", "GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG",
"GOOD", "GOOF", "GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB",
"GRAD", "GRAY", "GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN",
"GRIT", "GROW", "GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH",
"GUST", "GWEN", "GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR",
"HALE", "HALF", "HALL", "HALO", "HALT", "HAND", "HANG", "HANK",
"HANS", "HARD", "HARK", "HARM", "HART", "HASH", "HAST", "HATE",
"HATH", "HAUL", "HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR",
"HEAT", "HEBE", "HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL",
"HELM", "HERB", "HERD", "HERE", "HERO", "HERS", "HESS", "HEWN",
"HICK", "HIDE", "HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT",
"HIRE", "HISS", "HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE",
"HOLM", "HOLT", "HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK",
"HOOT", "HORN", "HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL",
"HOYT", "HUCK", "HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK",
"HULL", "HUNK", "HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE",
"HYMN", "IBIS", "ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH",
"INTO", "IONS", "IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE",
"ITCH", "ITEM", "IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE",
"JAVA", "JEAN", "JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL",
"JILT", "JIVE", "JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN",
"JOIN", "JOKE", "JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY",
"JUJU", "JUKE", "JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST",
"JUTE", "KAHN", "KALE", "KANE", "KANT", "KARL", "KATE", "KEEL",
"KEEN", "KENO", "KENT", "KERN", "KERR", "KEYS", "KICK", "KILL",
"KIND", "KING", "KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW",
"KNIT", "KNOB", "KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD",
"KURT", "KYLE", "LACE", "LACK", "LACY", "LADY", "LAID", "LAIN",
"LAIR", "LAKE", "LAMB", "LAME", "LAND", "LANE", "LANG", "LARD",
"LARK", "LASS", "LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS",
"LAYS", "LEAD", "LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER",
"LEFT", "LEND", "LENS", "LENT", "LEON", "LESK", "LESS", "LEST",
"LETS", "LIAR", "LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU",
"LIFE", "LIFT", "LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB",
"LIME", "LIND", "LINE", "LINK", "LINT", "LION", "LISA", "LIST",
"LIVE", "LOAD", "LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE",
"LOIS", "LOLA", "LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD",
"LORE", "LOSE", "LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK",
"LUCY", "LUGE", "LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE",
"LURK", "LUSH", "LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE",
"MADE", "MAGI", "MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI",
"MALL", "MALT", "MANA", "MANN", "MANY", "MARC", "MARE", "MARK",
"MARS", "MART", "MARY", "MASH", "MASK", "MASS", "MAST", "MATE",
"MATH", "MAUL", "MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK",
"MEET", "MELD", "MELT", "MEMO", "MEND", "MENU", "MERT", "MESH",
"MESS", "MICE", "MIKE", "MILD", "MILE", "MILK", "MILL", "MILT",
"MIMI", "MIND", "MINE", "MINI", "MINK", "MINT", "MIRE", "MISS",
"MIST", "MITE", "MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD",
"MOLE", "MOLL", "MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON",
"MOOR", "MOOT", "MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH",
"MOVE", "MUCH", "MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK",
"MUSH", "MUST", "MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL",
"NAIR", "NAME", "NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR",
"NEAT", "NECK", "NEED", "NEIL", "NELL", "NEON", "NERO", "NESS",
"NEST", "NEWS", "NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA",
"NINE", "NOAH", "NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON",
"NORM", "NOSE", "NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB",
"OATH", "OBEY", "OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY",
"OLAF", "OLDY", "OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE",
"ONES", "ONLY", "ONTO", "ONUS", "ORAL", "ORGY", "OSLO", "OTIS",
"OTTO", "OUCH", "OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY",
"OWNS", "QUAD", "QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT",
"RAGE", "RAID", "RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE",
"RASH", "RATE", "RAVE", "RAYS", "READ", "REAL", "REAM", "REAR",
"RECK", "REED", "REEF", "REEK", "REEL", "REID", "REIN", "RENA",
"REND", "RENT", "REST", "RICE", "RICH", "RICK", "RIDE", "RIFT",
"RILL", "RIME", "RING", "RINK", "RISE", "RISK", "RITE", "ROAD",
"ROAM", "ROAR", "ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME",
"ROOD", "ROOF", "ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS",
"ROSY", "ROTH", "ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY",
"RUDE", "RUDY", "RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE",
"RUSH", "RUSK", "RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE",
"SAID", "SAIL", "SALE", "SALK", "SALT", "SAME", "SAND", "SANE",
"SANG", "SANK", "SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR",
"SCAT", "SCOT", "SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK",
"SEEM", "SEEN", "SEES", "SELF", "SELL", "SEND", "SENT", "SETS",
"SEWN", "SHAG", "SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN",
"SHOD", "SHOE", "SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE",
"SIFT", "SIGH", "SIGN", "SILK", "SILL", "SILO", "SILT", "SINE",
"SING", "SINK", "SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW",
"SKID", "SKIM", "SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY",
"SLED", "SLEW", "SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT",
"SLOW", "SLUG", "SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB",
"SNOW", "SNUB", "SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA",
"SOFT", "SOIL", "SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE",
"SORT", "SOUL", "SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR",
"STAY", "STEM", "STEW", "STIR", "STOW", "STUB", "STUN", "SUCH",
"SUDS", "SUIT", "SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF",
"SWAB", "SWAG", "SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM",
"TACK", "TACT", "TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK",
"TASK", "TATE", "TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM",
"TEEN", "TEET", "TELL", "TEND", "TENT", "TERM", "TERN", "TESS",
"TEST", "THAN", "THAT", "THEE", "THEM", "THEN", "THEY", "THIN",
"THIS", "THUD", "THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER",
"TILE", "TILL", "TILT", "TIME", "TINA", "TINE", "TINT", "TINY",
"TIRE", "TOAD", "TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG",
"TONY", "TOOK", "TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR",
"TOUT", "TOWN", "TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG",
"TRIM", "TRIO", "TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE",
"TUCK", "TUFT", "TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK",
"TWIG", "TWIN", "TWIT", "ULAN", "UNIT", "URGE", "USED", "USER",
"USES", "UTAH", "VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST",
"VEAL", "VEDA", "VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY",
"VETO", "VICE", "VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE",
"WACK", "WADE", "WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK",
"WALL", "WALT", "WAND", "WANE", "WANG", "WANT", "WARD", "WARM",
"WARN", "WART", "WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY",
"WAYS", "WEAK", "WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR",
"WELD", "WELL", "WELT", "WENT", "WERE", "WERT", "WEST", "WHAM",
"WHAT", "WHEE", "WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE",
"WILD", "WILL", "WIND", "WINE", "WING", "WINK", "WINO", "WIRE",
"WISE", "WISH", "WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD",
"WORE", "WORK", "WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE",
"YANG", "YANK", "YARD", "YARN", "YAWL", "YAWN", "YEAH", "YEAR",
"YELL", "YOGA", "YOKE" ]
if __name__=='__main__':
data = [('EB33F77EE73D4053', 'TIDE ITCH SLOW REIN RULE MOT'),
('CCAC2AED591056BE4F90FD441C534766',
'RASH BUSH MILK LOOK BAD BRIM AVID GAFF BAIT ROT POD LOVE'),
('EFF81F9BFBC65350920CDD7416DE8009',
'TROD MUTE TAIL WARM CHAR KONG HAAG CITY BORE O TEAL AWL')
]
for key_hex, words in data:
print('Trying key', key_hex)
key_bin=binascii.a2b_hex(key_hex)
w2=key_to_english(key_bin)
if w2!=words:
print('key_to_english fails on key', key_hex, ', producing', w2)
k2=english_to_key(words)
if k2!=key_bin:
print('english_to_key fails on key', key_hex, ', producing',
repr(k2))
| 55.204082 | 94 | 0.490342 |
4a1dea3426c2e4f2eb67e4ac5eca24a5a675a1d8 | 4,365 | py | Python | app/main_lbr.py | ansjin/memory_leak_detection | 47497fb495ade3e98f46a9d3ddd44ac229de85b0 | ["MIT"] | 2 | 2021-06-24T12:57:43.000Z | 2021-08-13T12:56:13.000Z | app/main_lbr.py | ansjin/memory_leak_detection | 47497fb495ade3e98f46a9d3ddd44ac229de85b0 | ["MIT"] | null | null | null | app/main_lbr.py | ansjin/memory_leak_detection | 47497fb495ade3e98f46a9d3ddd44ac229de85b0 | ["MIT"] | 1 | 2021-10-05T01:28:59.000Z | 2021-10-05T01:28:59.000Z |
import warnings  # "do not disturb" mode: warnings are filtered out below
import pandas as pd
import numpy as np
warnings.filterwarnings('ignore')
from influxdb import InfluxDBClient
from influxdb import DataFrameClient
from .operations import Operations
import sys
sys.path.append('../')
from mem_leak_detection import MemLeakDetectionAlgorithm
from mem_leak_detection import MemLeakDetectionAlgorithmPolyFit
from mem_leak_detection import MemLeakDetectionAlgorithmChangePoints
from mem_leak_detection import Precog
class MainLBR:
def __init__(self, req_trend_r2=0.6, min_value=0, critical_time="5000d"):
self.host = "10.195.1.185"
self.port = 8086
self.user = "root"
self.password = "root"
self.dbname = "lrz_ccs_main"
self.anomalous_dbname = "lrz_ccs_main_anomaly"
self.measurement_names_orig = ['host_mem', 'ccs_ccs_1', 'prometheus']
self.op_obj = Operations()
self.client = InfluxDBClient(self.host, self.port, self.user, self.password, self.dbname)
self.client_anomaly = InfluxDBClient(self.host, self.port, self.user, self.password, self.anomalous_dbname)
self.df_client_anomaly = DataFrameClient(self.host, self.port, self.user, self.password, self.anomalous_dbname)
self.train_data_set_percent = 0.25
self.client.create_database(self.anomalous_dbname)
self.protocol = 'line'
self.measurements = ["host_mem_lbr", 'ccs_ccs_1_lbr', 'prometheus_lbr']
self.req_trend_r2 = req_trend_r2
def train(self):
iter = 0
for orig_measurement in self.measurement_names_orig:
if orig_measurement == "host_mem":
mem_used_df = self.op_obj.get_host_memory_usage(self.client)
dataset = pd.DataFrame(mem_used_df['used'].values)
dataset.index = mem_used_df['time'].values
dataset.columns = ['mem_util_percent']
else:
mem_used_df = self.op_obj.get_container_memory_usage(self.client, orig_measurement)
dataset = pd.DataFrame(mem_used_df['usage'].values)
dataset.index = mem_used_df['time'].values
dataset.columns = ['mem_util_percent']
print(dataset)
dataset.columns = ['Value']
p1 = MemLeakDetectionAlgorithm(req_trend_r2=self.req_trend_r2, min_window=12)
p1 = p1.fit(dataset)
dataset_n = p1.predict(dataset)
dataset['anomalous'] = 0
dataset['trend'] = 0
if len(dataset_n) > 0:
dataset['anomalous'][dataset_n.index[0]:] = 1
dataset['trend'][dataset_n.index[0]:] = dataset_n['trend']
self.client_anomaly.drop_measurement(self.measurements[iter])
self.df_client_anomaly.write_points(dataset, measurement=self.measurements[iter], protocol=self.protocol)
iter = iter + 1
def predict(self):
iter = 0
for orig_measurement in self.measurement_names_orig:
if orig_measurement == "host_mem":
mem_used_df = self.op_obj.get_host_memory_usage(self.client)
dataset = pd.DataFrame(mem_used_df['used'].values)
dataset.index = mem_used_df['time'].values
dataset.columns = ['mem_util_percent']
else:
mem_used_df = self.op_obj.get_container_memory_usage(self.client, orig_measurement)
dataset = pd.DataFrame(mem_used_df['usage'].values)
dataset.index = mem_used_df['time'].values
dataset.columns = ['mem_util_percent']
dataset.columns = ['Value']
p1 = MemLeakDetectionAlgorithm(req_trend_r2=self.req_trend_r2, min_window=12)
p1 = p1.fit(dataset)
dataset_n = p1.predict(dataset)
dataset['anomalous'] = 0
dataset['trend'] = 0
if len(dataset_n) > 0:
dataset['anomalous'][dataset_n.index[0]:] = 1
dataset['trend'][dataset_n.index[0]:] = dataset_n['trend']
self.client_anomaly.drop_measurement(self.measurements[iter])
self.df_client_anomaly.write_points(dataset, measurement=self.measurements[iter], protocol=self.protocol)
iter = iter + 1
| 40.045872 | 119 | 0.635739 |
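Both methods above gate detection on req_trend_r2, the minimum R² that a straight-line fit to the memory series must reach before a leak is flagged. A self-contained sketch of that criterion with numpy/pandas; trend_r2 is a hypothetical helper, not part of the mem_leak_detection package:

import numpy as np
import pandas as pd

def trend_r2(series: pd.Series) -> float:
    # R^2 of a least-squares line, the quantity compared against req_trend_r2
    x = np.arange(len(series), dtype=float)
    slope, intercept = np.polyfit(x, series.values, 1)
    fitted = slope * x + intercept
    ss_res = float(np.sum((series.values - fitted) ** 2))
    ss_tot = float(np.sum((series.values - series.values.mean()) ** 2))
    return 1.0 - ss_res / ss_tot if ss_tot else 0.0

rng = np.random.default_rng(0)
leaky = pd.Series(np.linspace(10, 90, 200) + rng.normal(0, 2, 200))
print(trend_r2(leaky) > 0.6)  # True: steady growth clears the 0.6 threshold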
4a1ded9197e8f527c83c268481b3d091a067fb07 | 9,775 | py | Python | pymatgen/io/abinitio/netcdf.py | rousseab/pymatgen | ecfba4a576a21f31c222be8fd20ce2ddaa77495a | ["MIT"] | 1 | 2015-05-18T14:31:20.000Z | 2015-05-18T14:31:20.000Z | pymatgen/io/abinitio/netcdf.py | rousseab/pymatgen | ecfba4a576a21f31c222be8fd20ce2ddaa77495a | ["MIT"] | null | null | null | pymatgen/io/abinitio/netcdf.py | rousseab/pymatgen | ecfba4a576a21f31c222be8fd20ce2ddaa77495a | ["MIT"] | null | null | null |
# coding: utf-8
"""Wrapper for netCDF readers."""
from __future__ import unicode_literals, division, print_function
import os.path
from monty.dev import requires, deprecated
from monty.collections import AttrDict
from monty.functools import lazy_property
from pymatgen.core.units import ArrayWithUnit
from pymatgen.core.structure import Structure
import logging
logger = logging.getLogger(__name__)
__author__ = "Matteo Giantomassi"
__copyright__ = "Copyright 2013, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Matteo Giantomassi"
__email__ = "gmatteo at gmail.com"
__status__ = "Development"
__date__ = "$Feb 21, 2013M$"
__all__ = [
"as_ncreader",
"as_etsfreader",
"NetcdfReader",
"ETSF_Reader",
"structure_from_ncdata",
]
try:
import netCDF4
except ImportError:
netCDF4 = None
def _asreader(file, cls):
closeit = False
if not isinstance(file, cls):
file, closeit = cls(file), True
return file, closeit
def as_ncreader(file):
"""
Convert file into a NetcdfReader instance.
Returns reader, closeit where closeit is set to True
if we have to close the file before leaving the procedure.
"""
return _asreader(file, NetcdfReader)
def as_etsfreader(file):
return _asreader(file, ETSF_Reader)
class NetcdfReaderError(Exception):
"""Base error class for NetcdfReader"""
class NO_DEFAULT(object):
"""Signal that read_value should raise an Error"""
class NetcdfReader(object):
"""
Wraps and extends netCDF4.Dataset. Read only mode. Supports with statements.
Additional documentation available at:
http://netcdf4-python.googlecode.com/svn/trunk/docs/netCDF4-module.html
"""
Error = NetcdfReaderError
@requires(netCDF4 is not None, "netCDF4 must be installed to use this class")
def __init__(self, path):
"""Open the Netcdf file specified by path (read mode)."""
self.path = os.path.abspath(path)
try:
self.rootgrp = netCDF4.Dataset(self.path, mode="r")
except Exception as exc:
raise self.Error("In file %s: %s" % (self.path, str(exc)))
self.ngroups = len(list(self.walk_tree()))
#self.path2group = collections.OrderedDict()
#for children in self.walk_tree():
# for child in children:
# #print(child.group, child.path)
# self.path2group[child.path] = child.group
def __enter__(self):
"""Activated when used in the with statement."""
return self
def __exit__(self, type, value, traceback):
"""Activated at the end of the with statement. It automatically closes the file."""
self.rootgrp.close()
def close(self):
try:
self.rootgrp.close()
except Exception as exc:
logger.warning("Exception %s while trying to close %s" % (exc, self.path))
#@staticmethod
#def pathjoin(*args):
# return "/".join(args)
def walk_tree(self, top=None):
"""
Navigate all the groups in the file starting from top.
If top is None, the root group is used.
"""
if top is None:
top = self.rootgrp
values = top.groups.values()
yield values
for value in top.groups.values():
for children in self.walk_tree(value):
yield children
def print_tree(self):
for children in self.walk_tree():
for child in children:
print(child)
def read_dimvalue(self, dimname, path="/"):
"""Returns the value of a dimension."""
dim = self._read_dimensions(dimname, path=path)[0]
return len(dim)
def read_varnames(self, path="/"):
"""List of variable names stored in the group specified by path."""
if path == "/":
return self.rootgrp.variables.keys()
else:
group = self.path2group[path]
return group.variables.keys()
def read_value(self, varname, path="/", cmode=None, default=NO_DEFAULT):
"""
Returns the values of variable with name varname in the group specified by path.
Args:
varname: Name of the variable
path: path to the group.
cmode: if cmode=="c", a complex ndarrays is constructed and returned
(netcdf does not provide native support from complex datatype).
default: read_value returns default if varname is not present.
Returns:
numpy array if varname represents an array, scalar otherwise.
"""
try:
var = self.read_variable(varname, path=path)
except self.Error:
if default is NO_DEFAULT: raise
return default
if cmode is None:
# scalar or array
# getValue is not portable!
try:
return var.getValue()[0] if not var.shape else var[:]
except IndexError:
return var.getValue() if not var.shape else var[:]
else:
assert var.shape[-1] == 2
if cmode == "c":
return var[...,0] + 1j*var[...,1]
else:
raise ValueError("Wrong value for cmode %s" % cmode)
def read_variable(self, varname, path="/"):
"""Returns the variable with name varname in the group specified by path."""
return self._read_variables(varname, path=path)[0]
def _read_dimensions(self, *dimnames, **kwargs):
path = kwargs.get("path", "/")
try:
if path == "/":
return [self.rootgrp.dimensions[dname] for dname in dimnames]
else:
group = self.path2group[path]
return [group.dimensions[dname] for dname in dimnames]
except KeyError:
raise self.Error("In file %s:\ndimnames %s, kwargs %s" % (self.path, dimnames, kwargs))
def _read_variables(self, *varnames, **kwargs):
path = kwargs.get("path", "/")
try:
if path == "/":
return [self.rootgrp.variables[vname] for vname in varnames]
else:
group = self.path2group[path]
return [group.variables[vname] for vname in varnames]
except KeyError:
raise self.Error("In file %s:\nvarnames %s, kwargs %s" % (self.path, varnames, kwargs))
def read_keys(self, keys, dict_cls=AttrDict, path="/"):
"""
Read a list of variables/dimensions from file. If a key is not present the corresponding
entry in the output dictionary is set to None.
"""
od = dict_cls()
for k in keys:
try:
# Try to read a variable.
od[k] = self.read_value(k, path=path)
except self.Error:
try:
# Try to read a dimension.
od[k] = self.read_dimvalue(k, path=path)
except self.Error:
od[k] = None
return od
class ETSF_Reader(NetcdfReader):
"""
This object reads data from a file written according to the ETSF-IO specifications.
We assume that the netcdf file contains at least the crystallographic section.
"""
@lazy_property
def chemical_symbols(self):
"""Chemical symbols char [number of atom species][symbol length]."""
charr = self.read_value("chemical_symbols")
symbols = []
for v in charr:
symbols.append("".join(c for c in v))
#symbols = ["".join(str(c)) for symb in symbols for c in symb]
#symbols = [s.decode("ascii") for s in symbols]
#chemical_symbols = [str("".join(s)) for s in symbols]
#print(symbols)
return symbols
def typeidx_from_symbol(self, symbol):
"""Returns the type index from the chemical symbol. Note python convention."""
return self.chemical_symbols.index(symbol)
def read_structure(self, cls=Structure):
"""Returns the crystalline structure."""
if self.ngroups != 1:
raise NotImplementedError("In file %s: ngroups != 1" % self.path)
return structure_from_ncdata(self, cls=cls)
def structure_from_ncdata(ncdata, site_properties=None, cls=Structure):
"""
Reads and returns a pymatgen structure from a NetCDF file
containing crystallographic data in the ETSF-IO format.
Args:
ncdata: filename or NetcdfReader instance.
site_properties: Dictionary with site properties.
        cls: The Structure class to instantiate.
"""
ncdata, closeit = as_ncreader(ncdata)
# TODO check whether atomic units are used
lattice = ArrayWithUnit(ncdata.read_value("primitive_vectors"), "bohr").to("ang")
red_coords = ncdata.read_value("reduced_atom_positions")
natom = len(red_coords)
znucl_type = ncdata.read_value("atomic_numbers")
# type_atom[0:natom] --> index Between 1 and number of atom species
type_atom = ncdata.read_value("atom_species")
# Fortran to C index and float --> int conversion.
species = natom * [None]
for atom in range(natom):
type_idx = type_atom[atom] - 1
species[atom] = int(znucl_type[type_idx])
d = {}
if site_properties is not None:
for prop in site_properties:
            d[prop] = ncdata.read_value(prop)
structure = cls(lattice, species, red_coords, site_properties=d)
# Quick and dirty hack.
# I need an abipy structure since I need to_abivars and other methods.
try:
from abipy.core.structure import Structure as AbipyStructure
structure.__class__ = AbipyStructure
except ImportError:
pass
if closeit:
ncdata.close()
return structure
| 31.840391 | 99 | 0.617187 |
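A small usage sketch for the reader above: write a toy file with netCDF4, then read it back through the context-manager API. The file name is a placeholder, and the import assumes the module is importable at the pymatgen.io.abinitio.netcdf path shown in the record:

import netCDF4

from pymatgen.io.abinitio.netcdf import NetcdfReader

with netCDF4.Dataset("tiny.nc", mode="w") as ds:
    ds.createDimension("x", 3)
    values = ds.createVariable("values", "f8", ("x",))
    values[:] = [1.0, 2.0, 3.0]

with NetcdfReader("tiny.nc") as reader:
    print(reader.read_dimvalue("x"))    # 3
    print(reader.read_value("values"))  # [1. 2. 3.]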
4a1dee40a7b2b65b2078c707f4bdcc24e6397ed1 | 3,669 | py | Python | python/oneflow/test/modules/test_rand.py | grybd/oneflow | 82237ad096a10527591660c09b61444c42917e69 | ["Apache-2.0"] | 3,285 | 2020-07-31T05:51:22.000Z | 2022-03-31T15:20:16.000Z | python/oneflow/test/modules/test_rand.py | grybd/oneflow | 82237ad096a10527591660c09b61444c42917e69 | ["Apache-2.0"] | 2,417 | 2020-07-31T06:28:58.000Z | 2022-03-31T23:04:14.000Z | python/oneflow/test/modules/test_rand.py | grybd/oneflow | 82237ad096a10527591660c09b61444c42917e69 | ["Apache-2.0"] | 520 | 2020-07-31T05:52:42.000Z | 2022-03-29T02:38:11.000Z |
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
import numpy as np
import oneflow as flow
import oneflow.unittest
from test_util import GenArgList
from oneflow.test_utils.automated_test_util import *
def _test_rand(test_case, device, shape):
y1 = flow.rand(*shape, device=flow.device(device))
y2 = flow.rand(*shape, device=flow.device(device))
test_case.assertTrue(not np.array_equal(y1.numpy(), y2.numpy()))
test_case.assertTrue(shape == y1.shape)
def _test_0d_rand(test_case, device, shape):
y1 = flow.rand(*shape, device=flow.device(device))
y2 = flow.rand(*shape, device=flow.device(device))
test_case.assertTrue(
np.allclose(y1.numpy(), y2.numpy(), atol=1e-4, rtol=1e-4)
) # 0d is [] and []
test_case.assertTrue(shape == y1.shape)
def _test_different_dtype(test_case, device, shape):
y1 = flow.rand(*shape, dtype=flow.float32, device=flow.device(device))
y2 = flow.rand(*shape, dtype=flow.float64, device=flow.device(device))
test_case.assertTrue(not np.array_equal(y1.numpy(), y2.numpy()))
test_case.assertTrue(shape == y1.shape)
with test_case.assertRaises(
oneflow._oneflow_internal.exception.UnimplementedException
):
flow.rand(*shape, dtype=flow.int32, device=flow.device(device))
def _test_backward(test_case, device, shape):
x = flow.rand(*shape, device=flow.device(device), requires_grad=True)
y = x.sum()
y.backward()
test_case.assertTrue(np.array_equal(np.ones(shape), x.grad.numpy()))
def _test_with_generator(test_case, device, shape):
gen = flow.Generator()
gen.manual_seed(0)
y1 = flow.rand(
*shape, dtype=flow.float32, device=flow.device(device), generator=gen
)
gen.manual_seed(0)
y2 = flow.rand(
*shape, dtype=flow.float32, device=flow.device(device), generator=gen
)
test_case.assertTrue(np.allclose(y1.numpy(), y2.numpy(), atol=1e-4, rtol=1e-4))
@flow.unittest.skip_unless_1n1d()
class TestConstantModule(flow.unittest.TestCase):
def test_consistent_naive(test_case):
placement = flow.placement("cpu", {0: [0]})
sbp = (flow.sbp.broadcast,)
x = flow.rand(16, 16, placement=placement, sbp=sbp)
test_case.assertEqual(x.sbp, sbp)
test_case.assertEqual(x.placement, placement)
    def test_0d_rand(test_case):
arg_dict = OrderedDict()
arg_dict["test_fun"] = [_test_0d_rand]
arg_dict["device"] = ["cpu", "cuda"]
arg_dict["shape"] = [(2, 0, 4), (2, 0, 2)]
for arg in GenArgList(arg_dict):
arg[0](test_case, *arg[1:])
def test_cases(test_case):
arg_dict = OrderedDict()
arg_dict["test_fun"] = [
_test_rand,
_test_different_dtype,
_test_backward,
_test_with_generator,
]
arg_dict["device"] = ["cpu", "cuda"]
arg_dict["shape"] = [(2, 3), (2, 3, 4), (2, 3, 4, 5), (2, 4)]
for arg in GenArgList(arg_dict):
arg[0](test_case, *arg[1:])
if __name__ == "__main__":
unittest.main()
| 33.054054 | 83 | 0.674298 |
4a1def160ced25838644d0de04bd98307bcb51c3 | 938 | py | Python | parlai/tasks/fvqa/build.py | ricsinaruto/ParlAI | 733b627ae456d6b11a2fc4624088a781bc6c1d03 | ["MIT"] | 258 | 2020-04-10T07:01:06.000Z | 2022-03-26T11:49:30.000Z | parlai/tasks/fvqa/build.py | ricsinaruto/ParlAI | 733b627ae456d6b11a2fc4624088a781bc6c1d03 | ["MIT"] | 33 | 2020-04-10T04:28:51.000Z | 2022-03-31T02:52:02.000Z | parlai/tasks/fvqa/build.py | ricsinaruto/ParlAI | 733b627ae456d6b11a2fc4624088a781bc6c1d03 | ["MIT"] | 43 | 2020-04-14T10:43:33.000Z | 2022-03-13T02:27:54.000Z |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import parlai.core.build_data as build_data
import os
def build(opt):
dpath = os.path.join(opt['datapath'], 'FVQA')
version = None
if not build_data.built(dpath, version_string=version):
print('[building data: ' + dpath + ']')
# An older version exists, so remove these outdated files.
if build_data.built(dpath):
build_data.remove_dir(dpath)
build_data.make_dir(dpath)
# Download the data.
build_data.download('https://dl.dropboxusercontent.com/s/iyz6l7jhbt6jb7q/new_dataset_release.zip', dpath, 'FVQA.zip') # noqa: E501
build_data.untar(dpath, 'FVQA.zip')
# Mark the data as built.
build_data.mark_done(dpath, version_string=version)
| 33.5 | 139 | 0.683369 |
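ParlAI teachers normally trigger this builder before reading data; a hypothetical direct call, where opt only needs the datapath key used above and the path is a placeholder:

from parlai.tasks.fvqa.build import build

# Downloads and unpacks FVQA.zip once; later calls are no-ops because
# build_data.mark_done() has written the version marker.
build({'datapath': '/tmp/parlai_data'})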
4a1def353a73f0b22c84c5e6453921c23433896c | 1,114 | py | Python | share/rpcuser/rpcuser.py | e5e5/YoloYachtCoin | 5d439df14fe66ecef326ed23c9b8c0108a12ce8c | ["MIT"] | 5 | 2021-04-06T19:04:37.000Z | 2021-04-19T23:31:23.000Z | share/rpcuser/rpcuser.py | e5e5/YoloYachtCoin | 5d439df14fe66ecef326ed23c9b8c0108a12ce8c | ["MIT"] | null | null | null | share/rpcuser/rpcuser.py | e5e5/YoloYachtCoin | 5d439df14fe66ecef326ed23c9b8c0108a12ce8c | ["MIT"] | 3 | 2021-05-18T23:21:45.000Z | 2021-05-19T21:51:24.000Z |
#!/usr/bin/env python2
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import hashlib
import sys
import os
from random import SystemRandom
import base64
import hmac
if len(sys.argv) < 2:
sys.stderr.write('Please include username as an argument.\n')
sys.exit(0)
username = sys.argv[1]
#This uses os.urandom() underneath
cryptogen = SystemRandom()
#Create 16 byte hex salt
salt_sequence = [cryptogen.randrange(256) for i in range(16)]
hexseq = list(map(hex, salt_sequence))
salt = "".join([x[2:] for x in hexseq])
#Create 32 byte b64 password
password = base64.urlsafe_b64encode(os.urandom(32))
digestmod = hashlib.sha256
if sys.version_info.major >= 3:
password = password.decode('utf-8')
digestmod = 'SHA256'
m = hmac.new(bytearray(salt, 'utf-8'), bytearray(password, 'utf-8'), digestmod)
result = m.hexdigest()
print("String to be appended to yyccoin.conf:")
print("rpcauth="+username+":"+salt+"$"+result)
print("Your password:\n"+password)
| 26.52381 | 79 | 0.728007 |
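The printed rpcauth line stores only the salt and the HMAC-SHA256 digest, never the password itself. A Python 3 sketch of the matching server-side check; verify is a hypothetical helper mirroring the digest computation above:

import hmac

def verify(salt: str, stored_hex: str, candidate: str) -> bool:
    mac = hmac.new(bytearray(salt, 'utf-8'),
                   bytearray(candidate, 'utf-8'), 'SHA256')
    # constant-time comparison avoids leaking digest prefixes
    return hmac.compare_digest(mac.hexdigest(), stored_hex)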
4a1def4c2ccb09c763f19d0e54e3dabeb0c7204d | 10,524 | py | Python | journal/models.py | amiyatulu/journal_biohelikon | 47e99410110a42abeec20e2d3b5c383ae06cddbf | ["MIT"] | null | null | null | journal/models.py | amiyatulu/journal_biohelikon | 47e99410110a42abeec20e2d3b5c383ae06cddbf | ["MIT"] | null | null | null | journal/models.py | amiyatulu/journal_biohelikon | 47e99410110a42abeec20e2d3b5c383ae06cddbf | ["MIT"] | null | null | null |
from PIL import Image
import datetime
from django import forms
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models.signals import post_save
from django.dispatch.dispatcher import receiver
from django.forms.models import ModelForm, ModelChoiceField
from django.template.defaultfilters import slugify
from django.utils.timezone import utc
from haystack.forms import SearchForm
from os import path
import os
from central.settings import MEDIA_ROOT
from tracking.models import Journals, Manuscript
def get_upload_path_user(instance, filename):
return os.path.join(
"profileimages","profileuser_%d" % instance.user.id, filename)
MEMBER_TYPE = (
('eb','Eb Member'),
('rv','Reviewer'),
('sub','Subscriber')
)
class ProfileDetails(models.Model):
user = models.OneToOneField(User,primary_key=True)
address = models.TextField()
research_interest = models.TextField()
education = models.TextField(null=True,blank=True)
experience = models.TextField(null=True,blank=True)
publications = models.TextField(null=True,blank=True)
membertype = models.CharField(max_length=100, choices = MEMBER_TYPE)
journal = models.ForeignKey(Journals, null=True,blank=True)
arrange = models.CharField(max_length=1000, null=True, blank = True)
photo = models.ImageField(upload_to=get_upload_path_user,max_length=10000,null=True,blank=True)
create_time = models.DateTimeField()
update_time = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if self.create_time == None:
self.create_time = datetime.datetime.utcnow().replace(tzinfo=utc)
self.update_time = datetime.datetime.utcnow().replace(tzinfo=utc)
if not self.membertype:
self.membertype ='sub'
super(ProfileDetails, self).save(*args, **kwargs)
if self.photo.name:
filename = MEDIA_ROOT + self.photo.name
size = (300,300)
image = Image.open(filename)
image.thumbnail(size,Image.ANTIALIAS)
image.save(filename)
def __unicode__(self):
return self.user.username + " " + self.user.first_name + " " + self.user.last_name + " " + self.user.email
class PublishedManuscript(models.Model):
title = models.TextField()
abstract = models.TextField()
authors = models.TextField(null = True,blank=True)
metatag = models.TextField(null = True, blank=True)
article = models.TextField()
volume = models.CharField(max_length=1000)
issue = models.CharField(max_length=20)
year = models.CharField(max_length=4)
e_locator = models.CharField(max_length=1000)
link = models.CharField(max_length=1000)
supp_link = models.TextField(null=True,blank=True)
size = models.CharField(max_length=1000)
journal = models.ForeignKey(Journals)
manuscript = models.OneToOneField(Manuscript, null=True, blank=True)
REVISION_TYPES = (
('Major','Major'),
('Minor','Minor'),
)
revisiontype = models.CharField(max_length=10, choices= REVISION_TYPES)
slug = models.SlugField()
slug2 = models.SlugField()
create_time = models.DateTimeField()
update_time = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if self.create_time == None:
self.create_time = datetime.datetime.utcnow().replace(tzinfo=utc)
self.revisiontype = 'Major'
self.update_time = datetime.datetime.utcnow().replace(tzinfo=utc)
self.slug = slugify(self.title)
self.slug2 = slugify("biohelikon-"+ self.e_locator)
super(PublishedManuscript, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse('journal.views.articlefulldetails', kwargs={'aid': str(self.id),'slug2': str(self.slug2),'slug':str(self.slug)})
def __unicode__(self):
return self.title
class PublishedManuscriptRevisions(models.Model):
publishedmanuscript = models.ForeignKey(PublishedManuscript)
title = models.TextField()
abstract = models.TextField()
authors = models.TextField(null = True, blank=True)
metatag = models.TextField(null = True, blank=True)
article = models.TextField()
volume = models.CharField(max_length=1000)
issue = models.CharField(max_length=20)
year = models.CharField(max_length=4)
e_locator = models.CharField(max_length=1000)
link = models.CharField(max_length=1000)
supp_link = models.TextField(null=True,blank=True)
size = models.CharField(max_length=1000)
version = models.CharField(max_length=1000)
create_time = models.DateTimeField()
update_time = models.DateTimeField()
def save(self, *args, **kwargs):
''' On save, update timestamps '''
if self.create_time == None:
self.create_time = datetime.datetime.utcnow().replace(tzinfo=utc)
self.update_time = datetime.datetime.utcnow().replace(tzinfo=utc)
super(PublishedManuscriptRevisions, self).save(*args, **kwargs)
def __unicode__(self):
return self.title
class JournalLink(models.Model):
journals = models.OneToOneField(Journals)
home = models.CharField(max_length=250, unique = True)
def __unicode__(self):
return self.journals.name
def get_upload_path_homephoto(instance, filename):
return os.path.join(
"homephoto","homephotoid_%d" % instance.journals.id, filename)
class JournalHome(models.Model):
journals = models.OneToOneField(Journals)
h1 = models.CharField(max_length=500)
block1 = models.TextField()
block2 = models.TextField(null = True, blank =True ,verbose_name= "metatag")
block3 = models.TextField(null=True, blank=True)
block4 = models.TextField(null=True, blank=True)
photo = models.ImageField(upload_to=get_upload_path_homephoto,max_length=10000)
def __unicode__(self):
return self.journals.name
class Instructions(models.Model):
journals = models.OneToOneField(Journals)
block1 = models.TextField()
block2 = models.TextField(null = True, blank =True)
block3 = models.TextField(null=True, blank=True)
def __unicode__(self):
return self.journals.name
class AboutJournal(models.Model):
journals = models.OneToOneField(Journals)
block1 = models.TextField()
block2 = models.TextField(null = True, blank =True)
block3 = models.TextField(null=True, blank=True)
def __unicode__(self):
return self.journals.name
class ProfileDetailsForm(ModelForm):
class Meta:
model = ProfileDetails
exclude = ['create_time','update_time']
class ProfileDetailsUserForm(ModelForm):
class Meta:
model = ProfileDetails
exclude = ['create_time','update_time','user','membertype','journal','arrange']
class FormatString(str):
def format(self, *args, **kwargs):
arguments = list(args)
arguments[1] = path.basename(arguments[1])
return super(FormatString, self).format(*arguments, **kwargs)
class SmallClearableFileInput(forms.ClearableFileInput):
url_markup_template = FormatString('<a href="{0}">{1}</a>')
class ProfileDetailsUserUpdateForm(ModelForm):
class Meta:
model = ProfileDetails
exclude = ['create_time','update_time','user','membertype','journal','arrange']
widgets = {'photo': SmallClearableFileInput}
class PublishedManuscriptForm(ModelForm):
class Meta:
model = PublishedManuscript
exclude=['create_time','update_time','revisiontype','slug','slug2']
class PublishedManuscriptUpdateForm(ModelForm):
class Meta:
model = PublishedManuscript
exclude=['create_time','update_time','slug','slug2']
@receiver(post_save, sender=PublishedManuscript)
def copy_revisions(sender,instance, **kwargs):
if instance.revisiontype == "Major":
count = PublishedManuscriptRevisions.objects.filter(publishedmanuscript_id = instance.id).count()
version = count + 1
p = PublishedManuscriptRevisions(publishedmanuscript_id = instance.id, title = instance.title,authors= instance.authors, metatag = instance.metatag, abstract = instance.abstract,
article = instance.article, volume = instance.volume, issue = instance.issue ,year = instance.year, e_locator = instance.e_locator,
link = instance.link,supp_link = instance.supp_link, size = instance.size, version = version
)
p.save()
else:
try:
rev = PublishedManuscriptRevisions.objects.filter(publishedmanuscript_id = instance.id).order_by('-id')[0]
rev.title = instance.title
rev.abstract = instance.abstract
rev.authors = instance.authors
rev.metatag = instance.metatag
rev.article = instance.article
rev.volume = instance.volume
rev.issue = instance.issue
rev.year = instance.year
rev.e_locator = instance.e_locator
rev.link = instance.link
rev.supp_link = instance.supp_link
rev.size = instance.size
rev.save()
        except IndexError:
            # no revision row exists yet for this manuscript
            pass
class DateRangeSearchForm(SearchForm):
models = [
PublishedManuscript
]
journal = ModelChoiceField(queryset=Journals.objects.all(),required=False,empty_label= "All", label="Subjects")
def get_models(self):
return self.models
def search(self):
# First, store the SearchQuerySet received from other processing.
sqs = super(DateRangeSearchForm, self).search().models(*self.get_models())
if not self.is_valid():
return self.no_query_found()
# Check to see if a start_date was chosen.
if self.cleaned_data['journal']:
sqs = sqs.filter(journal=self.cleaned_data['journal'])
# Check to see if an end_date was chosen.
return sqs
| 39.268657 | 187 | 0.654029 |
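Inside a configured Django project, the post_save receiver above amounts to lightweight versioning: every save of a Major revision appends a snapshot row. A hypothetical shell session (pk=1 is a placeholder):

pm = PublishedManuscript.objects.get(pk=1)
before = pm.publishedmanuscriptrevisions_set.count()
pm.revisiontype = 'Major'
pm.save()  # copy_revisions() fires and appends a PublishedManuscriptRevisions row
assert pm.publishedmanuscriptrevisions_set.count() == before + 1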
4a1def8424f6afdee5510b5b7717b7f3c635764b | 990 | py | Python | api/migrations/20210429_01_zBzXQ-add-endpoint-name-to-models-table-and-remove-upload-timestamp-column-which-is-now-redundant.py | zpapakipos/dynabench-1 | 95884b4e29c57263dc1a85909be979c084d5fac3 | ["MIT"] | 15 | 2021-09-24T00:46:04.000Z | 2022-03-16T13:24:56.000Z | api/migrations/20210429_01_zBzXQ-add-endpoint-name-to-models-table-and-remove-upload-timestamp-column-which-is-now-redundant.py | zpapakipos/dynabench-1 | 95884b4e29c57263dc1a85909be979c084d5fac3 | ["MIT"] | 98 | 2021-09-22T12:33:21.000Z | 2022-03-21T22:23:52.000Z | api/migrations/20210429_01_zBzXQ-add-endpoint-name-to-models-table-and-remove-upload-timestamp-column-which-is-now-redundant.py | zpapakipos/dynabench-1 | 95884b4e29c57263dc1a85909be979c084d5fac3 | ["MIT"] | 12 | 2021-09-25T05:08:18.000Z | 2022-02-28T21:02:20.000Z |
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Add endpoint name to models table and
remove upload_timestamp column which is now redundant
"""
from yoyo import step
__depends__ = {"20210425_02_AtJot-remove-overall-perf"}
steps = [
step(
"""
ALTER TABLE models ADD COLUMN endpoint_name TEXT
""",
"""
ALTER TABLE models DROP endpoint_name
""",
),
step(
"""
UPDATE models SET endpoint_name=CONCAT("ts", upload_timestamp, "-", name)
where deployment_status="deployed"
""",
"""
UPDATE models SET endpoint_name=NULL where deployment_status="deployed"
""",
),
step(
"""
ALTER TABLE models DROP upload_timestamp
""",
"""
ALTER TABLE models ADD COLUMN upload_timestamp BIGINT
""",
),
]
| 23.571429 | 81 | 0.60404 |
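Steps like the ones above are applied by yoyo-migrations; a minimal application sketch, with a placeholder MySQL DSN:

from yoyo import get_backend, read_migrations

backend = get_backend("mysql://user:pass@localhost/dynabench")  # placeholder DSN
migrations = read_migrations("api/migrations")
with backend.lock():
    # applies pending steps in dependency order; the second string of each
    # step above is kept so backend.rollback_migrations() can undo it later
    backend.apply_migrations(backend.to_apply(migrations))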
4a1df038cec2afe914a7f0c9c70483ea17893d4c | 3,101 | py | Python | huaweicloud-sdk-bcs/huaweicloudsdkbcs/v2/model/show_blockchain_nodes_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 64 | 2020-06-12T07:05:07.000Z | 2022-03-30T03:32:50.000Z | huaweicloud-sdk-bcs/huaweicloudsdkbcs/v2/model/show_blockchain_nodes_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 11 | 2020-07-06T07:56:54.000Z | 2022-01-11T11:14:40.000Z | huaweicloud-sdk-bcs/huaweicloudsdkbcs/v2/model/show_blockchain_nodes_request.py | huaweicloud/huaweicloud-sdk-python-v3 | 7a6270390fcbf192b3882bf763e7016e6026ef78 | ["Apache-2.0"] | 24 | 2020-06-08T11:42:13.000Z | 2022-03-04T06:44:08.000Z |
# coding: utf-8
import re
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ShowBlockchainNodesRequest:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'blockchain_id': 'str'
}
attribute_map = {
'blockchain_id': 'blockchain_id'
}
def __init__(self, blockchain_id=None):
"""ShowBlockchainNodesRequest - a model defined in huaweicloud sdk"""
self._blockchain_id = None
self.discriminator = None
self.blockchain_id = blockchain_id
@property
def blockchain_id(self):
"""Gets the blockchain_id of this ShowBlockchainNodesRequest.
blockchainID
:return: The blockchain_id of this ShowBlockchainNodesRequest.
:rtype: str
"""
return self._blockchain_id
@blockchain_id.setter
def blockchain_id(self, blockchain_id):
"""Sets the blockchain_id of this ShowBlockchainNodesRequest.
blockchainID
:param blockchain_id: The blockchain_id of this ShowBlockchainNodesRequest.
:type: str
"""
self._blockchain_id = blockchain_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ShowBlockchainNodesRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 27.201754 | 83 | 0.567559 |
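A short construction and serialization demo for the generated model above; this assumes the huaweicloudsdkbcs package is installed and re-exports the class from its v2 namespace:

from huaweicloudsdkbcs.v2 import ShowBlockchainNodesRequest

req = ShowBlockchainNodesRequest(blockchain_id="example-id")
print(req.to_dict())  # {'blockchain_id': 'example-id'}
# __eq__ compares attribute dicts, so equal field values mean equal objects
print(req == ShowBlockchainNodesRequest(blockchain_id="example-id"))  # True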
4a1df063347a5955c18d4be0e1938f549504ae05 | 5,891 | py | Python | custom_components/ui_lovelace_minimalist/configure.py | basbruss/UI | d5b1dd4799b9cb3a8b43c4e8b8b9df8a14f303a2 | ["Apache-2.0"] | null | null | null | custom_components/ui_lovelace_minimalist/configure.py | basbruss/UI | d5b1dd4799b9cb3a8b43c4e8b8b9df8a14f303a2 | ["Apache-2.0"] | null | null | null | custom_components/ui_lovelace_minimalist/configure.py | basbruss/UI | d5b1dd4799b9cb3a8b43c4e8b8b9df8a14f303a2 | ["Apache-2.0"] | null | null | null |
"""Process Yaml and Config for UI Lovelace Minimalist Integration."""
from __future__ import annotations
import logging
import os
import shutil
from homeassistant.core import HomeAssistant
from .base import UlmBase
from .const import DOMAIN
_LOGGER: logging.Logger = logging.getLogger(__name__)
LANGUAGES = {
"Czech": "cs",
"Danish": "da",
"German": "de",
"English (GB)": "en",
"Spanish": "es",
"French": "fr",
"Italian": "it",
"Dutch": "nl",
"Norwegian": "no",
"Polish": "pl",
"Portuguese": "pt",
"Portuguese (Brazil)": "pt-BR",
"Slovak": "sk",
"Swedish": "sv",
"Turkish": "tr",
"Russian": "ru",
}
def configure_cards(hass: HomeAssistant, ulm: UlmBase):
"""Configure initial dashboard & cards directory."""
    _LOGGER.info("Configuring Cards")
# Cleanup
shutil.rmtree(hass.config.path(f"{DOMAIN}/configs"), ignore_errors=True)
shutil.rmtree(hass.config.path(f"{DOMAIN}/addons"), ignore_errors=True)
# Create config dir
os.makedirs(hass.config.path(f"{DOMAIN}/dashboard"), exist_ok=True)
os.makedirs(hass.config.path(f"{DOMAIN}/custom_cards"), exist_ok=True)
os.makedirs(hass.config.path(f"{DOMAIN}/custom_actions"), exist_ok=True)
if os.path.exists(hass.config.path(f"{DOMAIN}/dashboard")):
# Create combined cards dir
combined_cards_dir = hass.config.path(
f"custom_components/{DOMAIN}/__ui_minimalist__/ulm_templates"
)
os.makedirs(combined_cards_dir, exist_ok=True)
# Translations
language = LANGUAGES[ulm.configuration.language]
# Copy default language file over to config dir
shutil.copy2(
hass.config.path(
f"custom_components/{DOMAIN}/lovelace/translations/default.yaml"
),
hass.config.path(f"{combined_cards_dir}/default.yaml"),
)
# Copy example dashboard file over to user config dir if not exists
if ulm.configuration.sidepanel_enabled:
if not os.path.exists(
hass.config.path(f"{DOMAIN}/dashboard/ui-lovelace.yaml")
):
shutil.copy2(
hass.config.path(
f"custom_components/{DOMAIN}/lovelace/ui-lovelace.yaml"
),
hass.config.path(f"{DOMAIN}/dashboard/ui-lovelace.yaml"),
)
# Copy adaptive dashboard if not exists and is selected as option
if ulm.configuration.adaptive_ui_enabled:
if not os.path.exists(
hass.config.path(f"{DOMAIN}/dashboard/adaptive-dash")
):
shutil.copytree(
hass.config.path(
f"custom_components/{DOMAIN}/lovelace/adaptive-dash"
),
hass.config.path(f"{DOMAIN}/dashboard/adaptive-dash"),
)
# Copy example custom actions file over to user config dir if not exists
if not os.path.exists(
hass.config.path(f"{DOMAIN}/custom_actions/custom_actions.yaml")
):
shutil.copy2(
hass.config.path(
f"custom_components/{DOMAIN}/lovelace/custom_actions.yaml"
),
hass.config.path(f"{DOMAIN}/custom_actions/custom_actions.yaml"),
)
# Copy chosen language file over to config dir
shutil.copy2(
hass.config.path(
f"custom_components/{DOMAIN}/lovelace/translations/{language}.yaml"
),
hass.config.path(f"{combined_cards_dir}/language.yaml"),
)
# Copy over cards from integration
shutil.copytree(
hass.config.path(f"custom_components/{DOMAIN}/lovelace/ulm_templates"),
hass.config.path(f"{combined_cards_dir}"),
dirs_exist_ok=True,
)
# Copy over manually installed custom_cards from user
shutil.copytree(
hass.config.path(f"{DOMAIN}/custom_cards"),
hass.config.path(f"{combined_cards_dir}/custom_cards"),
dirs_exist_ok=True,
)
# Copy over manually installed custom_actions from user
shutil.copytree(
hass.config.path(f"{DOMAIN}/custom_actions"),
hass.config.path(f"{combined_cards_dir}/custom_actions"),
dirs_exist_ok=True,
)
# Copy over themes to defined themes folder
shutil.copytree(
hass.config.path(f"custom_components/{DOMAIN}/lovelace/themefiles"),
hass.config.path(f"{ulm.configuration.theme_path}/"),
dirs_exist_ok=True,
)
hass.bus.async_fire("ui_lovelace_minimalist_reload")
async def handle_reload(call):
_LOGGER.debug("Reload UI Lovelace Minimalist Configuration")
reload_configuration(hass)
    # Register service ui_lovelace_minimalist.reload
hass.services.async_register(DOMAIN, "reload", handle_reload)
def reload_configuration(hass):
"""Reload Configuration."""
combined_cards_dir = hass.config.path(
f"custom_components/{DOMAIN}/__ui_minimalist__/ulm_templates"
)
if os.path.exists(hass.config.path(f"{DOMAIN}/custom_cards")):
# Copy over manually installed custom_cards from user
shutil.copytree(
hass.config.path(f"{DOMAIN}/custom_cards"),
hass.config.path(f"{combined_cards_dir}/custom_cards"),
dirs_exist_ok=True,
)
if os.path.exists(hass.config.path(f"{DOMAIN}/custom_actions")):
# Copy over manually installed custom_actions from user
shutil.copytree(
hass.config.path(f"{DOMAIN}/custom_actions"),
hass.config.path(f"{combined_cards_dir}/custom_actions"),
dirs_exist_ok=True,
)
hass.bus.async_fire("ui_lovelace_minimalist_reload")
| 36.364198 | 83 | 0.615345 |
4a1df09449ca04f73e50d22600c28160aae1d431 | 2,523 | py | Python | src/opnsense/scripts/filter/kill_table.py | ass-a2s/opnsense-core | a0634d180325f6afe3be7f514b4470e47ff5eb75 | ["BSD-2-Clause"] | 2 | 2019-03-15T03:35:54.000Z | 2019-03-15T07:50:36.000Z | src/opnsense/scripts/filter/kill_table.py | ass-a2s/opnsense-core | a0634d180325f6afe3be7f514b4470e47ff5eb75 | ["BSD-2-Clause"] | null | null | null | src/opnsense/scripts/filter/kill_table.py | ass-a2s/opnsense-core | a0634d180325f6afe3be7f514b4470e47ff5eb75 | ["BSD-2-Clause"] | null | null | null |
#!/usr/local/bin/python3.6
"""
Copyright (c) 2016-2019 Ad Schellevis <[email protected]>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------------
drop an existing pf alias table
"""
import tempfile
import subprocess
import os
import sys
import ujson
if __name__ == '__main__':
with tempfile.NamedTemporaryFile() as output_stream:
subprocess.call(['/sbin/pfctl', '-sT'], stdout=output_stream, stderr=open(os.devnull, 'wb'))
output_stream.seek(0)
tables = list()
for line in output_stream.read().decode().strip().split('\n'):
tables.append(line.strip())
# only try to remove alias if it exists
if len(sys.argv) > 1 and sys.argv[1] in tables:
# cleanup related alias file
for suffix in ['txt', 'md5.txt', 'self.txt']:
if os.path.isfile('/var/db/aliastables/%s.%s' % (sys.argv[1], suffix)):
os.remove('/var/db/aliastables/%s.%s' % (sys.argv[1], suffix))
subprocess.call(['/sbin/pfctl', '-t', sys.argv[1], '-T', 'kill'], stdout=open(os.devnull, 'wb'), stderr=open(os.devnull, 'wb'))
# all good, exit 0
sys.exit(0)
# not found (or other issue)
sys.exit(-1)
| 45.053571 | 139 | 0.658343 |
4a1df0c64775906cdfcbc4f108dd32ca387dfc47 | 167,693 | py | Python | python/ccxt/async_support/okex3.py | ysdede/ccxt | c64cfdb0c364f4b965ef588bf67d1bdedad410a5 | ["MIT"] | 1 | 2021-01-12T07:03:55.000Z | 2021-01-12T07:03:55.000Z | python/ccxt/async_support/okex3.py | ysdede/ccxt | c64cfdb0c364f4b965ef588bf67d1bdedad410a5 | ["MIT"] | 3 | 2022-01-27T15:38:05.000Z | 2022-03-31T23:04:15.000Z | python/ccxt/async_support/okex3.py | RonSherfey/ccxt | c64cfdb0c364f4b965ef588bf67d1bdedad410a5 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import CancelPending
from ccxt.base.errors import NotSupported
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
from ccxt.base.errors import InvalidNonce
from ccxt.base.errors import RequestTimeout
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class okex3(Exchange):
def describe(self):
return self.deep_extend(super(okex3, self).describe(), {
'id': 'okex3',
'name': 'OKEX',
'countries': ['CN'],
'version': 'v3',
'rateLimit': 1000, # up to 3000 requests per 5 minutes ≈ 600 requests per minute ≈ 10 requests per second ≈ 100 ms
'pro': True,
'has': {
'cancelOrder': True,
'CORS': None,
'createOrder': True,
'fetchBalance': True,
'fetchClosedOrders': True,
'fetchCurrencies': True, # see below
'fetchDepositAddress': True,
'fetchDeposits': True,
'fetchLedger': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrders': None,
'fetchOrderTrades': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchTransactions': None,
'fetchWithdrawals': True,
'future': True,
'withdraw': True,
},
'timeframes': {
'1m': '60',
'3m': '180',
'5m': '300',
'15m': '900',
'30m': '1800',
'1h': '3600',
'2h': '7200',
'4h': '14400',
'6h': '21600',
'12h': '43200',
'1d': '86400',
'1w': '604800',
'1M': '2678400',
'3M': '8035200',
'6M': '16070400',
'1y': '31536000',
},
'hostname': 'okex.com',
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/32552768-0d6dd3c6-c4a6-11e7-90f8-c043b64756a7.jpg',
'api': {
'rest': 'https://www.{hostname}',
},
'www': 'https://www.okex.com',
'doc': 'https://www.okex.com/docs/en/',
'fees': 'https://www.okex.com/pages/products/fees.html',
'referral': 'https://www.okex.com/join/1888677',
'test': {
'rest': 'https://testnet.okex.com',
},
},
'api': {
'general': {
'get': [
'time',
],
},
'account': {
'get': [
'wallet',
'sub-account',
'asset-valuation',
'wallet/{currency}',
'withdrawal/history',
'withdrawal/history/{currency}',
'ledger',
'deposit/address',
'deposit/history',
'deposit/history/{currency}',
'currencies',
'withdrawal/fee',
],
'post': [
'transfer',
'withdrawal',
],
},
'spot': {
'get': [
'accounts',
'accounts/{currency}',
'accounts/{currency}/ledger',
'orders',
'amend_order/{instrument_id}',
'orders_pending',
'orders/{order_id}',
'orders/{client_oid}',
'trade_fee',
'fills',
'algo',
# public
'instruments',
'instruments/{instrument_id}/book',
'instruments/ticker',
'instruments/{instrument_id}/ticker',
'instruments/{instrument_id}/trades',
'instruments/{instrument_id}/candles',
'instruments/{instrument_id}/history/candles',
],
'post': [
'order_algo',
'orders',
'batch_orders',
'cancel_orders/{order_id}',
'cancel_orders/{client_oid}',
'cancel_batch_algos',
'cancel_batch_orders',
],
},
'margin': {
'get': [
'accounts',
'accounts/{instrument_id}',
'accounts/{instrument_id}/ledger',
'accounts/availability',
'accounts/{instrument_id}/availability',
'accounts/borrowed',
'accounts/{instrument_id}/borrowed',
'orders',
'accounts/{instrument_id}/leverage',
'orders/{order_id}',
'orders/{client_oid}',
'orders_pending',
'fills',
# public
'instruments/{instrument_id}/mark_price',
],
'post': [
'accounts/borrow',
'accounts/repayment',
'orders',
'batch_orders',
'cancel_orders',
'cancel_orders/{order_id}',
'cancel_orders/{client_oid}',
'cancel_batch_orders',
'accounts/{instrument_id}/leverage',
],
},
'futures': {
'get': [
'position',
'{instrument_id}/position',
'accounts',
'accounts/{underlying}',
'accounts/{underlying}/leverage',
'accounts/{underlying}/ledger',
'order_algo/{instrument_id}',
'orders/{instrument_id}',
'orders/{instrument_id}/{order_id}',
'orders/{instrument_id}/{client_oid}',
'fills',
'trade_fee',
'accounts/{instrument_id}/holds',
'order_algo/{instrument_id}',
# public
'instruments',
'instruments/{instrument_id}/book',
'instruments/ticker',
'instruments/{instrument_id}/ticker',
'instruments/{instrument_id}/trades',
'instruments/{instrument_id}/candles',
'instruments/{instrument_id}/history/candles',
'instruments/{instrument_id}/index',
'rate',
'instruments/{instrument_id}/estimated_price',
'instruments/{instrument_id}/open_interest',
'instruments/{instrument_id}/price_limit',
'instruments/{instrument_id}/mark_price',
'instruments/{instrument_id}/liquidation',
],
'post': [
'accounts/{underlying}/leverage',
'order',
'amend_order/{instrument_id}',
'orders',
'cancel_order/{instrument_id}/{order_id}',
'cancel_order/{instrument_id}/{client_oid}',
'cancel_batch_orders/{instrument_id}',
'accounts/margin_mode',
'close_position',
'cancel_all',
'order_algo',
'cancel_algos',
],
},
'swap': {
'get': [
'position',
'{instrument_id}/position',
'accounts',
'{instrument_id}/accounts',
'accounts/{instrument_id}/settings',
'accounts/{instrument_id}/ledger',
'orders/{instrument_id}',
'orders/{instrument_id}/{order_id}',
'orders/{instrument_id}/{client_oid}',
'fills',
'accounts/{instrument_id}/holds',
'trade_fee',
'order_algo/{instrument_id}',
# public
'instruments',
'instruments/{instrument_id}/depth',
'instruments/ticker',
'instruments/{instrument_id}/ticker',
'instruments/{instrument_id}/trades',
'instruments/{instrument_id}/candles',
'instruments/{instrument_id}/history/candles',
'instruments/{instrument_id}/index',
'rate',
'instruments/{instrument_id}/open_interest',
'instruments/{instrument_id}/price_limit',
'instruments/{instrument_id}/liquidation',
'instruments/{instrument_id}/funding_time',
'instruments/{instrument_id}/mark_price',
'instruments/{instrument_id}/historical_funding_rate',
],
'post': [
'accounts/{instrument_id}/leverage',
'order',
'amend_order/{instrument_id}',
'orders',
'cancel_order/{instrument_id}/{order_id}',
'cancel_order/{instrument_id}/{client_oid}',
'cancel_batch_orders/{instrument_id}',
'order_algo',
'cancel_algos',
'close_position',
'cancel_all',
'order_algo',
'cancel_algos',
],
},
'option': {
'get': [
'accounts',
'position',
'{underlying}/position',
'accounts/{underlying}',
'orders/{underlying}',
'fills/{underlying}',
'accounts/{underlying}/ledger',
'trade_fee',
'orders/{underlying}/{order_id}',
'orders/{underlying}/{client_oid}',
# public
'underlying',
'instruments/{underlying}',
'instruments/{underlying}/summary',
'instruments/{underlying}/summary/{instrument_id}',
'instruments/{instrument_id}/book',
'instruments/{instrument_id}/trades',
'instruments/{instrument_id}/ticker',
'instruments/{instrument_id}/candles',
],
'post': [
'order',
'orders',
'cancel_order/{underlying}/{order_id}',
'cancel_order/{underlying}/{client_oid}',
'cancel_batch_orders/{underlying}',
'amend_order/{underlying}',
'amend_batch_orders/{underlying}',
],
},
'information': {
'get': [
'{currency}/long_short_ratio',
'{currency}/volume',
'{currency}/taker',
'{currency}/sentiment',
'{currency}/margin',
],
},
'index': {
'get': [
'{instrument_id}/constituents',
],
},
},
'fees': {
'trading': {
'taker': 0.0015,
'maker': 0.0010,
},
'spot': {
'taker': 0.0015,
'maker': 0.0010,
},
'futures': {
'taker': 0.0005,
'maker': 0.0002,
},
'swap': {
'taker': 0.00075,
'maker': 0.00020,
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'password': True,
},
'exceptions': {
# http error codes
# 400 Bad Request — Invalid request format
# 401 Unauthorized — Invalid API Key
# 403 Forbidden — You do not have access to the requested resource
# 404 Not Found
# 429 Client Error: Too Many Requests for url
# 500 Internal Server Error — We had a problem with our server
'exact': {
'1': ExchangeError, # {"code": 1, "message": "System error"}
# undocumented
'failure to get a peer from the ring-balancer': ExchangeNotAvailable, # {"message": "failure to get a peer from the ring-balancer"}
'Server is busy, please try again.': ExchangeNotAvailable, # {"message": "Server is busy, please try again."}
'An unexpected error occurred': ExchangeError, # {"message": "An unexpected error occurred"}
'System error': ExchangeError, # {"error_message":"System error","message":"System error"}
'4010': PermissionDenied, # {"code": 4010, "message": "For the security of your funds, withdrawals are not permitted within 24 hours after changing fund password / mobile number / Google Authenticator settings "}
# common
# '0': ExchangeError, # 200 successful,when the order placement / cancellation / operation is successful
'4001': ExchangeError, # no data received in 30s
'4002': ExchangeError, # Buffer full. cannot write data
# --------------------------------------------------------
'30001': AuthenticationError, # {"code": 30001, "message": 'request header "OK_ACCESS_KEY" cannot be blank'}
'30002': AuthenticationError, # {"code": 30002, "message": 'request header "OK_ACCESS_SIGN" cannot be blank'}
'30003': AuthenticationError, # {"code": 30003, "message": 'request header "OK_ACCESS_TIMESTAMP" cannot be blank'}
'30004': AuthenticationError, # {"code": 30004, "message": 'request header "OK_ACCESS_PASSPHRASE" cannot be blank'}
'30005': InvalidNonce, # {"code": 30005, "message": "invalid OK_ACCESS_TIMESTAMP"}
'30006': AuthenticationError, # {"code": 30006, "message": "invalid OK_ACCESS_KEY"}
'30007': BadRequest, # {"code": 30007, "message": 'invalid Content_Type, please use "application/json" format'}
'30008': RequestTimeout, # {"code": 30008, "message": "timestamp request expired"}
'30009': ExchangeError, # {"code": 30009, "message": "system error"}
'30010': AuthenticationError, # {"code": 30010, "message": "API validation failed"}
'30011': PermissionDenied, # {"code": 30011, "message": "invalid IP"}
'30012': AuthenticationError, # {"code": 30012, "message": "invalid authorization"}
'30013': AuthenticationError, # {"code": 30013, "message": "invalid sign"}
'30014': DDoSProtection, # {"code": 30014, "message": "request too frequent"}
'30015': AuthenticationError, # {"code": 30015, "message": 'request header "OK_ACCESS_PASSPHRASE" incorrect'}
'30016': ExchangeError, # {"code": 30015, "message": "you are using v1 apiKey, please use v1 endpoint. If you would like to use v3 endpoint, please subscribe to v3 apiKey"}
'30017': ExchangeError, # {"code": 30017, "message": "apikey's broker id does not match"}
'30018': ExchangeError, # {"code": 30018, "message": "apikey's domain does not match"}
'30019': ExchangeNotAvailable, # {"code": 30019, "message": "Api is offline or unavailable"}
'30020': BadRequest, # {"code": 30020, "message": "body cannot be blank"}
'30021': BadRequest, # {"code": 30021, "message": "Json data format error"}, {"code": 30021, "message": "json data format error"}
'30022': PermissionDenied, # {"code": 30022, "message": "Api has been frozen"}
'30023': BadRequest, # {"code": 30023, "message": "{0} parameter cannot be blank"}
'30024': BadSymbol, # {"code":30024,"message":"\"instrument_id\" is an invalid parameter"}
'30025': BadRequest, # {"code": 30025, "message": "{0} parameter category error"}
'30026': DDoSProtection, # {"code": 30026, "message": "requested too frequent"}
'30027': AuthenticationError, # {"code": 30027, "message": "login failure"}
'30028': PermissionDenied, # {"code": 30028, "message": "unauthorized execution"}
'30029': AccountSuspended, # {"code": 30029, "message": "account suspended"}
'30030': ExchangeNotAvailable, # {"code": 30030, "message": "endpoint request failed. Please try again"}
'30031': BadRequest, # {"code": 30031, "message": "token does not exist"}
'30032': BadSymbol, # {"code": 30032, "message": "pair does not exist"}
'30033': BadRequest, # {"code": 30033, "message": "exchange domain does not exist"}
'30034': ExchangeError, # {"code": 30034, "message": "exchange ID does not exist"}
'30035': ExchangeError, # {"code": 30035, "message": "trading is not supported in self website"}
'30036': ExchangeError, # {"code": 30036, "message": "no relevant data"}
'30037': ExchangeNotAvailable, # {"code": 30037, "message": "endpoint is offline or unavailable"}
# '30038': AuthenticationError, # {"code": 30038, "message": "user does not exist"}
'30038': OnMaintenance, # {"client_oid":"","code":"30038","error_code":"30038","error_message":"Matching engine is being upgraded. Please try in about 1 minute.","message":"Matching engine is being upgraded. Please try in about 1 minute.","order_id":"-1","result":false}
'30044': RequestTimeout, # {"code":30044, "message":"Endpoint request timeout"}
# futures
'32001': AccountSuspended, # {"code": 32001, "message": "futures account suspended"}
'32002': PermissionDenied, # {"code": 32002, "message": "futures account does not exist"}
'32003': CancelPending, # {"code": 32003, "message": "canceling, please wait"}
'32004': ExchangeError, # {"code": 32004, "message": "you have no unfilled orders"}
'32005': InvalidOrder, # {"code": 32005, "message": "max order quantity"}
'32006': InvalidOrder, # {"code": 32006, "message": "the order price or trigger price exceeds USD 1 million"}
'32007': InvalidOrder, # {"code": 32007, "message": "leverage level must be the same for orders on the same side of the contract"}
'32008': InvalidOrder, # {"code": 32008, "message": "Max. positions to open(cross margin)"}
'32009': InvalidOrder, # {"code": 32009, "message": "Max. positions to open(fixed margin)"}
'32010': ExchangeError, # {"code": 32010, "message": "leverage cannot be changed with open positions"}
'32011': ExchangeError, # {"code": 32011, "message": "futures status error"}
'32012': ExchangeError, # {"code": 32012, "message": "futures order update error"}
'32013': ExchangeError, # {"code": 32013, "message": "token type is blank"}
'32014': ExchangeError, # {"code": 32014, "message": "your number of contracts closing is larger than the number of contracts available"}
'32015': ExchangeError, # {"code": 32015, "message": "margin ratio is lower than 100% before opening positions"}
'32016': ExchangeError, # {"code": 32016, "message": "margin ratio is lower than 100% after opening position"}
'32017': ExchangeError, # {"code": 32017, "message": "no BBO"}
'32018': ExchangeError, # {"code": 32018, "message": "the order quantity is less than 1, please try again"}
'32019': ExchangeError, # {"code": 32019, "message": "the order price deviates from the price of the previous minute by more than 3%"}
'32020': ExchangeError, # {"code": 32020, "message": "the price is not in the range of the price limit"}
'32021': ExchangeError, # {"code": 32021, "message": "leverage error"}
'32022': ExchangeError, # {"code": 32022, "message": "self function is not supported in your country or region according to the regulations"}
'32023': ExchangeError, # {"code": 32023, "message": "self account has outstanding loan"}
'32024': ExchangeError, # {"code": 32024, "message": "order cannot be placed during delivery"}
'32025': ExchangeError, # {"code": 32025, "message": "order cannot be placed during settlement"}
'32026': ExchangeError, # {"code": 32026, "message": "your account is restricted from opening positions"}
'32027': ExchangeError, # {"code": 32027, "message": "cancelled over 20 orders"}
'32028': ExchangeError, # {"code": 32028, "message": "account is suspended and liquidated"}
'32029': ExchangeError, # {"code": 32029, "message": "order info does not exist"}
'32030': InvalidOrder, # The order cannot be cancelled
'32031': ArgumentsRequired, # client_oid or order_id is required.
'32038': AuthenticationError, # User does not exist
'32040': ExchangeError, # User have open contract orders or position
'32044': ExchangeError, # {"code": 32044, "message": "The margin ratio after submitting self order is lower than the minimum requirement({0}) for your tier."}
'32045': ExchangeError, # String of commission over 1 million
'32046': ExchangeError, # Each user can hold up to 10 trade plans at the same time
'32047': ExchangeError, # system error
'32048': InvalidOrder, # Order strategy track range error
'32049': ExchangeError, # Each user can hold up to 10 track plans at the same time
'32050': InvalidOrder, # Order strategy range error
'32051': InvalidOrder, # Order strategy ice depth error
'32052': ExchangeError, # Number of commission orders over 100 thousand
'32053': ExchangeError, # Each user can hold up to 6 ice plans at the same time
'32054': ExchangeError, # Trade not allowed
'32055': InvalidOrder, # cancel order error
'32056': ExchangeError, # iceberg per order average should be between {0}-{1} contracts
'32057': ExchangeError, # The order price is zero. Market-close-all function cannot be executed
'32058': ExchangeError, # Each user can hold up to 6 initiative plans at the same time
'32059': InvalidOrder, # Total amount should exceed per order amount
'32060': InvalidOrder, # Order strategy type error
'32061': InvalidOrder, # Order strategy initiative limit error
'32062': InvalidOrder, # Order strategy initiative range error
'32063': InvalidOrder, # Order strategy initiative rate error
'32064': ExchangeError, # Time interval of orders should be set between 5-120s
'32065': ExchangeError, # Close amount exceeds the limit of Market-close-all(999 for BTC, and 9999 for the rest tokens)
'32066': ExchangeError, # You have open orders. Please cancel all open orders before changing your leverage level.
'32067': ExchangeError, # Account equity < required margin in this setting. Please adjust your leverage level again.
'32068': ExchangeError, # The margin for this position will fall short of the required margin in this setting. Please adjust your leverage level or increase your margin to proceed.
'32069': ExchangeError, # Target leverage level too low. Your account balance is insufficient to cover the margin required. Please adjust the leverage level again.
'32070': ExchangeError, # Please check open position or unfilled order
'32071': ExchangeError, # Your current liquidation mode does not support this action.
'32072': ExchangeError, # The highest available margin for your order’s tier is {0}. Please edit your margin and place a new order.
'32073': ExchangeError, # The action does not apply to the token
'32074': ExchangeError, # The number of contracts of your position, open orders, and the current order has exceeded the maximum order limit of this asset.
'32075': ExchangeError, # Account risk rate breach
'32076': ExchangeError, # Liquidation of the holding position(s) at market price will require cancellation of all pending close orders of the contracts.
'32077': ExchangeError, # Your margin for this asset in futures account is insufficient and the position has been taken over for liquidation.(You will not be able to place orders, close positions, transfer funds, or add margin during this period of time. Your account will be restored after the liquidation is complete.)
'32078': ExchangeError, # Please cancel all open orders before switching the liquidation mode
'32079': ExchangeError, # Your open positions are at high risk.(Please add margin or reduce positions before switching the mode)
'32080': ExchangeError, # Funds cannot be transferred out within 30 minutes after futures settlement
'32083': ExchangeError, # The number of contracts should be a positive multiple of %%. Please place your order again
# token and margin trading
'33001': PermissionDenied, # {"code": 33001, "message": "margin account for this pair is not enabled yet"}
'33002': AccountSuspended, # {"code": 33002, "message": "margin account for this pair is suspended"}
'33003': InsufficientFunds, # {"code": 33003, "message": "no loan balance"}
'33004': ExchangeError, # {"code": 33004, "message": "loan amount cannot be smaller than the minimum limit"}
'33005': ExchangeError, # {"code": 33005, "message": "repayment amount must exceed 0"}
'33006': ExchangeError, # {"code": 33006, "message": "loan order not found"}
'33007': ExchangeError, # {"code": 33007, "message": "status not found"}
'33008': InsufficientFunds, # {"code": 33008, "message": "loan amount cannot exceed the maximum limit"}
'33009': ExchangeError, # {"code": 33009, "message": "user ID is blank"}
'33010': ExchangeError, # {"code": 33010, "message": "you cannot cancel an order during session 2 of call auction"}
'33011': ExchangeError, # {"code": 33011, "message": "no new market data"}
'33012': ExchangeError, # {"code": 33012, "message": "order cancellation failed"}
'33013': InvalidOrder, # {"code": 33013, "message": "order placement failed"}
'33014': OrderNotFound, # {"code": 33014, "message": "order does not exist"}
'33015': InvalidOrder, # {"code": 33015, "message": "exceeded maximum limit"}
'33016': ExchangeError, # {"code": 33016, "message": "margin trading is not open for this token"}
'33017': InsufficientFunds, # {"code": 33017, "message": "insufficient balance"}
'33018': ExchangeError, # {"code": 33018, "message": "this parameter must be smaller than 1"}
'33020': ExchangeError, # {"code": 33020, "message": "request not supported"}
'33021': BadRequest, # {"code": 33021, "message": "token and the pair do not match"}
'33022': InvalidOrder, # {"code": 33022, "message": "pair and the order do not match"}
'33023': ExchangeError, # {"code": 33023, "message": "you can only place market orders during call auction"}
'33024': InvalidOrder, # {"code": 33024, "message": "trading amount too small"}
'33025': InvalidOrder, # {"code": 33025, "message": "base token amount is blank"}
'33026': ExchangeError, # {"code": 33026, "message": "transaction completed"}
'33027': InvalidOrder, # {"code": 33027, "message": "cancelled order or order cancelling"}
'33028': InvalidOrder, # {"code": 33028, "message": "the decimal places of the trading price exceeded the limit"}
'33029': InvalidOrder, # {"code": 33029, "message": "the decimal places of the trading size exceeded the limit"}
'33034': ExchangeError, # {"code": 33034, "message": "You can only place limit order after Call Auction has started"}
'33035': ExchangeError, # This type of order cannot be canceled
'33036': ExchangeError, # Exceeding the limit of entrust order
'33037': ExchangeError, # The buy order price should be lower than 130% of the trigger price
'33038': ExchangeError, # The sell order price should be higher than 70% of the trigger price
'33039': ExchangeError, # The limit of callback rate is 0 < x <= 5%
'33040': ExchangeError, # The trigger price of a buy order should be lower than the latest transaction price
'33041': ExchangeError, # The trigger price of a sell order should be higher than the latest transaction price
'33042': ExchangeError, # The limit of price variance is 0 < x <= 1%
'33043': ExchangeError, # The total amount must be larger than 0
'33044': ExchangeError, # The average amount should be 1/1000 * total amount <= x <= total amount
'33045': ExchangeError, # The price should not be 0, including trigger price, order price, and price limit
'33046': ExchangeError, # Price variance should be 0 < x <= 1%
'33047': ExchangeError, # Sweep ratio should be 0 < x <= 100%
'33048': ExchangeError, # Per order limit: Total amount/1000 < x <= Total amount
'33049': ExchangeError, # Total amount should be X > 0
'33050': ExchangeError, # Time interval should be 5 <= x <= 120s
'33051': ExchangeError, # cancel order number not higher limit: plan and track entrust no more than 10, ice and time entrust no more than 6
'33059': BadRequest, # {"code": 33059, "message": "client_oid or order_id is required"}
'33060': BadRequest, # {"code": 33060, "message": "Only fill in either parameter client_oid or order_id"}
'33061': ExchangeError, # Value of a single market price order cannot exceed 100,000 USD
'33062': ExchangeError, # The leverage ratio is too high. The borrowed position has exceeded the maximum position of this leverage ratio. Please readjust the leverage ratio
'33063': ExchangeError, # Leverage multiple is too low, there is insufficient margin in the account, please readjust the leverage ratio
'33064': ExchangeError, # The setting of the leverage ratio cannot be less than 2, please readjust the leverage ratio
'33065': ExchangeError, # Leverage ratio exceeds maximum leverage ratio, please readjust leverage ratio
'33085': InvalidOrder, # The value of the position and buying order has reached the position limit, and no further buying is allowed.
# account
'21009': ExchangeError, # Funds cannot be transferred out within 30 minutes after swap settlement
'34001': PermissionDenied, # {"code": 34001, "message": "withdrawal suspended"}
'34002': InvalidAddress, # {"code": 34002, "message": "please add a withdrawal address"}
'34003': ExchangeError, # {"code": 34003, "message": "sorry, this token cannot be withdrawn to xx at the moment"}
'34004': ExchangeError, # {"code": 34004, "message": "withdrawal fee is smaller than minimum limit"}
'34005': ExchangeError, # {"code": 34005, "message": "withdrawal fee exceeds the maximum limit"}
'34006': ExchangeError, # {"code": 34006, "message": "withdrawal amount is lower than the minimum limit"}
'34007': ExchangeError, # {"code": 34007, "message": "withdrawal amount exceeds the maximum limit"}
'34008': InsufficientFunds, # {"code": 34008, "message": "insufficient balance"}
'34009': ExchangeError, # {"code": 34009, "message": "your withdrawal amount exceeds the daily limit"}
'34010': ExchangeError, # {"code": 34010, "message": "transfer amount must be larger than 0"}
'34011': ExchangeError, # {"code": 34011, "message": "conditions not met"}
'34012': ExchangeError, # {"code": 34012, "message": "the minimum withdrawal amount for NEO is 1, and the amount must be an integer"}
'34013': ExchangeError, # {"code": 34013, "message": "please transfer"}
'34014': ExchangeError, # {"code": 34014, "message": "transfer limited"}
'34015': ExchangeError, # {"code": 34015, "message": "subaccount does not exist"}
'34016': PermissionDenied, # {"code": 34016, "message": "transfer suspended"}
'34017': AccountSuspended, # {"code": 34017, "message": "account suspended"}
'34018': AuthenticationError, # {"code": 34018, "message": "incorrect trades password"}
'34019': PermissionDenied, # {"code": 34019, "message": "please bind your email before withdrawal"}
'34020': PermissionDenied, # {"code": 34020, "message": "please bind your funds password before withdrawal"}
'34021': InvalidAddress, # {"code": 34021, "message": "Not verified address"}
'34022': ExchangeError, # {"code": 34022, "message": "Withdrawals are not available for sub accounts"}
'34023': PermissionDenied, # {"code": 34023, "message": "Please enable futures trading before transferring your funds"}
'34026': RateLimitExceeded, # transfer too frequently
'34036': ExchangeError, # Parameter is incorrect, please refer to API documentation
'34037': ExchangeError, # Get the sub-account balance interface, account type is not supported
'34038': ExchangeError, # Since your C2C transaction is unusual, you are restricted from fund transfer. Please contact our customer support to cancel the restriction
'34039': ExchangeError, # You are now restricted from transferring out your funds due to abnormal trades on C2C Market. Please transfer your fund on our website or app instead to verify your identity
# swap
'35001': ExchangeError, # {"code": 35001, "message": "Contract does not exist"}
'35002': ExchangeError, # {"code": 35002, "message": "Contract settling"}
'35003': ExchangeError, # {"code": 35003, "message": "Contract paused"}
'35004': ExchangeError, # {"code": 35004, "message": "Contract pending settlement"}
'35005': AuthenticationError, # {"code": 35005, "message": "User does not exist"}
'35008': InvalidOrder, # {"code": 35008, "message": "Risk ratio too high"}
'35010': InvalidOrder, # {"code": 35010, "message": "Position closing too large"}
'35012': InvalidOrder, # {"code": 35012, "message": "Incorrect order size"}
'35014': InvalidOrder, # {"code": 35014, "message": "Order price is not within limit"}
'35015': InvalidOrder, # {"code": 35015, "message": "Invalid leverage level"}
'35017': ExchangeError, # {"code": 35017, "message": "Open orders exist"}
'35019': InvalidOrder, # {"code": 35019, "message": "Order size too large"}
'35020': InvalidOrder, # {"code": 35020, "message": "Order price too high"}
'35021': InvalidOrder, # {"code": 35021, "message": "Order size exceeded current tier limit"}
'35022': BadRequest, # {"code": 35022, "message": "Contract status error"}
'35024': BadRequest, # {"code": 35024, "message": "Contract not initialized"}
'35025': InsufficientFunds, # {"code": 35025, "message": "No account balance"}
'35026': BadRequest, # {"code": 35026, "message": "Contract settings not initialized"}
'35029': OrderNotFound, # {"code": 35029, "message": "Order does not exist"}
'35030': InvalidOrder, # {"code": 35030, "message": "Order size too large"}
'35031': InvalidOrder, # {"code": 35031, "message": "Cancel order size too large"}
'35032': ExchangeError, # {"code": 35032, "message": "Invalid user status"}
'35037': ExchangeError, # No last traded price in cache
'35039': InsufficientFunds, # {"code": 35039, "message": "Open order quantity exceeds limit"}
'35040': InvalidOrder, # {"error_message":"Invalid order type","result":"true","error_code":"35040","order_id":"-1"}
'35044': ExchangeError, # {"code": 35044, "message": "Invalid order status"}
'35046': InsufficientFunds, # {"code": 35046, "message": "Negative account balance"}
'35047': InsufficientFunds, # {"code": 35047, "message": "Insufficient account balance"}
'35048': ExchangeError, # {"code": 35048, "message": "User contract is frozen and liquidating"}
'35049': InvalidOrder, # {"code": 35049, "message": "Invalid order type"}
'35050': InvalidOrder, # {"code": 35050, "message": "Position settings are blank"}
'35052': InsufficientFunds, # {"code": 35052, "message": "Insufficient cross margin"}
'35053': ExchangeError, # {"code": 35053, "message": "Account risk too high"}
'35055': InsufficientFunds, # {"code": 35055, "message": "Insufficient account balance"}
'35057': ExchangeError, # {"code": 35057, "message": "No last traded price"}
'35058': ExchangeError, # {"code": 35058, "message": "No limit"}
'35059': BadRequest, # {"code": 35059, "message": "client_oid or order_id is required"}
'35060': BadRequest, # {"code": 35060, "message": "Only fill in either parameter client_oid or order_id"}
'35061': BadRequest, # {"code": 35061, "message": "Invalid instrument_id"}
'35062': InvalidOrder, # {"code": 35062, "message": "Invalid match_price"}
'35063': InvalidOrder, # {"code": 35063, "message": "Invalid order_size"}
'35064': InvalidOrder, # {"code": 35064, "message": "Invalid client_oid"}
'35066': InvalidOrder, # Order interval error
'35067': InvalidOrder, # Time-weighted order ratio error
'35068': InvalidOrder, # Time-weighted order range error
'35069': InvalidOrder, # Time-weighted single transaction limit error
'35070': InvalidOrder, # Algo order type error
'35071': InvalidOrder, # Order total must be larger than single order limit
'35072': InvalidOrder, # Maximum 6 unfulfilled time-weighted orders can be held at the same time
'35073': InvalidOrder, # Order price is 0. Market-close-all not available
'35074': InvalidOrder, # Iceberg order single transaction average error
'35075': InvalidOrder, # Failed to cancel order
'35076': InvalidOrder, # LTC 20x leverage. Not allowed to open position
'35077': InvalidOrder, # Maximum 6 unfulfilled iceberg orders can be held at the same time
'35078': InvalidOrder, # Order amount exceeded 100,000
'35079': InvalidOrder, # Iceberg order price variance error
'35080': InvalidOrder, # Callback rate error
'35081': InvalidOrder, # Maximum 10 unfulfilled trail orders can be held at the same time
'35082': InvalidOrder, # Trail order callback rate error
'35083': InvalidOrder, # Each user can only hold a maximum of 10 unfulfilled stop-limit orders at the same time
'35084': InvalidOrder, # Order amount exceeded 1 million
'35085': InvalidOrder, # Order amount is not in the correct range
'35086': InvalidOrder, # Price exceeds 100 thousand
'35087': InvalidOrder, # Price exceeds 100 thousand
'35088': InvalidOrder, # Average amount error
'35089': InvalidOrder, # Price exceeds 100 thousand
'35090': ExchangeError, # No stop-limit orders available for cancelation
'35091': ExchangeError, # No trail orders available for cancellation
'35092': ExchangeError, # No iceberg orders available for cancellation
'35093': ExchangeError, # No trail orders available for cancellation
'35094': ExchangeError, # Stop-limit order last traded price error
'35095': BadRequest, # Instrument_id error
'35096': ExchangeError, # Algo order status error
'35097': ExchangeError, # Order status and order ID cannot exist at the same time
'35098': ExchangeError, # An order status or order ID must exist
'35099': ExchangeError, # Algo order ID error
'35102': RateLimitExceeded, # {"error_message":"The operation that close all at market price is too frequent","result":"true","error_code":"35102","order_id":"-1"}
# option
'36001': BadRequest, # Invalid underlying index.
'36002': BadRequest, # Instrument does not exist.
'36005': ExchangeError, # Instrument status is invalid.
'36101': AuthenticationError, # Account does not exist.
'36102': PermissionDenied, # Account status is invalid.
'36103': PermissionDenied, # Account is suspended due to ongoing liquidation.
'36104': PermissionDenied, # Account is not enabled for options trading.
'36105': PermissionDenied, # Please enable the account for option contract.
'36106': PermissionDenied, # Funds cannot be transferred in or out, as account is suspended.
'36107': PermissionDenied, # Funds cannot be transferred out within 30 minutes after option exercising or settlement.
'36108': InsufficientFunds, # Funds cannot be transferred in or out, as equity of the account is less than zero.
'36109': PermissionDenied, # Funds cannot be transferred in or out during option exercising or settlement.
'36201': PermissionDenied, # New order function is blocked.
'36202': PermissionDenied, # Account does not have permission to short option.
'36203': InvalidOrder, # Invalid format for client_oid.
'36204': ExchangeError, # Invalid format for request_id.
'36205': BadRequest, # Instrument id does not match underlying index.
'36206': BadRequest, # Order_id and client_oid can not be used at the same time.
'36207': InvalidOrder, # Either order price or fartouch price must be present.
'36208': InvalidOrder, # Either order price or size must be present.
'36209': InvalidOrder, # Either order_id or client_oid must be present.
'36210': InvalidOrder, # Either order_ids or client_oids must be present.
'36211': InvalidOrder, # Exceeding max batch size for order submission.
'36212': InvalidOrder, # Exceeding max batch size for order cancellation.
'36213': InvalidOrder, # Exceeding max batch size for order amendment.
'36214': ExchangeError, # Instrument does not have valid bid/ask quote.
'36216': OrderNotFound, # Order does not exist.
'36217': InvalidOrder, # Order submission failed.
'36218': InvalidOrder, # Order cancellation failed.
'36219': InvalidOrder, # Order amendment failed.
'36220': InvalidOrder, # Order is pending cancel.
'36221': InvalidOrder, # Order qty is not valid multiple of lot size.
'36222': InvalidOrder, # Order price is breaching highest buy limit.
'36223': InvalidOrder, # Order price is breaching lowest sell limit.
'36224': InvalidOrder, # Exceeding max order size.
'36225': InvalidOrder, # Exceeding max open order count for instrument.
'36226': InvalidOrder, # Exceeding max open order count for underlying.
'36227': InvalidOrder, # Exceeding max open size across all orders for underlying
'36228': InvalidOrder, # Exceeding max available qty for instrument.
'36229': InvalidOrder, # Exceeding max available qty for underlying.
'36230': InvalidOrder, # Exceeding max position limit for underlying.
},
'broad': {
},
},
'precisionMode': TICK_SIZE,
'options': {
'fetchOHLCV': {
'type': 'Candles', # Candles or HistoryCandles
},
'createMarketBuyOrderRequiresPrice': True,
'fetchMarkets': ['spot', 'futures', 'swap', 'option'],
'defaultType': 'spot', # 'account', 'spot', 'margin', 'futures', 'swap', 'option'
'auth': {
'time': 'public',
'currencies': 'private',
'instruments': 'public',
'rate': 'public',
'{instrument_id}/constituents': 'public',
},
'warnOnFetchCurrenciesWithoutAuthorization': False,
},
'commonCurrencies': {
# OKEX refers to ERC20 version of Aeternity(AEToken)
'AE': 'AET', # https://github.com/ccxt/ccxt/issues/4981
'BOX': 'DefiBox',
'HOT': 'Hydro Protocol',
'HSR': 'HC',
'MAG': 'Maggie',
'SBTC': 'Super Bitcoin',
'TRADE': 'Unitrade',
'YOYO': 'YOYOW',
'WIN': 'WinToken', # https://github.com/ccxt/ccxt/issues/5701
},
})
async def fetch_time(self, params={}):
response = await self.generalGetTime(params)
#
# {
# "iso": "2015-01-07T23:47:25.201Z",
# "epoch": 1420674445.201
# }
#
return self.parse8601(self.safe_string(response, 'iso'))
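# A minimal usage sketch for the async methods in this class, kept inside a
# comment so the module stays importable; the instantiation and event-loop
# wiring below are assumptions for illustration, not part of this file:
#
#     import asyncio
#     import ccxt.async_support as ccxt
#
#     async def main():
#         exchange = ccxt.okex()
#         print(await exchange.fetch_time())  # server time in milliseconds
#         await exchange.close()
#
#     asyncio.run(main())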
async def fetch_markets(self, params={}):
types = self.safe_value(self.options, 'fetchMarkets')
result = []
for i in range(0, len(types)):
markets = await self.fetch_markets_by_type(types[i], params)
result = self.array_concat(result, markets)
return result
def parse_markets(self, markets):
result = []
for i in range(0, len(markets)):
result.append(self.parse_market(markets[i]))
return result
def parse_market(self, market):
#
# spot markets
#
# {
# base_currency: "EOS",
# instrument_id: "EOS-OKB",
# min_size: "0.01",
# quote_currency: "OKB",
# size_increment: "0.000001",
# tick_size: "0.0001"
# }
#
# futures markets
#
# {
# instrument_id: "XRP-USD-200320",
# underlying_index: "XRP",
# quote_currency: "USD",
# tick_size: "0.0001",
# contract_val: "10",
# listing: "2020-03-06",
# delivery: "2020-03-20",
# trade_increment: "1",
# alias: "this_week",
# underlying: "XRP-USD",
# base_currency: "XRP",
# settlement_currency: "XRP",
# is_inverse: "true",
# contract_val_currency: "USD",
# }
#
# swap markets
#
# {
# instrument_id: "BSV-USD-SWAP",
# underlying_index: "BSV",
# quote_currency: "USD",
# coin: "BSV",
# contract_val: "10",
# listing: "2018-12-21T07:53:47.000Z",
# delivery: "2020-03-14T08:00:00.000Z",
# size_increment: "1",
# tick_size: "0.01",
# base_currency: "BSV",
# underlying: "BSV-USD",
# settlement_currency: "BSV",
# is_inverse: "true",
# contract_val_currency: "USD"
# }
#
# options markets
#
# {
# instrument_id: 'BTC-USD-200327-4000-C',
# underlying: 'BTC-USD',
# settlement_currency: 'BTC',
# contract_val: '0.1000',
# option_type: 'C',
# strike: '4000',
# tick_size: '0.0005',
# lot_size: '1.0000',
# listing: '2019-12-25T08:30:36.302Z',
# delivery: '2020-03-27T08:00:00.000Z',
# state: '2',
# trading_start_time: '2019-12-25T08:30:36.302Z',
# timestamp: '2020-03-13T08:05:09.456Z',
# }
#
id = self.safe_string(market, 'instrument_id')
marketType = 'spot'
spot = True
future = False
swap = False
option = False
baseId = self.safe_string(market, 'base_currency')
quoteId = self.safe_string(market, 'quote_currency')
contractVal = self.safe_number(market, 'contract_val')
if contractVal is not None:
if 'option_type' in market:
marketType = 'option'
spot = False
option = True
underlying = self.safe_string(market, 'underlying')
parts = underlying.split('-')
baseId = self.safe_string(parts, 0)
quoteId = self.safe_string(parts, 1)
else:
marketType = 'swap'
spot = False
swap = True
futuresAlias = self.safe_string(market, 'alias')
if futuresAlias is not None:
swap = False
future = True
marketType = 'futures'
baseId = self.safe_string(market, 'underlying_index')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = (base + '/' + quote) if spot else id
lotSize = self.safe_number_2(market, 'lot_size', 'trade_increment')
minPrice = self.safe_string(market, 'tick_size')
precision = {
'amount': self.safe_number(market, 'size_increment', lotSize),
'price': self.parse_number(minPrice),
}
minAmountString = self.safe_string_2(market, 'min_size', 'base_min_size')
minAmount = self.parse_number(minAmountString)
minCost = None
if (minAmount is not None) and (minPrice is not None):
minCost = self.parse_number(Precise.string_mul(minPrice, minAmountString))
active = True
fees = self.safe_value_2(self.fees, marketType, 'trading', {})
return self.extend(fees, {
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'info': market,
'type': marketType,
'spot': spot,
'futures': future,
'swap': swap,
'option': option,
'active': active,
'precision': precision,
'limits': {
'amount': {
'min': minAmount,
'max': None,
},
'price': {
'min': precision['price'],
'max': None,
},
'cost': {
'min': minCost,
'max': None,
},
},
})
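# A worked example of the min-cost derivation above, with illustrative
# values: for a spot market with tick_size = "0.0001" and min_size = "0.01",
# minCost = parse_number(Precise.string_mul("0.0001", "0.01")) = 0.000001,
# i.e. one millionth of the quote currency.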
async def fetch_markets_by_type(self, type, params={}):
if type == 'option':
underlying = await self.optionGetUnderlying(params)
result = []
for i in range(0, len(underlying)):
response = await self.optionGetInstrumentsUnderlying({
'underlying': underlying[i],
})
#
# options markets
#
# [
# {
# instrument_id: 'BTC-USD-200327-4000-C',
# underlying: 'BTC-USD',
# settlement_currency: 'BTC',
# contract_val: '0.1000',
# option_type: 'C',
# strike: '4000',
# tick_size: '0.0005',
# lot_size: '1.0000',
# listing: '2019-12-25T08:30:36.302Z',
# delivery: '2020-03-27T08:00:00.000Z',
# state: '2',
# trading_start_time: '2019-12-25T08:30:36.302Z',
# timestamp: '2020-03-13T08:05:09.456Z',
# },
# ]
#
result = self.array_concat(result, response)
return self.parse_markets(result)
elif (type == 'spot') or (type == 'futures') or (type == 'swap'):
method = type + 'GetInstruments'
response = await getattr(self, method)(params)
#
# spot markets
#
# [
# {
# base_currency: "EOS",
# instrument_id: "EOS-OKB",
# min_size: "0.01",
# quote_currency: "OKB",
# size_increment: "0.000001",
# tick_size: "0.0001"
# }
# ]
#
# futures markets
#
# [
# {
# instrument_id: "XRP-USD-200320",
# underlying_index: "XRP",
# quote_currency: "USD",
# tick_size: "0.0001",
# contract_val: "10",
# listing: "2020-03-06",
# delivery: "2020-03-20",
# trade_increment: "1",
# alias: "this_week",
# underlying: "XRP-USD",
# base_currency: "XRP",
# settlement_currency: "XRP",
# is_inverse: "true",
# contract_val_currency: "USD",
# }
# ]
#
# swap markets
#
# [
# {
# instrument_id: "BSV-USD-SWAP",
# underlying_index: "BSV",
# quote_currency: "USD",
# coin: "BSV",
# contract_val: "10",
# listing: "2018-12-21T07:53:47.000Z",
# delivery: "2020-03-14T08:00:00.000Z",
# size_increment: "1",
# tick_size: "0.01",
# base_currency: "BSV",
# underlying: "BSV-USD",
# settlement_currency: "BSV",
# is_inverse: "true",
# contract_val_currency: "USD"
# }
# ]
#
return self.parse_markets(response)
else:
raise NotSupported(self.id + ' fetchMarketsByType does not support market type ' + type)
async def fetch_currencies(self, params={}):
# despite that their docs say these endpoints are public:
# https://www.okex.com/api/account/v3/withdrawal/fee
# https://www.okex.com/api/account/v3/currencies
# it will still reply with {"code":30001, "message": "OK-ACCESS-KEY header is required"}
# if you attempt to access it without authentication
if not self.check_required_credentials(False):
if self.options['warnOnFetchCurrenciesWithoutAuthorization']:
raise ExchangeError(self.id + ' fetchCurrencies() is a private API endpoint that requires authentication with API keys. Set the API keys on the exchange instance or exchange.options["warnOnFetchCurrenciesWithoutAuthorization"] = False to suppress this warning message.')
return None
else:
response = await self.accountGetCurrencies(params)
#
# [
# {
# name: '',
# currency: 'BTC',
# can_withdraw: '1',
# can_deposit: '1',
# min_withdrawal: '0.0100000000000000'
# },
# ]
#
result = {}
for i in range(0, len(response)):
currency = response[i]
id = self.safe_string(currency, 'currency')
code = self.safe_currency_code(id)
precision = 0.00000001 # default precision, todo: fix "magic constants"
name = self.safe_string(currency, 'name')
canDeposit = self.safe_integer(currency, 'can_deposit')
canWithdraw = self.safe_integer(currency, 'can_withdraw')
active = True if (canDeposit and canWithdraw) else False
result[code] = {
'id': id,
'code': code,
'info': currency,
'type': None,
'name': name,
'active': active,
'fee': None, # todo: redesign
'precision': precision,
'limits': {
'amount': {'min': None, 'max': None},
'withdraw': {
'min': self.safe_number(currency, 'min_withdrawal'),
'max': None,
},
},
}
return result
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = market['type'] + 'GetInstrumentsInstrumentId'
method += 'Depth' if (market['type'] == 'swap') else 'Book'
request = {
'instrument_id': market['id'],
}
if limit is not None:
request['size'] = limit # max 200
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# { asks: [["0.02685268", "0.242571", "1"],
# ["0.02685493", "0.164085", "1"],
# ...
# ["0.02779", "1.039", "1"],
# ["0.027813", "0.0876", "1"] ],
# bids: [["0.02684052", "10.371849", "1"],
# ["0.02684051", "3.707", "4"],
# ...
# ["0.02634963", "0.132934", "1"],
# ["0.02634962", "0.264838", "2"] ],
# timestamp: "2018-12-17T20:24:16.159Z" }
#
# swap
#
# {
# "asks":[
# ["916.21","94","0","1"]
# ],
# "bids":[
# ["916.1","15","0","1"]
# ],
# "time":"2021-04-16T02:04:48.282Z"
# }
#
timestamp = self.parse8601(self.safe_string_2(response, 'timestamp', 'time'))
return self.parse_order_book(response, symbol, timestamp)
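# Usage sketch(the symbol and limit are illustrative, and `exchange` is
# assumed to be an instance of this class):
#
#     orderbook = await exchange.fetch_order_book('BTC/USDT', 20)
#     best_bid = orderbook['bids'][0][0] if orderbook['bids'] else None
#     best_ask = orderbook['asks'][0][0] if orderbook['asks'] else None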
def parse_ticker(self, ticker, market=None):
#
# { best_ask: "0.02665472",
# best_bid: "0.02665221",
# instrument_id: "ETH-BTC",
# product_id: "ETH-BTC",
# last: "0.02665472",
# ask: "0.02665472", # missing in the docs
# bid: "0.02665221", # not mentioned in the docs
# open_24h: "0.02645482",
# high_24h: "0.02714633",
# low_24h: "0.02614109",
# base_volume_24h: "572298.901923",
# timestamp: "2018-12-17T21:20:07.856Z",
# quote_volume_24h: "15094.86831261" }
#
timestamp = self.parse8601(self.safe_string(ticker, 'timestamp'))
symbol = None
marketId = self.safe_string(ticker, 'instrument_id')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
elif marketId is not None:
parts = marketId.split('-')
numParts = len(parts)
if numParts == 2:
baseId, quoteId = parts
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
else:
symbol = marketId
if (symbol is None) and (market is not None):
symbol = market['symbol']
last = self.safe_number(ticker, 'last')
open = self.safe_number(ticker, 'open_24h')
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_number(ticker, 'high_24h'),
'low': self.safe_number(ticker, 'low_24h'),
'bid': self.safe_number(ticker, 'best_bid'),
'bidVolume': self.safe_number(ticker, 'best_bid_size'),
'ask': self.safe_number(ticker, 'best_ask'),
'askVolume': self.safe_number(ticker, 'best_ask_size'),
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': self.safe_number(ticker, 'base_volume_24h'),
'quoteVolume': self.safe_number(ticker, 'quote_volume_24h'),
'info': ticker,
}
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
method = market['type'] + 'GetInstrumentsInstrumentIdTicker'
request = {
'instrument_id': market['id'],
}
response = await getattr(self, method)(self.extend(request, params))
#
# { best_ask: "0.02665472",
# best_bid: "0.02665221",
# instrument_id: "ETH-BTC",
# product_id: "ETH-BTC",
# last: "0.02665472",
# ask: "0.02665472",
# bid: "0.02665221",
# open_24h: "0.02645482",
# high_24h: "0.02714633",
# low_24h: "0.02614109",
# base_volume_24h: "572298.901923",
# timestamp: "2018-12-17T21:20:07.856Z",
# quote_volume_24h: "15094.86831261" }
#
return self.parse_ticker(response)
async def fetch_tickers_by_type(self, type, symbols=None, params={}):
await self.load_markets()
method = type + 'GetInstrumentsTicker'
response = await getattr(self, method)(params)
result = {}
for i in range(0, len(response)):
ticker = self.parse_ticker(response[i])
symbol = ticker['symbol']
result[symbol] = ticker
return self.filter_by_array(result, 'symbol', symbols)
async def fetch_tickers(self, symbols=None, params={}):
defaultType = self.safe_string_2(self.options, 'fetchTickers', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
return await self.fetch_tickers_by_type(type, symbols, self.omit(params, 'type'))
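# A sketch of overriding the market type per call(the 'type' value must be
# one of the types routed by fetchTickersByType above):
#
#     spot_tickers = await exchange.fetch_tickers(None, {'type': 'spot'})
#     swap_tickers = await exchange.fetch_tickers(None, {'type': 'swap'})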
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# spot trades
#
# {
# time: "2018-12-17T23:31:08.268Z",
# timestamp: "2018-12-17T23:31:08.268Z",
# trade_id: "409687906",
# price: "0.02677805",
# size: "0.923467",
# side: "sell"
# }
#
# futures trades, swap trades
#
# {
# trade_id: "1989230840021013",
# side: "buy",
# price: "92.42",
# qty: "184", # missing in swap markets
# size: "5", # missing in futures markets
# timestamp: "2018-12-17T23:26:04.613Z"
# }
#
# fetchOrderTrades(private)
#
# spot trades, margin trades
#
# {
# "created_at":"2019-03-15T02:52:56.000Z",
# "exec_type":"T", # whether the order is taker or maker
# "fee":"0.00000082",
# "instrument_id":"BTC-USDT",
# "ledger_id":"3963052721",
# "liquidity":"T", # whether the order is taker or maker
# "order_id":"2482659399697408",
# "price":"3888.6",
# "product_id":"BTC-USDT",
# "side":"buy",
# "size":"0.00055306",
# "timestamp":"2019-03-15T02:52:56.000Z"
# },
#
# futures trades, swap trades
#
# {
# "trade_id":"197429674631450625",
# "instrument_id":"EOS-USD-SWAP",
# "order_id":"6a-7-54d663a28-0",
# "price":"3.633",
# "order_qty":"1.0000",
# "fee":"-0.000551",
# "created_at":"2019-03-21T04:41:58.0Z", # missing in swap trades
# "timestamp":"2019-03-25T05:56:31.287Z", # missing in futures trades
# "exec_type":"M", # whether the order is taker or maker
# "side":"short", # "buy" in futures trades
# }
#
symbol = None
marketId = self.safe_string(trade, 'instrument_id')
base = None
quote = None
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
base = market['base']
quote = market['quote']
elif marketId is not None:
parts = marketId.split('-')
numParts = len(parts)
if numParts == 2:
baseId, quoteId = parts
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
else:
symbol = marketId
if (symbol is None) and (market is not None):
symbol = market['symbol']
base = market['base']
quote = market['quote']
timestamp = self.parse8601(self.safe_string_2(trade, 'timestamp', 'created_at'))
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string_2(trade, 'size', 'qty')
amountString = self.safe_string(trade, 'order_qty', amountString)
price = self.parse_number(priceString)
amount = self.parse_number(amountString)
cost = self.parse_number(Precise.string_mul(priceString, amountString))
takerOrMaker = self.safe_string_2(trade, 'exec_type', 'liquidity')
if takerOrMaker == 'M':
takerOrMaker = 'maker'
elif takerOrMaker == 'T':
takerOrMaker = 'taker'
side = self.safe_string(trade, 'side')
feeCost = self.safe_number(trade, 'fee')
fee = None
if feeCost is not None:
feeCurrency = base if (side == 'buy') else quote
fee = {
# fee is either a positive number(invitation rebate)
# or a negative number(transaction fee deduction)
# therefore we need to invert the fee
# more about it https://github.com/ccxt/ccxt/issues/5909
'cost': -feeCost,
'currency': feeCurrency,
}
orderId = self.safe_string(trade, 'order_id')
return {
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': self.safe_string_2(trade, 'trade_id', 'ledger_id'),
'order': orderId,
'type': None,
'takerOrMaker': takerOrMaker,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
async def fetch_trades(self, symbol, since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
method = market['type'] + 'GetInstrumentsInstrumentIdTrades'
if (limit is None) or (limit > 100):
limit = 100 # maximum = default = 100
request = {
'instrument_id': market['id'],
'limit': limit,
# from: 'id',
# to: 'id',
}
response = await getattr(self, method)(self.extend(request, params))
#
# spot markets
#
# [
# {
# time: "2018-12-17T23:31:08.268Z",
# timestamp: "2018-12-17T23:31:08.268Z",
# trade_id: "409687906",
# price: "0.02677805",
# size: "0.923467",
# side: "sell"
# }
# ]
#
# futures markets, swap markets
#
# [
# {
# trade_id: "1989230840021013",
# side: "buy",
# price: "92.42",
# qty: "184", # missing in swap markets
# size: "5", # missing in futures markets
# timestamp: "2018-12-17T23:26:04.613Z"
# }
# ]
#
return self.parse_trades(response, market, since, limit)
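# Usage sketch(note the clamp above: any requested limit over 100 is
# reduced to the endpoint maximum of 100):
#
#     trades = await exchange.fetch_trades('ETH/BTC', limit=50)
#     for trade in trades:
#         print(trade['datetime'], trade['side'], trade['price'], trade['amount'])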
def parse_ohlcv(self, ohlcv, market=None):
#
# spot markets
#
# {
# close: "0.02684545",
# high: "0.02685084",
# low: "0.02683312",
# open: "0.02683894",
# time: "2018-12-17T20:28:00.000Z",
# volume: "101.457222"
# }
#
# futures markets
#
# [
# 1545072720000,
# 0.3159,
# 0.3161,
# 0.3144,
# 0.3149,
# 22886,
# 725179.26172331,
# ]
#
if isinstance(ohlcv, list):
numElements = len(ohlcv)
volumeIndex = 6 if (numElements > 6) else 5
timestamp = self.safe_value(ohlcv, 0)
if isinstance(timestamp, str):
timestamp = self.parse8601(timestamp)
return [
timestamp, # timestamp
self.safe_number(ohlcv, 1), # Open
self.safe_number(ohlcv, 2), # High
self.safe_number(ohlcv, 3), # Low
self.safe_number(ohlcv, 4), # Close
# self.safe_number(ohlcv, 5), # Quote Volume
# self.safe_number(ohlcv, 6), # Base Volume
self.safe_number(ohlcv, volumeIndex), # Volume, okex will return base volume in the 7th element for futures markets
]
else:
return [
self.parse8601(self.safe_string(ohlcv, 'time')),
self.safe_number(ohlcv, 'open'), # Open
self.safe_number(ohlcv, 'high'), # High
self.safe_number(ohlcv, 'low'), # Low
self.safe_number(ohlcv, 'close'), # Close
self.safe_number(ohlcv, 'volume'), # Base Volume
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
duration = self.parse_timeframe(timeframe)
request = {
'instrument_id': market['id'],
'granularity': self.timeframes[timeframe],
}
options = self.safe_value(self.options, 'fetchOHLCV', {})
defaultType = self.safe_string(options, 'type', 'Candles') # Candles or HistoryCandles
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
method = market['type'] + 'GetInstrumentsInstrumentId' + type
if type == 'Candles':
if since is not None:
if limit is not None:
request['end'] = self.iso8601(self.sum(since, limit * duration * 1000))
request['start'] = self.iso8601(since)
else:
if limit is not None:
now = self.milliseconds()
request['start'] = self.iso8601(now - limit * duration * 1000)
request['end'] = self.iso8601(now)
elif type == 'HistoryCandles':
if market['option']:
raise NotSupported(self.id + ' fetchOHLCV does not have ' + type + ' for ' + market['type'] + ' markets')
if since is not None:
if limit is None:
limit = 300 # default
request['start'] = self.iso8601(self.sum(since, limit * duration * 1000))
request['end'] = self.iso8601(since)
else:
if limit is not None:
now = self.milliseconds()
request['end'] = self.iso8601(now - limit * duration * 1000)
request['start'] = self.iso8601(now)
response = await getattr(self, method)(self.extend(request, params))
#
# spot markets
#
# [
# {
# close: "0.02683401",
# high: "0.02683401",
# low: "0.02683401",
# open: "0.02683401",
# time: "2018-12-17T23:47:00.000Z",
# volume: "0"
# },
# {
# close: "0.02684545",
# high: "0.02685084",
# low: "0.02683312",
# open: "0.02683894",
# time: "2018-12-17T20:28:00.000Z",
# volume: "101.457222"
# }
# ]
#
# futures
#
# [
# [
# 1545090660000,
# 0.3171,
# 0.3174,
# 0.3171,
# 0.3173,
# 1648,
# 51930.38579450868
# ],
# [
# 1545072720000,
# 0.3159,
# 0.3161,
# 0.3144,
# 0.3149,
# 22886,
# 725179.26172331
# ]
# ]
#
return self.parse_ohlcvs(response, market, timeframe, since, limit)
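# A sketch of selecting the history endpoint instead of the default one,
# via the per-call 'type' parameter handled above(the since value is
# illustrative):
#
#     since = exchange.parse8601('2020-01-01T00:00:00Z')
#     ohlcvs = await exchange.fetch_ohlcv('BTC/USDT', '1h', since, 100, {'type': 'HistoryCandles'})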
def parse_account_balance(self, response):
#
# account
#
# [
# {
# balance: 0,
# available: 0,
# currency: "BTC",
# hold: 0
# },
# {
# balance: 0,
# available: 0,
# currency: "ETH",
# hold: 0
# }
# ]
#
# spot
#
# [
# {
# frozen: "0",
# hold: "0",
# id: "2149632",
# currency: "BTC",
# balance: "0.0000000497717339",
# available: "0.0000000497717339",
# holds: "0"
# },
# {
# frozen: "0",
# hold: "0",
# id: "2149632",
# currency: "ICN",
# balance: "0.00000000925",
# available: "0.00000000925",
# holds: "0"
# }
# ]
#
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_string(balance, 'balance')
account['used'] = self.safe_string(balance, 'hold')
account['free'] = self.safe_string(balance, 'available')
result[code] = account
return self.safe_balance(result)
def parse_margin_balance(self, response):
#
# [
# {
# "currency:BTC": {
# "available":"0",
# "balance":"0",
# "borrowed":"0",
# "can_withdraw":"0",
# "frozen":"0",
# "hold":"0",
# "holds":"0",
# "lending_fee":"0"
# },
# "currency:USDT": {
# "available":"100",
# "balance":"100",
# "borrowed":"0",
# "can_withdraw":"100",
# "frozen":"0",
# "hold":"0",
# "holds":"0",
# "lending_fee":"0"
# },
# "instrument_id":"BTC-USDT",
# "liquidation_price":"0",
# "product_id":"BTC-USDT",
# "risk_rate":""
# },
# ]
#
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
for i in range(0, len(response)):
balance = response[i]
marketId = self.safe_string(balance, 'instrument_id')
market = self.safe_value(self.markets_by_id, marketId)
symbol = None
if market is None:
baseId, quoteId = marketId.split('-')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
else:
symbol = market['symbol']
omittedBalance = self.omit(balance, [
'instrument_id',
'liquidation_price',
'product_id',
'risk_rate',
'margin_ratio',
'maint_margin_ratio',
'tiers',
])
keys = list(omittedBalance.keys())
accounts = {}
for k in range(0, len(keys)):
key = keys[k]
marketBalance = balance[key]
if key.find(':') >= 0:
parts = key.split(':')
currencyId = parts[1]
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_string(marketBalance, 'balance')
account['used'] = self.safe_string(marketBalance, 'hold')
account['free'] = self.safe_string(marketBalance, 'available')
accounts[code] = account
else:
raise NotSupported(self.id + ' margin balance response format has changed!')
result[symbol] = self.safe_balance(accounts)
return result
def parse_futures_balance(self, response):
#
# {
# "info":{
# "eos":{
# "auto_margin":"0",
# "contracts": [
# {
# "available_qty":"40.37069445",
# "fixed_balance":"0",
# "instrument_id":"EOS-USD-190329",
# "margin_for_unfilled":"0",
# "margin_frozen":"0",
# "realized_pnl":"0",
# "unrealized_pnl":"0"
# },
# {
# "available_qty":"40.37069445",
# "fixed_balance":"14.54895721",
# "instrument_id":"EOS-USD-190628",
# "margin_for_unfilled":"0",
# "margin_frozen":"10.64042157",
# "realized_pnl":"-3.90853564",
# "unrealized_pnl":"-0.259"
# },
# ],
# "equity":"50.75220665",
# "margin_mode":"fixed",
# "total_avail_balance":"40.37069445"
# },
# }
# }
#
# their root field name is "info", so our info will contain their info
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
info = self.safe_value(response, 'info', {})
ids = list(info.keys())
for i in range(0, len(ids)):
id = ids[i]
code = self.safe_currency_code(id)
balance = self.safe_value(info, id, {})
account = self.account()
totalAvailBalance = self.safe_string(balance, 'total_avail_balance')
if self.safe_string(balance, 'margin_mode') == 'fixed':
contracts = self.safe_value(balance, 'contracts', [])
free = totalAvailBalance
for i in range(0, len(contracts)):
contract = contracts[i]
fixedBalance = self.safe_string(contract, 'fixed_balance')
realizedPnl = self.safe_string(contract, 'realized_pnl')
marginFrozen = self.safe_string(contract, 'margin_frozen')
marginForUnfilled = self.safe_string(contract, 'margin_for_unfilled')
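# margin released back to "free" for this contract:
# fixed_balance + realized_pnl - margin_frozen - margin_for_unfilled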
margin = Precise.string_sub(Precise.string_sub(Precise.string_add(fixedBalance, realizedPnl), marginFrozen), marginForUnfilled)
free = Precise.string_add(free, margin)
account['free'] = free
else:
realizedPnl = self.safe_string(balance, 'realized_pnl')
unrealizedPnl = self.safe_string(balance, 'unrealized_pnl')
marginFrozen = self.safe_string(balance, 'margin_frozen')
marginForUnfilled = self.safe_string(balance, 'margin_for_unfilled')
positive = Precise.string_add(Precise.string_add(totalAvailBalance, realizedPnl), unrealizedPnl)
account['free'] = Precise.string_sub(Precise.string_sub(positive, marginFrozen), marginForUnfilled)
# it may be incorrect to use total, free and used for swap accounts
account['total'] = self.safe_string(balance, 'equity')
result[code] = account
return self.safe_balance(result)
def parse_swap_balance(self, response):
#
# {
# "info": [
# {
# "equity":"3.0139",
# "fixed_balance":"0.0000",
# "instrument_id":"EOS-USD-SWAP",
# "margin":"0.5523",
# "margin_frozen":"0.0000",
# "margin_mode":"crossed",
# "margin_ratio":"1.0913",
# "realized_pnl":"-0.0006",
# "timestamp":"2019-03-25T03:46:10.336Z",
# "total_avail_balance":"3.0000",
# "unrealized_pnl":"0.0145"
# }
# ]
# }
#
# their root field name is "info", so our info will contain their info
result = {'info': response}
timestamp = None
info = self.safe_value(response, 'info', [])
for i in range(0, len(info)):
balance = info[i]
marketId = self.safe_string(balance, 'instrument_id')
symbol = marketId
if marketId in self.markets_by_id:
symbol = self.markets_by_id[marketId]['symbol']
balanceTimestamp = self.parse8601(self.safe_string(balance, 'timestamp'))
timestamp = balanceTimestamp if (timestamp is None) else max(timestamp, balanceTimestamp)
account = self.account()
# it may be incorrect to use total, free and used for swap accounts
account['total'] = self.safe_string(balance, 'equity')
account['free'] = self.safe_string(balance, 'total_avail_balance')
result[symbol] = account
result['timestamp'] = timestamp
result['datetime'] = self.iso8601(timestamp)
return self.safe_balance(result)
async def fetch_balance(self, params={}):
defaultType = self.safe_string_2(self.options, 'fetchBalance', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
if type is None:
raise ArgumentsRequired(self.id + " fetchBalance() requires a type parameter(one of 'account', 'spot', 'margin', 'futures', 'swap')")
await self.load_markets()
suffix = 'Wallet' if (type == 'account') else 'Accounts'
method = type + 'Get' + suffix
query = self.omit(params, 'type')
response = await getattr(self, method)(query)
#
# account
#
# [
# {
# balance: 0,
# available: 0,
# currency: "BTC",
# hold: 0
# },
# {
# balance: 0,
# available: 0,
# currency: "ETH",
# hold: 0
# }
# ]
#
# spot
#
# [
# {
# frozen: "0",
# hold: "0",
# id: "2149632",
# currency: "BTC",
# balance: "0.0000000497717339",
# available: "0.0000000497717339",
# holds: "0"
# },
# {
# frozen: "0",
# hold: "0",
# id: "2149632",
# currency: "ICN",
# balance: "0.00000000925",
# available: "0.00000000925",
# holds: "0"
# }
# ]
#
# margin
#
# [
# {
# "currency:BTC": {
# "available":"0",
# "balance":"0",
# "borrowed":"0",
# "can_withdraw":"0",
# "frozen":"0",
# "hold":"0",
# "holds":"0",
# "lending_fee":"0"
# },
# "currency:USDT": {
# "available":"100",
# "balance":"100",
# "borrowed":"0",
# "can_withdraw":"100",
# "frozen":"0",
# "hold":"0",
# "holds":"0",
# "lending_fee":"0"
# },
# "instrument_id":"BTC-USDT",
# "liquidation_price":"0",
# "product_id":"BTC-USDT",
# "risk_rate":""
# },
# ]
#
# futures
#
# {
# "info":{
# "eos":{
# "auto_margin":"0",
# "contracts": [
# {
# "available_qty":"40.37069445",
# "fixed_balance":"0",
# "instrument_id":"EOS-USD-190329",
# "margin_for_unfilled":"0",
# "margin_frozen":"0",
# "realized_pnl":"0",
# "unrealized_pnl":"0"
# },
# {
# "available_qty":"40.37069445",
# "fixed_balance":"14.54895721",
# "instrument_id":"EOS-USD-190628",
# "margin_for_unfilled":"0",
# "margin_frozen":"10.64042157",
# "realized_pnl":"-3.90853564",
# "unrealized_pnl":"-0.259"
# },
# ],
# "equity":"50.75220665",
# "margin_mode":"fixed",
# "total_avail_balance":"40.37069445"
# },
# }
# }
#
# swap
#
# {
# "info": [
# {
# "equity":"3.0139",
# "fixed_balance":"0.0000",
# "instrument_id":"EOS-USD-SWAP",
# "margin":"0.5523",
# "margin_frozen":"0.0000",
# "margin_mode":"crossed",
# "margin_ratio":"1.0913",
# "realized_pnl":"-0.0006",
# "timestamp":"2019-03-25T03:46:10.336Z",
# "total_avail_balance":"3.0000",
# "unrealized_pnl":"0.0145"
# }
# ]
# }
#
return self.parse_balance_by_type(type, response)
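# Usage sketch for the type routing above('spot' is already the default
# type in self.options, the futures call is illustrative):
#
#     balance = await exchange.fetch_balance()  # resolves to the spot wallet by default
#     futures_balance = await exchange.fetch_balance({'type': 'futures'})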
def parse_balance_by_type(self, type, response):
if (type == 'account') or (type == 'spot'):
return self.parse_account_balance(response)
elif type == 'margin':
return self.parse_margin_balance(response)
elif type == 'futures':
return self.parse_futures_balance(response)
elif type == 'swap':
return self.parse_swap_balance(response)
raise NotSupported(self.id + " fetchBalance does not support the '" + type + "' type(the type must be one of 'account', 'spot', 'margin', 'futures', 'swap')")
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'instrument_id': market['id'],
# 'client_oid': 'abcdef1234567890', # [a-z0-9]{1,32}
# 'order_type': '0', # 0 = Normal limit order, 1 = Post only, 2 = Fill Or Kill, 3 = Immediate Or Cancel, 4 = Market for futures only
}
clientOrderId = self.safe_string_2(params, 'client_oid', 'clientOrderId')
if clientOrderId is not None:
request['client_oid'] = clientOrderId
params = self.omit(params, ['client_oid', 'clientOrderId'])
method = None
if market['futures'] or market['swap']:
size = self.number_to_string(amount) if market['futures'] else self.amount_to_precision(symbol, amount)
request = self.extend(request, {
'type': type, # 1:open long 2:open short 3:close long 4:close short for futures
'size': size,
# 'match_price': '0', # Order at best counter party price?(0:no 1:yes). The default is 0. If it is set as 1, the price parameter will be ignored. When posting orders at best bid price, order_type can only be 0(regular order).
})
orderType = self.safe_string(params, 'order_type')
# order_type == '4' means a market order
isMarketOrder = (type == 'market') or (orderType == '4')
if isMarketOrder:
request['order_type'] = '4'
else:
request['price'] = self.price_to_precision(symbol, price)
if market['futures']:
request['leverage'] = '10' # or '20'
method = market['type'] + 'PostOrder'
else:
marginTrading = self.safe_string(params, 'margin_trading', '1') # 1 = spot, 2 = margin
request = self.extend(request, {
'side': side,
'type': type, # limit/market
'margin_trading': marginTrading, # 1 = spot, 2 = margin
})
if type == 'limit':
request['price'] = self.price_to_precision(symbol, price)
request['size'] = self.amount_to_precision(symbol, amount)
elif type == 'market':
# for market buy it requires the amount of quote currency to spend
if side == 'buy':
notional = self.safe_number(params, 'notional')
createMarketBuyOrderRequiresPrice = self.safe_value(self.options, 'createMarketBuyOrderRequiresPrice', True)
if createMarketBuyOrderRequiresPrice:
if price is not None:
if notional is None:
notional = amount * price
elif notional is None:
raise InvalidOrder(self.id + " createOrder() requires the price argument with market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False and supply the total cost value in the 'amount' argument or in the 'notional' extra parameter(the exchange-specific behaviour)")
else:
notional = amount if (notional is None) else notional
precision = market['precision']['price']
request['notional'] = self.decimal_to_precision(notional, TRUNCATE, precision, self.precisionMode)
else:
request['size'] = self.amount_to_precision(symbol, amount)
method = 'marginPostOrders' if (marginTrading == '2') else 'spotPostOrders'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "client_oid":"oktspot79",
# "error_code":"",
# "error_message":"",
# "order_id":"2510789768709120",
# "result":true
# }
#
order = self.parse_order(response, market)
return self.extend(order, {
'type': type,
'side': side,
})
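# A sketch of the market-buy cost semantics implemented above(all numbers
# are illustrative): with the default createMarketBuyOrderRequiresPrice the
# price argument is required so the quote cost can be derived as
# amount * price; alternatively the check can be disabled and the quote
# amount passed directly in the amount argument:
#
#     await exchange.create_order('BTC/USDT', 'market', 'buy', 0.001, 50000)
#
#     exchange.options['createMarketBuyOrderRequiresPrice'] = False
#     await exchange.create_order('BTC/USDT', 'market', 'buy', 50)  # spend 50 USDT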
async def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
type = None
if market['futures'] or market['swap']:
type = market['type']
else:
defaultType = self.safe_string_2(self.options, 'cancelOrder', 'defaultType', market['type'])
type = self.safe_string(params, 'type', defaultType)
if type is None:
raise ArgumentsRequired(self.id + " cancelOrder() requires a type parameter(one of 'spot', 'margin', 'futures', 'swap').")
method = type + 'PostCancelOrder'
request = {
'instrument_id': market['id'],
}
if market['futures'] or market['swap']:
method += 'InstrumentId'
else:
method += 's'
clientOrderId = self.safe_string_2(params, 'client_oid', 'clientOrderId')
if clientOrderId is not None:
method += 'ClientOid'
request['client_oid'] = clientOrderId
else:
method += 'OrderId'
request['order_id'] = id
query = self.omit(params, ['type', 'client_oid', 'clientOrderId'])
response = await getattr(self, method)(self.extend(request, query))
result = response if ('result' in response) else self.safe_value(response, market['id'], {})
#
# spot, margin
#
# {
# "btc-usdt": [
# {
# "result":true,
# "client_oid":"a123",
# "order_id": "2510832677225473"
# }
# ]
# }
#
# futures, swap
#
# {
# "result": True,
# "client_oid": "oktfuture10", # missing if requested by order_id
# "order_id": "2517535534836736",
# "instrument_id": "EOS-USD-190628"
# }
#
return self.parse_order(result, market)
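# Cancellation accepts either the exchange order id or a client-assigned
# id, as routed above; a sketch(both ids are illustrative):
#
#     await exchange.cancel_order('2510832677225473', 'BTC/USDT')
#     await exchange.cancel_order(None, 'BTC/USDT', {'client_oid': 'a123'})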
def parse_order_status(self, status):
statuses = {
'-2': 'failed',
'-1': 'canceled',
'0': 'open',
'1': 'open',
'2': 'closed',
'3': 'open',
'4': 'canceled',
}
return self.safe_string(statuses, status, status)
def parse_order_side(self, side):
sides = {
'1': 'buy', # open long
'2': 'sell', # open short
'3': 'sell', # close long
'4': 'buy', # close short
}
return self.safe_string(sides, side, side)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# "client_oid":"oktspot79",
# "error_code":"",
# "error_message":"",
# "order_id":"2510789768709120",
# "result":true
# }
#
# cancelOrder
#
# {
# "result": True,
# "client_oid": "oktfuture10", # missing if requested by order_id
# "order_id": "2517535534836736",
# # instrument_id is missing for spot/margin orders
# # available in futures and swap orders only
# "instrument_id": "EOS-USD-190628",
# }
#
# fetchOrder, fetchOrdersByState, fetchOpenOrders, fetchClosedOrders
#
# # spot and margin orders
#
# {
# "client_oid":"oktspot76",
# "created_at":"2019-03-18T07:26:49.000Z",
# "filled_notional":"3.9734",
# "filled_size":"0.001", # filled_qty in futures and swap orders
# "funds":"", # self is most likely the same as notional
# "instrument_id":"BTC-USDT",
# "notional":"",
# "order_id":"2500723297813504",
# "order_type":"0",
# "price":"4013",
# "product_id":"BTC-USDT", # missing in futures and swap orders
# "side":"buy",
# "size":"0.001",
# "status":"filled",
# "state": "2",
# "timestamp":"2019-03-18T07:26:49.000Z",
# "type":"limit"
# }
#
# # futures and swap orders
#
# {
# "instrument_id":"EOS-USD-190628",
# "size":"10",
# "timestamp":"2019-03-20T10:04:55.000Z",
# "filled_qty":"10", # filled_size in spot and margin orders
# "fee":"-0.00841043",
# "order_id":"2512669605501952",
# "price":"3.668",
# "price_avg":"3.567", # missing in spot and margin orders
# "status":"2",
# "state": "2",
# "type":"4",
# "contract_val":"10",
# "leverage":"10", # missing in swap, spot and margin orders
# "client_oid":"",
# "pnl":"1.09510794", # missing in swap, spo and margin orders
# "order_type":"0"
# }
#
id = self.safe_string(order, 'order_id')
timestamp = self.parse8601(self.safe_string(order, 'timestamp'))
side = self.safe_string(order, 'side')
type = self.safe_string(order, 'type')
if (side != 'buy') and (side != 'sell'):
side = self.parse_order_side(type)
symbol = None
marketId = self.safe_string(order, 'instrument_id')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
else:
symbol = marketId
if market is not None:
if symbol is None:
symbol = market['symbol']
amount = self.safe_string(order, 'size')
filled = self.safe_string_2(order, 'filled_size', 'filled_qty')
remaining = None
if amount is not None:
if filled is not None:
amount = Precise.string_max(amount, filled)
remaining = Precise.string_max('0', Precise.string_sub(amount, filled))
if type == 'market':
remaining = '0'
cost = self.safe_string_2(order, 'filled_notional', 'funds')
price = self.safe_string(order, 'price')
average = self.safe_string(order, 'price_avg')
if cost is None:
if filled is not None and average is not None:
cost = Precise.string_mul(average, filled)
else:
if (average is None) and (filled is not None) and Precise.string_gt(filled, '0'):
average = Precise.string_div(cost, filled)
status = self.parse_order_status(self.safe_string(order, 'state'))
feeCost = self.safe_number(order, 'fee')
fee = None
if feeCost is not None:
feeCurrency = None
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
clientOrderId = self.safe_string(order, 'client_oid')
if (clientOrderId is not None) and (len(clientOrderId) < 1):
clientOrderId = None # fix empty clientOrderId string
stopPrice = self.safe_number(order, 'trigger_price')
return self.safe_order2({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': stopPrice,
'average': average,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': remaining,
'status': status,
'fee': fee,
'trades': None,
}, market)
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
defaultType = self.safe_string_2(self.options, 'fetchOrder', 'defaultType', market['type'])
type = self.safe_string(params, 'type', defaultType)
if type is None:
raise ArgumentsRequired(self.id + " fetchOrder() requires a type parameter(one of 'spot', 'margin', 'futures', 'swap').")
instrumentId = 'InstrumentId' if (market['futures'] or market['swap']) else ''
method = type + 'GetOrders' + instrumentId
request = {
'instrument_id': market['id'],
# 'client_oid': 'abcdef12345', # optional, [a-z0-9]{1,32}
# 'order_id': id,
}
clientOid = self.safe_string(params, 'client_oid')
if clientOid is not None:
method += 'ClientOid'
request['client_oid'] = clientOid
else:
method += 'OrderId'
request['order_id'] = id
query = self.omit(params, 'type')
response = await getattr(self, method)(self.extend(request, query))
#
# spot, margin
#
# {
# "client_oid":"oktspot70",
# "created_at":"2019-03-15T02:52:56.000Z",
# "filled_notional":"3.8886",
# "filled_size":"0.001",
# "funds":"",
# "instrument_id":"BTC-USDT",
# "notional":"",
# "order_id":"2482659399697408",
# "order_type":"0",
# "price":"3927.3",
# "product_id":"BTC-USDT",
# "side":"buy",
# "size":"0.001",
# "status":"filled",
# "state": "2",
# "timestamp":"2019-03-15T02:52:56.000Z",
# "type":"limit"
# }
#
# futures, swap
#
# {
# "instrument_id":"EOS-USD-190628",
# "size":"10",
# "timestamp":"2019-03-20T02:46:38.000Z",
# "filled_qty":"10",
# "fee":"-0.0080819",
# "order_id":"2510946213248000",
# "price":"3.712",
# "price_avg":"3.712",
# "status":"2",
# "state": "2",
# "type":"2",
# "contract_val":"10",
# "leverage":"10",
# "client_oid":"", # missing in swap orders
# "pnl":"0", # missing in swap orders
# "order_type":"0"
# }
#
return self.parse_order(response)
async def fetch_orders_by_state(self, state, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrdersByState() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
type = None
if market['futures'] or market['swap']:
type = market['type']
else:
defaultType = self.safe_string_2(self.options, 'fetchOrder', 'defaultType', market['type'])
type = self.safe_string(params, 'type', defaultType)
if type is None:
raise ArgumentsRequired(self.id + " fetchOrdersByState() requires a type parameter(one of 'spot', 'margin', 'futures', 'swap').")
request = {
'instrument_id': market['id'],
# '-2': failed,
# '-1': cancelled,
            # '0': open,
# '1': partially filled,
# '2': fully filled,
# '3': submitting,
# '4': cancelling,
# '6': incomplete(open+partially filled),
# '7': complete(cancelled+fully filled),
'state': state,
}
method = type + 'GetOrders'
if market['futures'] or market['swap']:
method += 'InstrumentId'
query = self.omit(params, 'type')
response = await getattr(self, method)(self.extend(request, query))
#
# spot, margin
#
# [
        #     # in fact, this documented API response does not correspond
# # to their actual API response for spot markets
# # OKEX v3 API returns a plain array of orders(see below)
# [
# {
# "client_oid":"oktspot76",
# "created_at":"2019-03-18T07:26:49.000Z",
# "filled_notional":"3.9734",
# "filled_size":"0.001",
# "funds":"",
# "instrument_id":"BTC-USDT",
# "notional":"",
# "order_id":"2500723297813504",
# "order_type":"0",
# "price":"4013",
# "product_id":"BTC-USDT",
# "side":"buy",
# "size":"0.001",
# "status":"filled",
# "state": "2",
# "timestamp":"2019-03-18T07:26:49.000Z",
# "type":"limit"
# },
# ],
# {
# "before":"2500723297813504",
# "after":"2500650881647616"
# }
# ]
#
# futures, swap
#
# {
# "result":true, # missing in swap orders
# "order_info": [
# {
# "instrument_id":"EOS-USD-190628",
# "size":"10",
# "timestamp":"2019-03-20T10:04:55.000Z",
# "filled_qty":"10",
# "fee":"-0.00841043",
# "order_id":"2512669605501952",
# "price":"3.668",
# "price_avg":"3.567",
# "status":"2",
# "state": "2",
# "type":"4",
# "contract_val":"10",
# "leverage":"10", # missing in swap orders
# "client_oid":"",
# "pnl":"1.09510794", # missing in swap orders
# "order_type":"0"
# },
# ]
# }
#
orders = None
if market['swap'] or market['futures']:
orders = self.safe_value(response, 'order_info', [])
else:
orders = response
responseLength = len(response)
if responseLength < 1:
return []
        # in fact, this documented API response does not correspond
# to their actual API response for spot markets
# OKEX v3 API returns a plain array of orders
if responseLength > 1:
before = self.safe_value(response[1], 'before')
if before is not None:
orders = response[0]
return self.parse_orders(orders, market, since, limit)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
# '-2': failed,
# '-1': cancelled,
        # '0': open,
# '1': partially filled,
# '2': fully filled,
# '3': submitting,
# '4': cancelling,
# '6': incomplete(open+partially filled),
# '7': complete(cancelled+fully filled),
return await self.fetch_orders_by_state('6', symbol, since, limit, params)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
# '-2': failed,
# '-1': cancelled,
        # '0': open,
# '1': partially filled,
# '2': fully filled,
# '3': submitting,
# '4': cancelling,
# '6': incomplete(open+partially filled),
# '7': complete(cancelled+fully filled),
return await self.fetch_orders_by_state('7', symbol, since, limit, params)
def parse_deposit_address(self, depositAddress, currency=None):
#
# {
# address: '0x696abb81974a8793352cbd33aadcf78eda3cfdfa',
        #         currency: 'eth',
# tag: 'abcde12345', # will be missing if the token does not require a deposit tag
# payment_id: 'abcde12345', # will not be returned if the token does not require a payment_id
# # can_deposit: 1, # 0 or 1, documented but missing
# # can_withdraw: 1, # 0 or 1, documented but missing
# }
#
address = self.safe_string(depositAddress, 'address')
tag = self.safe_string_2(depositAddress, 'tag', 'payment_id')
tag = self.safe_string_2(depositAddress, 'memo', 'Memo', tag)
currencyId = self.safe_string(depositAddress, 'currency')
code = self.safe_currency_code(currencyId)
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': depositAddress,
}
async def fetch_deposit_address(self, code, params={}):
await self.load_markets()
parts = code.split('-')
currency = self.currency(parts[0])
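        # e.g.(illustrative values): a code like 'USDT-ERC20' splits into
        # ['USDT', 'ERC20'] and only parts[0] is used to resolve the currency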
request = {
'currency': currency['id'],
}
response = await self.accountGetDepositAddress(self.extend(request, params))
#
# [
# {
# address: '0x696abb81974a8793352cbd33aadcf78eda3cfdfa',
# currency: 'eth'
# }
# ]
#
addressesByCode = self.parse_deposit_addresses(response)
address = self.safe_value(addressesByCode, code)
if address is None:
raise InvalidAddress(self.id + ' fetchDepositAddress cannot return nonexistent addresses, you should create withdrawal addresses with the exchange website first')
return address
async def withdraw(self, code, amount, address, tag=None, params={}):
tag, params = self.handle_withdraw_tag_and_params(tag, params)
self.check_address(address)
await self.load_markets()
currency = self.currency(code)
if tag:
address = address + ':' + tag
fee = self.safe_string(params, 'fee')
if fee is None:
raise ArgumentsRequired(self.id + " withdraw() requires a 'fee' string parameter, network transaction fee must be ≥ 0. Withdrawals to OKCoin or OKEx are fee-free, please set '0'. Withdrawing to external digital asset address requires network transaction fee.")
request = {
'currency': currency['id'],
'to_address': address,
            'destination': '4', # 2 = OKCoin International, 3 = OKEx, 4 = others
'amount': self.number_to_string(amount),
'fee': fee, # String. Network transaction fee ≥ 0. Withdrawals to OKCoin or OKEx are fee-free, please set as 0. Withdrawal to external digital asset address requires network transaction fee.
}
if 'password' in params:
request['trade_pwd'] = params['password']
elif 'trade_pwd' in params:
request['trade_pwd'] = params['trade_pwd']
elif self.password:
request['trade_pwd'] = self.password
query = self.omit(params, ['fee', 'password', 'trade_pwd'])
if not ('trade_pwd' in request):
raise ExchangeError(self.id + ' withdraw() requires self.password set on the exchange instance or a password / trade_pwd parameter')
response = await self.accountPostWithdrawal(self.extend(request, query))
#
# {
# "amount":"0.1",
# "withdrawal_id":"67485",
# "currency":"btc",
# "result":true
# }
#
return {
'info': response,
'id': self.safe_string(response, 'withdrawal_id'),
}
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
method = 'accountGetDepositHistory'
currency = None
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
method += 'Currency'
response = await getattr(self, method)(self.extend(request, params))
return self.parse_transactions(response, currency, since, limit, params)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
request = {}
method = 'accountGetWithdrawalHistory'
currency = None
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
method += 'Currency'
response = await getattr(self, method)(self.extend(request, params))
return self.parse_transactions(response, currency, since, limit, params)
def parse_transaction_status(self, status):
#
# deposit statuses
#
# {
# '0': 'waiting for confirmation',
# '1': 'confirmation account',
# '2': 'recharge success'
# }
#
        # withdrawal statuses
#
# {
# '-3': 'pending cancel',
# '-2': 'cancelled',
# '-1': 'failed',
# '0': 'pending',
# '1': 'sending',
# '2': 'sent',
# '3': 'email confirmation',
# '4': 'manual confirmation',
# '5': 'awaiting identity confirmation'
# }
#
statuses = {
'-3': 'pending',
'-2': 'canceled',
'-1': 'failed',
'0': 'pending',
'1': 'pending',
'2': 'ok',
'3': 'pending',
'4': 'pending',
'5': 'pending',
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# withdraw
#
# {
# "amount":"0.1",
# "withdrawal_id":"67485",
# "currency":"btc",
# "result":true
# }
#
# fetchWithdrawals
#
# {
# amount: "4.72100000",
# withdrawal_id: "1729116",
# fee: "0.01000000eth",
# txid: "0xf653125bbf090bcfe4b5e8e7b8f586a9d87aa7de94598702758c0802b…",
# currency: "ETH",
# from: "7147338839",
# to: "0x26a3CB49578F07000575405a57888681249c35Fd",
# timestamp: "2018-08-17T07:03:42.000Z",
# status: "2"
# }
#
# fetchDeposits
#
# {
# "amount": "4.19511659",
# "txid": "14c9a8c925647cdb7e5b2937ea9aefe2b29b2c273150ad3f44b3b8a4635ed437",
# "currency": "XMR",
# "from": "",
# "to": "48PjH3ksv1fiXniKvKvyH5UtFs5WhfS2Vf7U3TwzdRJtCc7HJWvCQe56dRahyhQyTAViXZ8Nzk4gQg6o4BJBMUoxNy8y8g7",
# "tag": "1234567",
        #         "deposit_id": 11571659, <-- we can use this
# "timestamp": "2019-10-01T14:54:19.000Z",
# "status": "2"
# }
#
type = None
id = None
address = None
withdrawalId = self.safe_string(transaction, 'withdrawal_id')
addressFrom = self.safe_string(transaction, 'from')
addressTo = self.safe_string(transaction, 'to')
tagTo = self.safe_string(transaction, 'tag')
if withdrawalId is not None:
type = 'withdrawal'
id = withdrawalId
address = addressTo
else:
# the payment_id will appear on new deposits but appears to be removed from the response after 2 months
id = self.safe_string_2(transaction, 'payment_id', 'deposit_id')
type = 'deposit'
address = addressTo
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId)
amount = self.safe_number(transaction, 'amount')
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
txid = self.safe_string(transaction, 'txid')
timestamp = self.parse8601(self.safe_string(transaction, 'timestamp'))
feeCost = None
if type == 'deposit':
feeCost = 0
else:
if currencyId is not None:
feeWithCurrencyId = self.safe_string(transaction, 'fee')
if feeWithCurrencyId is not None:
# https://github.com/ccxt/ccxt/pull/5748
lowercaseCurrencyId = currencyId.lower()
feeWithoutCurrencyId = feeWithCurrencyId.replace(lowercaseCurrencyId, '')
feeCost = float(feeWithoutCurrencyId)
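                    # illustrative walk-through(hypothetical values): a raw fee
                    # string like "0.01000000eth" with currencyId "ETH" has the
                    # lowercased currency id stripped, leaving "0.01000000",
                    # which is then parsed into the float 0.01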
# todo parse tags
return {
'info': transaction,
'id': id,
'currency': code,
'amount': amount,
'addressFrom': addressFrom,
'addressTo': addressTo,
'address': address,
'tagFrom': None,
'tagTo': tagTo,
'tag': tagTo,
'status': status,
'type': type,
'updated': None,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': {
'currency': code,
'cost': feeCost,
},
}
def parse_my_trade(self, pair, market=None):
# check that trading symbols match in both entries
userTrade = self.safe_value(pair, 1)
otherTrade = self.safe_value(pair, 0)
firstMarketId = self.safe_string(otherTrade, 'instrument_id')
secondMarketId = self.safe_string(userTrade, 'instrument_id')
if firstMarketId != secondMarketId:
raise NotSupported(self.id + ' parseMyTrade() received unrecognized response format, differing instrument_ids in one fill, the exchange API might have changed, paste your verbose output: https://github.com/ccxt/ccxt/wiki/FAQ#what-is-required-to-get-help')
marketId = firstMarketId
market = self.safe_market(marketId, market)
symbol = market['symbol']
quoteId = market['quoteId']
side = None
amount = None
cost = None
receivedCurrencyId = self.safe_string(userTrade, 'currency')
feeCurrencyId = None
if receivedCurrencyId == quoteId:
side = self.safe_string(otherTrade, 'side')
amount = self.safe_number(otherTrade, 'size')
cost = self.safe_number(userTrade, 'size')
feeCurrencyId = self.safe_string(otherTrade, 'currency')
else:
side = self.safe_string(userTrade, 'side')
amount = self.safe_number(userTrade, 'size')
cost = self.safe_number(otherTrade, 'size')
feeCurrencyId = self.safe_string(userTrade, 'currency')
id = self.safe_string(userTrade, 'trade_id')
price = self.safe_number(userTrade, 'price')
feeCostFirst = self.safe_number(otherTrade, 'fee')
feeCostSecond = self.safe_number(userTrade, 'fee')
feeCurrencyCodeFirst = self.safe_currency_code(self.safe_string(otherTrade, 'currency'))
feeCurrencyCodeSecond = self.safe_currency_code(self.safe_string(userTrade, 'currency'))
fee = None
fees = None
# fee is either a positive number(invitation rebate)
# or a negative number(transaction fee deduction)
# therefore we need to invert the fee
# more about it https://github.com/ccxt/ccxt/issues/5909
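        # illustrative example(hypothetical values): a raw fee of "-0.04647925"
        # USDT(a deduction) becomes {'cost': 0.04647925, 'currency': 'USDT'}
        # after inversion, while a rebate of "0.001" would become a cost of -0.001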
if (feeCostFirst is not None) and (feeCostFirst != 0):
if (feeCostSecond is not None) and (feeCostSecond != 0):
fees = [
{
'cost': -feeCostFirst,
'currency': feeCurrencyCodeFirst,
},
{
'cost': -feeCostSecond,
'currency': feeCurrencyCodeSecond,
},
]
else:
fee = {
'cost': -feeCostFirst,
'currency': feeCurrencyCodeFirst,
}
elif (feeCostSecond is not None) and (feeCostSecond != 0):
fee = {
'cost': -feeCostSecond,
'currency': feeCurrencyCodeSecond,
}
else:
fee = {
'cost': 0,
'currency': self.safe_currency_code(feeCurrencyId),
}
#
# simplified structures to show the underlying semantics
#
# # market/limit sell
#
# {
# "currency":"USDT",
# "fee":"-0.04647925", # ←--- fee in received quote currency
# "price":"129.13", # ←------ price
# "size":"30.98616393", # ←-- cost
# },
# {
# "currency":"ETH",
# "fee":"0",
# "price":"129.13",
# "size":"0.23996099", # ←--- amount
# },
#
# # market/limit buy
#
# {
# "currency":"ETH",
# "fee":"-0.00036049", # ←--- fee in received base currency
# "price":"129.16", # ←------ price
# "size":"0.240322", # ←----- amount
# },
# {
# "currency":"USDT",
# "fee":"0",
# "price":"129.16",
# "size":"31.03998952", # ←-- cost
# }
#
timestamp = self.parse8601(self.safe_string_2(userTrade, 'timestamp', 'created_at'))
takerOrMaker = self.safe_string_2(userTrade, 'exec_type', 'liquidity')
if takerOrMaker == 'M':
takerOrMaker = 'maker'
elif takerOrMaker == 'T':
takerOrMaker = 'taker'
orderId = self.safe_string(userTrade, 'order_id')
result = {
'info': pair,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': orderId,
'type': None,
'takerOrMaker': takerOrMaker,
'side': side,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
if fees is not None:
result['fees'] = fees
return result
def parse_my_trades(self, trades, market=None, since=None, limit=None, params={}):
grouped = self.group_by(trades, 'trade_id')
tradeIds = list(grouped.keys())
result = []
for i in range(0, len(tradeIds)):
tradeId = tradeIds[i]
pair = grouped[tradeId]
# make sure it has exactly 2 trades, no more, no less
numTradesInPair = len(pair)
if numTradesInPair == 2:
trade = self.parse_my_trade(pair)
result.append(trade)
symbol = None
if market is not None:
symbol = market['symbol']
return self.filter_by_symbol_since_limit(result, symbol, since, limit)
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
# okex actually returns ledger entries instead of fills here, so each fill in the order
# is represented by two trades with opposite buy/sell sides, not one :\
        # this aspect renders the 'fills' endpoint unusable for fetchOrderTrades
        # until either OKEX fixes the API or we work around this on our side somehow
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
if (limit is not None) and (limit > 100):
limit = 100
request = {
'instrument_id': market['id'],
# 'order_id': id, # string
# 'after': '1', # pagination of data to return records earlier than the requested ledger_id
            # 'before': '1', # pagination of data to return records newer than the requested ledger_id
# 'limit': limit, # optional, number of results per request, default = maximum = 100
}
defaultType = self.safe_string_2(self.options, 'fetchMyTrades', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
method = type + 'GetFills'
response = await getattr(self, method)(self.extend(request, query))
#
# [
# # sell
# {
# "created_at":"2020-03-29T11:55:25.000Z",
# "currency":"USDT",
# "exec_type":"T",
# "fee":"-0.04647925",
# "instrument_id":"ETH-USDT",
# "ledger_id":"10562924353",
# "liquidity":"T",
# "order_id":"4636470489136128",
# "price":"129.13",
# "product_id":"ETH-USDT",
# "side":"buy",
# "size":"30.98616393",
# "timestamp":"2020-03-29T11:55:25.000Z",
# "trade_id":"18551601"
# },
# {
# "created_at":"2020-03-29T11:55:25.000Z",
# "currency":"ETH",
# "exec_type":"T",
# "fee":"0",
# "instrument_id":"ETH-USDT",
# "ledger_id":"10562924352",
# "liquidity":"T",
# "order_id":"4636470489136128",
# "price":"129.13",
# "product_id":"ETH-USDT",
# "side":"sell",
# "size":"0.23996099",
# "timestamp":"2020-03-29T11:55:25.000Z",
# "trade_id":"18551601"
# },
# # buy
# {
# "created_at":"2020-03-29T11:55:16.000Z",
# "currency":"ETH",
# "exec_type":"T",
# "fee":"-0.00036049",
# "instrument_id":"ETH-USDT",
# "ledger_id":"10562922669",
# "liquidity":"T",
# "order_id": "4636469894136832",
# "price":"129.16",
# "product_id":"ETH-USDT",
# "side":"buy",
# "size":"0.240322",
# "timestamp":"2020-03-29T11:55:16.000Z",
# "trade_id":"18551600"
# },
# {
# "created_at":"2020-03-29T11:55:16.000Z",
# "currency":"USDT",
# "exec_type":"T",
# "fee":"0",
# "instrument_id":"ETH-USDT",
# "ledger_id":"10562922668",
# "liquidity":"T",
# "order_id":"4636469894136832",
# "price":"129.16",
# "product_id":"ETH-USDT",
# "side":"sell",
# "size":"31.03998952",
# "timestamp":"2020-03-29T11:55:16.000Z",
# "trade_id":"18551600"
# }
# ]
#
return self.parse_my_trades(response, market, since, limit, params)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
request = {
# 'instrument_id': market['id'],
'order_id': id,
# 'after': '1', # return the page after the specified page number
# 'before': '1', # return the page before the specified page number
# 'limit': limit, # optional, number of results per request, default = maximum = 100
}
return await self.fetch_my_trades(symbol, since, limit, self.extend(request, params))
async def fetch_position(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
method = None
request = {
'instrument_id': market['id'],
# 'order_id': id, # string
# 'after': '1', # pagination of data to return records earlier than the requested ledger_id
            # 'before': '1', # pagination of data to return records newer than the requested ledger_id
# 'limit': limit, # optional, number of results per request, default = maximum = 100
}
type = market['type']
if (type == 'futures') or (type == 'swap'):
method = type + 'GetInstrumentIdPosition'
elif type == 'option':
underlying = self.safe_string(params, 'underlying')
if underlying is None:
raise ArgumentsRequired(self.id + ' fetchPosition() requires an underlying parameter for ' + type + ' market ' + symbol)
method = type + 'GetUnderlyingPosition'
else:
raise NotSupported(self.id + ' fetchPosition() does not support ' + type + ' market ' + symbol + ', supported market types are futures, swap or option')
response = await getattr(self, method)(self.extend(request, params))
#
# futures
#
# crossed margin mode
#
# {
# "result": True,
# "holding": [
# {
# "long_qty": "2",
# "long_avail_qty": "2",
# "long_avg_cost": "8260",
# "long_settlement_price": "8260",
# "realised_pnl": "0.00020928",
# "short_qty": "2",
# "short_avail_qty": "2",
# "short_avg_cost": "8259.99",
# "short_settlement_price": "8259.99",
# "liquidation_price": "113.81",
# "instrument_id": "BTC-USD-191227",
# "leverage": "10",
# "created_at": "2019-09-25T07:58:42.129Z",
# "updated_at": "2019-10-08T14:02:51.029Z",
# "margin_mode": "crossed",
# "short_margin": "0.00242197",
# "short_pnl": "6.63E-6",
# "short_pnl_ratio": "0.002477997",
# "short_unrealised_pnl": "6.63E-6",
# "long_margin": "0.00242197",
# "long_pnl": "-6.65E-6",
# "long_pnl_ratio": "-0.002478",
# "long_unrealised_pnl": "-6.65E-6",
# "long_settled_pnl": "0",
# "short_settled_pnl": "0",
# "last": "8257.57"
# }
# ],
# "margin_mode": "crossed"
# }
#
# fixed margin mode
#
# {
# "result": True,
# "holding": [
# {
# "long_qty": "4",
# "long_avail_qty": "4",
# "long_margin": "0.00323844",
# "long_liqui_price": "7762.09",
# "long_pnl_ratio": "0.06052306",
# "long_avg_cost": "8234.43",
# "long_settlement_price": "8234.43",
# "realised_pnl": "-0.00000296",
# "short_qty": "2",
# "short_avail_qty": "2",
# "short_margin": "0.00241105",
# "short_liqui_price": "9166.74",
# "short_pnl_ratio": "0.03318052",
# "short_avg_cost": "8295.13",
# "short_settlement_price": "8295.13",
# "instrument_id": "BTC-USD-191227",
# "long_leverage": "15",
# "short_leverage": "10",
# "created_at": "2019-09-25T07:58:42.129Z",
# "updated_at": "2019-10-08T13:12:09.438Z",
# "margin_mode": "fixed",
# "short_margin_ratio": "0.10292507",
# "short_maint_margin_ratio": "0.005",
# "short_pnl": "7.853E-5",
# "short_unrealised_pnl": "7.853E-5",
# "long_margin_ratio": "0.07103743",
# "long_maint_margin_ratio": "0.005",
# "long_pnl": "1.9841E-4",
# "long_unrealised_pnl": "1.9841E-4",
# "long_settled_pnl": "0",
# "short_settled_pnl": "0",
# "last": "8266.99"
# }
# ],
# "margin_mode": "fixed"
# }
#
# swap
#
# crossed margin mode
#
# {
# "margin_mode": "crossed",
# "timestamp": "2019-09-27T03:49:02.018Z",
# "holding": [
# {
# "avail_position": "3",
# "avg_cost": "59.49",
# "instrument_id": "LTC-USD-SWAP",
# "last": "55.98",
# "leverage": "10.00",
# "liquidation_price": "4.37",
# "maint_margin_ratio": "0.0100",
# "margin": "0.0536",
# "position": "3",
# "realized_pnl": "0.0000",
# "unrealized_pnl": "0",
# "settled_pnl": "-0.0330",
# "settlement_price": "55.84",
# "side": "long",
# "timestamp": "2019-09-27T03:49:02.018Z"
# },
# ]
# }
#
# fixed margin mode
#
# {
# "margin_mode": "fixed",
# "timestamp": "2019-09-27T03:47:37.230Z",
# "holding": [
# {
# "avail_position": "20",
# "avg_cost": "8025.0",
# "instrument_id": "BTC-USD-SWAP",
# "last": "8113.1",
# "leverage": "15.00",
# "liquidation_price": "7002.6",
# "maint_margin_ratio": "0.0050",
# "margin": "0.0454",
# "position": "20",
# "realized_pnl": "-0.0001",
# "unrealized_pnl": "0",
# "settled_pnl": "0.0076",
# "settlement_price": "8279.2",
# "side": "long",
# "timestamp": "2019-09-27T03:47:37.230Z"
# }
# ]
# }
#
# option
#
# {
# "holding":[
# {
# "instrument_id":"BTC-USD-190927-12500-C",
# "position":"20",
# "avg_cost":"3.26",
# "avail_position":"20",
# "settlement_price":"0.017",
# "total_pnl":"50",
# "pnl_ratio":"0.3",
# "realized_pnl":"40",
# "unrealized_pnl":"10",
# "pos_margin":"100",
# "option_value":"70",
# "created_at":"2019-08-30T03:09:20.315Z",
# "updated_at":"2019-08-30T03:40:18.318Z"
# },
# {
# "instrument_id":"BTC-USD-190927-12500-P",
# "position":"20",
# "avg_cost":"3.26",
# "avail_position":"20",
# "settlement_price":"0.019",
# "total_pnl":"50",
# "pnl_ratio":"0.3",
# "realized_pnl":"40",
# "unrealized_pnl":"10",
# "pos_margin":"100",
# "option_value":"70",
# "created_at":"2019-08-30T03:09:20.315Z",
# "updated_at":"2019-08-30T03:40:18.318Z"
# }
# ]
# }
#
# todo unify parsePosition/parsePositions
return response
async def fetch_positions(self, symbols=None, params={}):
await self.load_markets()
method = None
defaultType = self.safe_string_2(self.options, 'fetchPositions', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
if (type == 'futures') or (type == 'swap'):
method = type + 'GetPosition'
elif type == 'option':
underlying = self.safe_string(params, 'underlying')
if underlying is None:
raise ArgumentsRequired(self.id + ' fetchPositions() requires an underlying parameter for ' + type + ' markets')
method = type + 'GetUnderlyingPosition'
else:
raise NotSupported(self.id + ' fetchPositions() does not support ' + type + ' markets, supported market types are futures, swap or option')
params = self.omit(params, 'type')
response = await getattr(self, method)(params)
#
# futures
#
# ...
#
#
# swap
#
# ...
#
# option
#
# {
# "holding":[
# {
# "instrument_id":"BTC-USD-190927-12500-C",
# "position":"20",
# "avg_cost":"3.26",
# "avail_position":"20",
# "settlement_price":"0.017",
# "total_pnl":"50",
# "pnl_ratio":"0.3",
# "realized_pnl":"40",
# "unrealized_pnl":"10",
# "pos_margin":"100",
# "option_value":"70",
# "created_at":"2019-08-30T03:09:20.315Z",
# "updated_at":"2019-08-30T03:40:18.318Z"
# },
# {
# "instrument_id":"BTC-USD-190927-12500-P",
# "position":"20",
# "avg_cost":"3.26",
# "avail_position":"20",
# "settlement_price":"0.019",
# "total_pnl":"50",
# "pnl_ratio":"0.3",
# "realized_pnl":"40",
# "unrealized_pnl":"10",
# "pos_margin":"100",
# "option_value":"70",
# "created_at":"2019-08-30T03:09:20.315Z",
# "updated_at":"2019-08-30T03:40:18.318Z"
# }
# ]
# }
#
# todo unify parsePosition/parsePositions
return response
async def fetch_ledger(self, code=None, since=None, limit=None, params={}):
await self.load_markets()
defaultType = self.safe_string_2(self.options, 'fetchLedger', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
query = self.omit(params, 'type')
suffix = '' if (type == 'account') else 'Accounts'
argument = ''
request = {
# 'from': 'id',
# 'to': 'id',
}
if limit is not None:
request['limit'] = limit
currency = None
if type == 'spot':
if code is None:
raise ArgumentsRequired(self.id + " fetchLedger() requires a currency code argument for '" + type + "' markets")
argument = 'Currency'
currency = self.currency(code)
request['currency'] = currency['id']
elif type == 'futures':
if code is None:
raise ArgumentsRequired(self.id + " fetchLedger() requires an underlying symbol for '" + type + "' markets")
argument = 'Underlying'
market = self.market(code) # we intentionally put a market inside here for the margin and swap ledgers
marketInfo = self.safe_value(market, 'info', {})
settlementCurrencyId = self.safe_string(marketInfo, 'settlement_currency')
settlementCurrencyCode = self.safe_currency_code(settlementCurrencyId)
currency = self.currency(settlementCurrencyCode)
underlyingId = self.safe_string(marketInfo, 'underlying')
request['underlying'] = underlyingId
elif (type == 'margin') or (type == 'swap'):
if code is None:
raise ArgumentsRequired(self.id + " fetchLedger() requires a code argument(a market symbol) for '" + type + "' markets")
argument = 'InstrumentId'
market = self.market(code) # we intentionally put a market inside here for the margin and swap ledgers
currency = self.currency(market['base'])
request['instrument_id'] = market['id']
#
# if type == 'margin':
# #
# # 3. Borrow
# # 4. Repayment
# # 5. Interest
# # 7. Buy
# # 8. Sell
# # 9. From capital account
# # 10. From C2C
# # 11. From Futures
# # 12. From Spot
# # 13. From ETT
# # 14. To capital account
# # 15. To C2C
# # 16. To Spot
# # 17. To Futures
# # 18. To ETT
# # 19. Mandatory Repayment
# # 20. From Piggybank
# # 21. To Piggybank
# # 22. From Perpetual
# # 23. To Perpetual
# # 24. Liquidation Fee
# # 54. Clawback
# # 59. Airdrop Return.
# #
        #     request['type'] = 'number' # All types will be returned if this field is left blank
# }
#
elif type == 'account':
if code is not None:
currency = self.currency(code)
request['currency'] = currency['id']
#
# #
# # 1. deposit
# # 2. withdrawal
# # 13. cancel withdrawal
# # 18. into futures account
# # 19. out of futures account
# # 20. into sub account
# # 21. out of sub account
# # 28. claim
# # 29. into ETT account
# # 30. out of ETT account
# # 31. into C2C account
# # 32. out of C2C account
# # 33. into margin account
# # 34. out of margin account
# # 37. into spot account
# # 38. out of spot account
# #
# request['type'] = 'number'
#
else:
raise NotSupported(self.id + " fetchLedger does not support the '" + type + "' type(the type must be one of 'account', 'spot', 'margin', 'futures', 'swap')")
method = type + 'Get' + suffix + argument + 'Ledger'
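        # the composed method name(derived from the branches above) is one of:
        # spotGetAccountsCurrencyLedger, futuresGetAccountsUnderlyingLedger,
        # marginGetAccountsInstrumentIdLedger, swapGetAccountsInstrumentIdLedger
        # or accountGetLedger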
response = await getattr(self, method)(self.extend(request, query))
#
# transfer funds transfer in/out
# trade funds moved as a result of a trade, spot and margin accounts only
# rebate fee rebate as per fee schedule, spot and margin accounts only
# match open long/open short/close long/close short(futures) or a change in the amount because of trades(swap)
# fee fee, futures only
# settlement settlement/clawback/settle long/settle short
# liquidation force close long/force close short/deliver close long/deliver close short
# funding funding fee, swap only
# margin a change in the amount after adjusting margin, swap only
#
# account
#
# [
# {
# "amount":0.00051843,
# "balance":0.00100941,
# "currency":"BTC",
# "fee":0,
# "ledger_id":8987285,
# "timestamp":"2018-10-12T11:01:14.000Z",
# "typename":"Get from activity"
# }
# ]
#
# spot
#
# [
# {
# "timestamp":"2019-03-18T07:08:25.000Z",
# "ledger_id":"3995334780",
# "created_at":"2019-03-18T07:08:25.000Z",
# "currency":"BTC",
# "amount":"0.0009985",
# "balance":"0.0029955",
# "type":"trade",
# "details":{
# "instrument_id":"BTC-USDT",
# "order_id":"2500650881647616",
# "product_id":"BTC-USDT"
# }
# }
# ]
#
# margin
#
# [
# [
# {
# "created_at":"2019-03-20T03:45:05.000Z",
# "ledger_id":"78918186",
# "timestamp":"2019-03-20T03:45:05.000Z",
# "currency":"EOS",
# "amount":"0", # ?
# "balance":"0.59957711",
# "type":"transfer",
# "details":{
# "instrument_id":"EOS-USDT",
# "order_id":"787057",
# "product_id":"EOS-USDT"
# }
# }
# ],
# {
# "before":"78965766",
# "after":"78918186"
# }
# ]
#
# futures
#
# [
# {
# "ledger_id":"2508090544914461",
# "timestamp":"2019-03-19T14:40:24.000Z",
# "amount":"-0.00529521",
# "balance":"0",
# "currency":"EOS",
# "type":"fee",
# "details":{
# "order_id":"2506982456445952",
# "instrument_id":"EOS-USD-190628"
# }
# }
# ]
#
# swap
#
# [
# {
# "amount":"0.004742",
# "fee":"-0.000551",
# "type":"match",
# "instrument_id":"EOS-USD-SWAP",
# "ledger_id":"197429674941902848",
# "timestamp":"2019-03-25T05:56:31.286Z"
# },
# ]
#
responseLength = len(response)
if responseLength < 1:
return []
isArray = isinstance(response[0], list)
isMargin = (type == 'margin')
entries = response[0] if (isMargin and isArray) else response
if type == 'swap':
ledgerEntries = self.parse_ledger(entries)
return self.filter_by_symbol_since_limit(ledgerEntries, code, since, limit)
return self.parse_ledger(entries, currency, since, limit)
def parse_ledger_entry_type(self, type):
types = {
'transfer': 'transfer', # # funds transfer in/out
'trade': 'trade', # funds moved as a result of a trade, spot and margin accounts only
'rebate': 'rebate', # fee rebate as per fee schedule, spot and margin accounts only
'match': 'trade', # open long/open short/close long/close short(futures) or a change in the amount because of trades(swap)
'fee': 'fee', # fee, futures only
'settlement': 'trade', # settlement/clawback/settle long/settle short
'liquidation': 'trade', # force close long/force close short/deliver close long/deliver close short
'funding': 'fee', # funding fee, swap only
'margin': 'margin', # a change in the amount after adjusting margin, swap only
}
return self.safe_string(types, type, type)
def parse_ledger_entry(self, item, currency=None):
#
#
# account
#
# {
# "amount":0.00051843,
# "balance":0.00100941,
# "currency":"BTC",
# "fee":0,
# "ledger_id":8987285,
# "timestamp":"2018-10-12T11:01:14.000Z",
# "typename":"Get from activity"
# }
#
# spot
#
# {
# "timestamp":"2019-03-18T07:08:25.000Z",
# "ledger_id":"3995334780",
# "created_at":"2019-03-18T07:08:25.000Z",
# "currency":"BTC",
# "amount":"0.0009985",
# "balance":"0.0029955",
# "type":"trade",
# "details":{
# "instrument_id":"BTC-USDT",
# "order_id":"2500650881647616",
# "product_id":"BTC-USDT"
# }
# }
#
# margin
#
# {
# "created_at":"2019-03-20T03:45:05.000Z",
# "ledger_id":"78918186",
# "timestamp":"2019-03-20T03:45:05.000Z",
# "currency":"EOS",
# "amount":"0", # ?
# "balance":"0.59957711",
# "type":"transfer",
# "details":{
# "instrument_id":"EOS-USDT",
# "order_id":"787057",
# "product_id":"EOS-USDT"
# }
# }
#
# futures
#
# {
# "ledger_id":"2508090544914461",
# "timestamp":"2019-03-19T14:40:24.000Z",
# "amount":"-0.00529521",
# "balance":"0",
# "currency":"EOS",
# "type":"fee",
# "details":{
# "order_id":"2506982456445952",
# "instrument_id":"EOS-USD-190628"
# }
# }
#
# swap
#
# {
# "amount":"0.004742",
# "fee":"-0.000551",
# "type":"match",
# "instrument_id":"EOS-USD-SWAP",
# "ledger_id":"197429674941902848",
# "timestamp":"2019-03-25T05:56:31.286Z"
# },
#
id = self.safe_string(item, 'ledger_id')
account = None
details = self.safe_value(item, 'details', {})
referenceId = self.safe_string(details, 'order_id')
referenceAccount = None
type = self.parse_ledger_entry_type(self.safe_string(item, 'type'))
code = self.safe_currency_code(self.safe_string(item, 'currency'), currency)
amount = self.safe_number(item, 'amount')
timestamp = self.parse8601(self.safe_string(item, 'timestamp'))
fee = {
'cost': self.safe_number(item, 'fee'),
'currency': code,
}
before = None
after = self.safe_number(item, 'balance')
status = 'ok'
marketId = self.safe_string(item, 'instrument_id')
symbol = None
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
return {
'info': item,
'id': id,
'account': account,
'referenceId': referenceId,
'referenceAccount': referenceAccount,
'type': type,
'currency': code,
'symbol': symbol,
'amount': amount,
'before': before, # balance before
'after': after, # balance after
'status': status,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': fee,
}
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
isArray = isinstance(params, list)
request = '/api/' + api + '/' + self.version + '/'
request += path if isArray else self.implode_params(path, params)
query = params if isArray else self.omit(params, self.extract_params(path))
url = self.implode_hostname(self.urls['api']['rest']) + request
type = self.get_path_authentication_type(path)
if (type == 'public') or (type == 'information'):
if query:
url += '?' + self.urlencode(query)
elif type == 'private':
self.check_required_credentials()
timestamp = self.iso8601(self.milliseconds())
headers = {
'OK-ACCESS-KEY': self.apiKey,
'OK-ACCESS-PASSPHRASE': self.password,
'OK-ACCESS-TIMESTAMP': timestamp,
# 'OK-FROM': '',
# 'OK-TO': '',
# 'OK-LIMIT': '',
}
auth = timestamp + method + request
if method == 'GET':
if query:
urlencodedQuery = '?' + self.urlencode(query)
url += urlencodedQuery
auth += urlencodedQuery
else:
if isArray or query:
body = self.json(query)
auth += body
headers['Content-Type'] = 'application/json'
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha256, 'base64')
headers['OK-ACCESS-SIGN'] = signature
return {'url': url, 'method': method, 'body': body, 'headers': headers}
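    # minimal standalone sketch of the signature computed in sign() above
    # (illustrative only; the credentials, timestamp and path are hypothetical):
    #
    #     import base64, hashlib, hmac
    #     auth = timestamp + method + request # e.g. '2019-03-18T07:26:49.000Z' + 'GET' + '/api/spot/v3/orders?instrument_id=BTC-USDT'
    #     sign = base64.b64encode(hmac.new(secret.encode(), auth.encode(), hashlib.sha256).digest())
    #
    # the resulting base64 string is sent in the OK-ACCESS-SIGN header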
def get_path_authentication_type(self, path):
# https://github.com/ccxt/ccxt/issues/6651
        # a special case to handle the optionGetUnderlying interfering with
        # other endpoints containing this keyword
if path == 'underlying':
return 'public'
auth = self.safe_value(self.options, 'auth', {})
key = self.find_broadly_matched_key(auth, path)
return self.safe_string(auth, key, 'private')
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
return # fallback to default error handler
feedback = self.id + ' ' + body
if code == 503:
# {"message":"name resolution failed"}
raise ExchangeNotAvailable(feedback)
#
# {"error_message":"Order does not exist","result":"true","error_code":"35029","order_id":"-1"}
#
message = self.safe_string(response, 'message')
errorCode = self.safe_string_2(response, 'code', 'error_code')
nonEmptyMessage = ((message is not None) and (message != ''))
nonZeroErrorCode = (errorCode is not None) and (errorCode != '0')
if nonEmptyMessage:
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
if nonZeroErrorCode:
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
if nonZeroErrorCode or nonEmptyMessage:
raise ExchangeError(feedback) # unknown message
| 47.45133 | 521 | 0.472685 |
4a1df2c43725772399718246e5945f6772873f84 | 5,562 | py | Python | caserec/evaluation/base_evaluation.py | caserec2018/CaseRecommender | 1b63fe79aa26786c99f35e6b8f0a0dd9e591811b | [
"MIT"
] | 16 | 2018-09-19T07:29:24.000Z | 2022-03-30T07:32:36.000Z | caserec/evaluation/base_evaluation.py | caserec2018/CaseRecommender | 1b63fe79aa26786c99f35e6b8f0a0dd9e591811b | [
"MIT"
] | 1 | 2018-09-10T17:43:56.000Z | 2018-09-10T17:43:56.000Z | caserec/evaluation/base_evaluation.py | caserec2018/CaseRecommender | 1b63fe79aa26786c99f35e6b8f0a0dd9e591811b | [
"MIT"
] | 2 | 2019-07-11T10:13:24.000Z | 2020-03-12T10:09:39.000Z | # coding=utf-8
""""
This class is base for evaluation strategies
Types of evaluation:
- Simple: Evaluation with traditional strategy
- All-but-one Protocol: Considers only one pair (u, i) from the test set to evaluate the ranking
"""
# © 2018. Case Recommender (MIT License)
from collections import defaultdict
from caserec.utils.process_data import ReadFile
__author__ = 'removed for double blind review'
class BaseEvaluation(object):
def __init__(self, sep='\t', metrics=None, all_but_one_eval=False, verbose=True, as_table=False, table_sep='\t'):
"""
        Base class for evaluation strategies
:param sep: Delimiter for input files
:type sep: str, default '\t'
:param metrics: List of evaluation metrics
:type metrics: list, default None
:param all_but_one_eval: If True, considers only one pair (u, i) from the test set to evaluate the ranking
:type all_but_one_eval: bool, default False
:param verbose: Print the evaluation results
:type verbose: bool, default True
:param as_table: Print the evaluation results as table (only work with verbose=True)
:type as_table: bool, default False
:param table_sep: Delimiter for print results (only work with verbose=True and as_table=True)
:type table_sep: str, default '\t'
"""
self.sep = sep
self.all_but_one_eval = all_but_one_eval
self.metrics = metrics
self.verbose = verbose
self.as_table = as_table
self.table_sep = table_sep
def evaluate(self, predictions, test_set):
"""
        Method to be implemented by each strategy using its respective metrics.
        Use ReadFile.read() to transform your file into a dict
:param predictions: Dictionary with ranking information
:type predictions: dict
:param test_set: Dictionary with test set information.
:type test_set: dict
"""
        raise NotImplementedError
def evaluate_with_files(self, prediction_file, test_file):
"""
Method to evaluate predictions using files
:param prediction_file: Predictions file with at least 2 columns for item recommendation
(eg. user item [score (optional)]) and 3 columns for rating prediction (eg. user item rating)
:type prediction_file: str
:param test_file: Test file
:type test_file: str
:return: Dictionary with all evaluation metrics and results
:rtype: dict
"""
predict = ReadFile(prediction_file, sep=self.sep).read()
test_set = ReadFile(test_file, sep=self.sep).read()
return self.evaluate(predict['feedback'], test_set)
def evaluate_recommender(self, predictions, test_set):
"""
Method to evaluate recommender results. This method should be called by item recommender algorithms
:param predictions: List with recommender output. e.g. [[user, item, score], [user, item2, score] ...]
:type predictions: list
:param test_set: Dictionary with test set information.
:type test_set: dict
:return: Dictionary with all evaluation metrics and results
:rtype: dict
"""
predictions_dict = {}
for sample in predictions:
predictions_dict.setdefault(sample[0], {}).update({sample[1]: sample[2]})
return self.evaluate(predictions_dict, test_set)
def evaluate_folds(self, folds_dir, predictions_file_name, test_file_name, k_folds=10):
"""
        Evaluate ranking in a set of folds. Fold names need to be integers starting at 0, e.g.
        a dir '/home/user/folds' which contains folds 0, 1, ..., 9.
:param folds_dir: Directory of folds
:type folds_dir: str
:param k_folds: Number of folds
:type k_folds: int, default 10
:param predictions_file_name: Name of the ranking file
:type predictions_file_name: str
:param test_file_name: Name of the test file
:type test_file_name: str
:return: Dictionary with all evaluation metrics and results
:rtype: dict
"""
folds_results = defaultdict()
for fold in range(k_folds):
predictions_file = folds_dir + str(fold) + '/' + predictions_file_name
test_file = folds_dir + str(fold) + '/' + test_file_name
for key, value in self.evaluate_with_files(predictions_file, test_file).items():
folds_results[key] = folds_results.get(key, 0) + value
folds_results = {k: round(v / k_folds, 6) for k, v in folds_results.items()}
if self.verbose:
self.print_results(folds_results)
return folds_results
def print_results(self, evaluation_results):
"""
Method to print the results
:param evaluation_results: Dictionary with results. e.g. {metric: value}
:type evaluation_results: dict
"""
if self.as_table:
header = ''
values = ''
for metric in self.metrics:
header += metric.upper() + self.table_sep
values += str(evaluation_results[metric.upper()]) + self.table_sep
print(header)
print(values)
else:
evaluation = 'Eval:: '
for metrics in self.metrics:
evaluation += metrics.upper() + ': ' + str(evaluation_results[metrics.upper()]) + ' '
print(evaluation)
| 32.911243 | 117 | 0.63826 |
4a1df300130e98c361cfe736b4618b24cd2c0a67 | 3,109 | py | Python | sdks/python/appcenter_sdk/models/DateTimeProperty.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | null | null | null | sdks/python/appcenter_sdk/models/DateTimeProperty.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 6 | 2019-10-23T06:38:53.000Z | 2022-01-22T07:57:58.000Z | sdks/python/appcenter_sdk/models/DateTimeProperty.py | Brantone/appcenter-sdks | eeb063ecf79908b6e341fb00196d2cd9dc8f3262 | [
"MIT"
] | 2 | 2019-10-23T06:31:05.000Z | 2021-08-21T17:32:47.000Z | # coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: [email protected]
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
import pprint
import re # noqa: F401
import six
class DateTimeProperty(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'value': 'string'
}
attribute_map = {
'value': 'value'
}
def __init__(self, value=None): # noqa: E501
"""DateTimeProperty - a model defined in Swagger""" # noqa: E501
self._value = None
self.discriminator = None
self.value = value
@property
def value(self):
"""Gets the value of this DateTimeProperty. # noqa: E501
Date time property value. # noqa: E501
:return: The value of this DateTimeProperty. # noqa: E501
:rtype: string
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this DateTimeProperty.
Date time property value. # noqa: E501
:param value: The value of this DateTimeProperty. # noqa: E501
:type: string
"""
if value is None:
raise ValueError("Invalid value for `value`, must not be `None`") # noqa: E501
self._value = value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, DateTimeProperty):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
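# usage sketch(illustrative only; the value below is hypothetical):
#
#     prop = DateTimeProperty(value='2019-10-23T06:38:53.000Z')
#     prop.to_dict() # -> {'value': '2019-10-23T06:38:53.000Z'}
#     prop == DateTimeProperty(value='2019-10-23T06:38:53.000Z') # -> True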
| 27.758929 | 91 | 0.559022 |
4a1df320e5f7f99fd7cea334cfe987920dd52df4 | 30,855 | py | Python | pkgs/conf-pkg/src/genie/libs/conf/lisp/lisp.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | 94 | 2018-04-30T20:29:15.000Z | 2022-03-29T13:40:31.000Z | pkgs/conf-pkg/src/genie/libs/conf/lisp/lisp.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | 67 | 2018-12-06T21:08:09.000Z | 2022-03-29T18:00:46.000Z | pkgs/conf-pkg/src/genie/libs/conf/lisp/lisp.py | miott/genielibs | 6464642cdd67aa2367bdbb12561af4bb060e5e62 | [
"Apache-2.0"
] | 49 | 2018-06-29T18:59:03.000Z | 2022-03-10T02:07:59.000Z |
__all__ = (
'Lisp',
)
# Python
from enum import Enum
# Genie
from genie.utils.cisco_collections import typedset
from genie.decorator import managedattribute
from genie.conf.base import ConfigurableBase
from genie.conf.base.config import CliConfig
from genie.conf.base.base import DeviceFeature, InterfaceFeature, LinkFeature
from genie.libs.conf.base import Routing
from genie.libs.conf.vrf import Vrf, VrfSubAttributes
from genie.conf.base.attributes import DeviceSubAttributes, SubAttributesDict,\
AttributesHelper, KeyedSubAttributes,\
InterfaceSubAttributes
# LISP Hierarchy
# --------------
# Lisp
# +- DeviceAttributes
# +- InterfaceAttributes
# | +- MobilityDynamicEidAttributes
# +- RouterInstanceAttributes
# +- LocatorSetAttributes
# | +- InterfaceAttributes
# | +- InterfacdTypeAttributes
# +- ServiceAttributes
# | +- ItrMrAttributes
# | +- EtrMsAttributes
# | +- ProxyItrAttributes
# +- InstanceAttributes
# | +- DynamicEidAttributes
# | +- DbMappingAttributes
# | +- ServiceAttributes
# | +- DbMappingAttributes
# | +- UsePetrAttributes
# | +- MapCacheAttributes
# +- SiteAttributes
# | +- InstanceIdAttributes
# | +- EidRecordAttributes
# +- ExtranetAttributes
# +- InstanceIdAttributes
# +- EidRecordProviderAttributes
# +- EidRecordSubscriberAttributes
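#
# usage sketch(illustrative only; the device handle and key values below are
# assumptions based on the hierarchy above and genie's usual *_attr pattern):
#
#     lisp = Lisp()
#     dev = lisp.device_attr[device]
#     ls = dev.router_instance_attr[0].locator_set_attr['RLOC_SET']
#     ls.locator_set_intf_attr['Ethernet1/1'].locator_set_intf_type_attr['ipv4']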
# ==========================================================================
# GLOBAL ENUM TYPES
# ==========================================================================
class ENCAP(Enum):
lisp = 'lisp'
vxlan = 'vxlan'
class ETR_AUTH_KEY_TYPE(Enum):
none = None
sha1 = 'hmac-sha-1-96'
sha2 = 'hmac-sha-256-128'
class Lisp(Routing, DeviceFeature):
# ==========================================================================
# CONF CLASS STRUCTURE
# ==========================================================================
# +- DeviceAttributes
class DeviceAttributes(DeviceSubAttributes):
# +- DeviceAttributes
# +- InterfaceAttributes
class InterfaceAttributes(InterfaceSubAttributes):
# +- DeviceAttributes
# +- InterfaceAttributes
# +- MobilityDynamicEidAttributes
class MobilityDynamicEidAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.if_mobility_dynamic_eid_name = key
super().__init__(parent)
mobility_dynamic_eid_attr = managedattribute(
name='mobility_dynamic_eid_attr',
read_only=True,
doc=MobilityDynamicEidAttributes.__doc__)
@mobility_dynamic_eid_attr.initter
def mobility_dynamic_eid_attr(self):
return SubAttributesDict(self.MobilityDynamicEidAttributes, parent=self)
intf_attr = managedattribute(
name='intf_attr',
read_only=True,
doc=InterfaceAttributes.__doc__)
@intf_attr.initter
def intf_attr(self):
return SubAttributesDict(self.InterfaceAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
class RouterInstanceAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.lisp_router_instance_id = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- LocatorSetAttributes
class LocatorSetAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.locator_set_name = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- LocatorSetAttributes
# +- InterfaceAttributes
class InterfaceAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.ls_interface = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- LocatorSetAttributes
# +- InterfaceAttributes
# +- InterfaceTypeAttributes
class InterfaceTypeAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
assert key in ['ipv4', 'ipv6', 'ethernet'],\
"'{key}' is not supported for locator_set_intf_type_attr, only 'ipv4' and 'ipv6' are supported".format(key=key)
self.ls_interface_type = key
super().__init__(parent)
locator_set_intf_type_attr = managedattribute(
name='locator_set_intf_type_attr',
read_only=True,
doc=InterfaceTypeAttributes.__doc__)
@locator_set_intf_type_attr.initter
def locator_set_intf_type_attr(self):
return SubAttributesDict(self.InterfaceTypeAttributes, parent=self)
locator_set_intf_attr = managedattribute(
                name='locator_set_intf_attr',
read_only=True,
doc=InterfaceAttributes.__doc__)
@locator_set_intf_attr.initter
def locator_set_intf_attr(self):
return SubAttributesDict(self.InterfaceAttributes, parent=self)
locator_set_attr = managedattribute(
name='locator_set_attr',
read_only=True,
doc=LocatorSetAttributes.__doc__)
@locator_set_attr.initter
def locator_set_attr(self):
return SubAttributesDict(self.LocatorSetAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ServiceAttributes
class ServiceAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
assert key in ['ipv4', 'ipv6', 'ethernet'],\
"'{key}' is not supported for service_attr, only 'ipv4', 'ipv6' and 'ethernet' are supported".format(key=key)
self.service = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ServiceAttributes
# +- ItrMrAttributes
class ItrMrAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.itr_map_resolver = key
super().__init__(parent)
itr_mr_attr = managedattribute(
name='itr_mr_attr',
read_only=True,
doc=ItrMrAttributes.__doc__)
@itr_mr_attr.initter
def itr_mr_attr(self):
return SubAttributesDict(self.ItrMrAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ServiceAttributes
# +- EtrMsAttributes
class EtrMsAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.etr_map_server = key
super().__init__(parent)
etr_ms_attr = managedattribute(
name='etr_ms_attr',
read_only=True,
doc=EtrMsAttributes.__doc__)
@etr_ms_attr.initter
def etr_ms_attr(self):
return SubAttributesDict(self.EtrMsAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ServiceAttributes
# +- ProxyItrAttributes
class ProxyItrAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.proxy_itr = key
super().__init__(parent)
proxy_attr = managedattribute(
name='proxy_attr',
read_only=True,
doc=ProxyItrAttributes.__doc__)
@proxy_attr.initter
def proxy_attr(self):
return SubAttributesDict(self.ProxyItrAttributes, parent=self)
service_attr = managedattribute(
name='service_attr',
read_only=True,
doc=ServiceAttributes.__doc__)
@service_attr.initter
def service_attr(self):
return SubAttributesDict(self.ServiceAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
class InstanceAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.instance_id = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- DynamicEidAttributes
class DynamicEidAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.inst_dyn_eid = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- DynamicEidAttributes
# +- DbMappingAttributes
class DbMappingAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.etr_dyn_eid_id = key
super().__init__(parent)
db_mapping_attr = managedattribute(
name='db_mapping_attr',
read_only=True,
doc=DbMappingAttributes.__doc__)
@db_mapping_attr.initter
def db_mapping_attr(self):
return SubAttributesDict(self.DbMappingAttributes, parent=self)
dynamic_eid_attr = managedattribute(
name='dynamic_eid_attr',
read_only=True,
doc=DynamicEidAttributes.__doc__)
@dynamic_eid_attr.initter
def dynamic_eid_attr(self):
return SubAttributesDict(self.DynamicEidAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
class ServiceAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.inst_service = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# +- DbMappingAttributes
class DbMappingAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.etr_eid_id = key
super().__init__(parent)
service_db_mapping_attr = managedattribute(
name='service_db_mapping_attr',
read_only=True,
doc=DbMappingAttributes.__doc__)
@service_db_mapping_attr.initter
def service_db_mapping_attr(self):
return SubAttributesDict(self.DbMappingAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# +- UsePetrAttributes
class UsePetrAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.etr_use_petr = key
super().__init__(parent)
use_petr_attr = managedattribute(
name='use_petr_attr',
read_only=True,
doc=UsePetrAttributes.__doc__)
@use_petr_attr.initter
def use_petr_attr(self):
return SubAttributesDict(self.UsePetrAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# +- MapCacheAttributes
class MapCacheAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.itr_mc_id = key
super().__init__(parent)
map_cache_attr = managedattribute(
name='map_cache_attr',
read_only=True,
doc=MapCacheAttributes.__doc__)
@map_cache_attr.initter
def map_cache_attr(self):
return SubAttributesDict(self.MapCacheAttributes, parent=self)
inst_service_attr = managedattribute(
name='inst_service_attr',
read_only=True,
doc=ServiceAttributes.__doc__)
@inst_service_attr.initter
def inst_service_attr(self):
return SubAttributesDict(self.ServiceAttributes, parent=self)
instance_id_attr = managedattribute(
name='instance_id_attr',
read_only=True,
doc=InstanceAttributes.__doc__)
@instance_id_attr.initter
def instance_id_attr(self):
return SubAttributesDict(self.InstanceAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- SiteAttributes
class SiteAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.ms_site_id = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- SiteAttributes
# +- InstanceIdAttributes
class InstanceIdAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.site_inst_id = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- SiteAttributes
# +- InstanceIdAttributes
# +- EidRecordAttributes
class EidRecordAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.ms_eid_id = key
super().__init__(parent)
eid_record_attr = managedattribute(
name='eid_record_attr',
read_only=True,
doc=EidRecordAttributes.__doc__)
@eid_record_attr.initter
def eid_record_attr(self):
return SubAttributesDict(self.EidRecordAttributes, parent=self)
site_inst_id_attr = managedattribute(
name='site_inst_id_attr',
read_only=True,
doc=InstanceIdAttributes.__doc__)
@site_inst_id_attr.initter
def site_inst_id_attr(self):
return SubAttributesDict(self.InstanceIdAttributes, parent=self)
site_attr = managedattribute(
name='site_attr',
read_only=True,
doc=SiteAttributes.__doc__)
@site_attr.initter
def site_attr(self):
return SubAttributesDict(self.SiteAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ExtranetAttributes
class ExtranetAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.ms_extranet = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ExtranetAttributes
# +- InstanceIdAttributes
class InstanceIdAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.extranet_inst_id = key
super().__init__(parent)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ExtranetAttributes
# +- InstanceIdAttributes
# +- EidRecordProviderAttributes
class EidRecordProviderAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.ms_extranet_provider_eid = key
super().__init__(parent)
eid_record_provider_attr = managedattribute(
name='eid_record_provider_attr',
read_only=True,
doc=EidRecordProviderAttributes.__doc__)
@eid_record_provider_attr.initter
def eid_record_provider_attr(self):
return SubAttributesDict(self.EidRecordProviderAttributes, parent=self)
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ExtranetAttributes
# +- InstanceIdAttributes
# +- EidRecordSubscriberAttributes
class EidRecordSubscriberAttributes(KeyedSubAttributes):
def __init__(self, parent, key):
self.ms_extranet_subscriber_eid = key
super().__init__(parent)
eid_record_subscriber_attr = managedattribute(
name='eid_record_subscriber_attr',
read_only=True,
doc=EidRecordSubscriberAttributes.__doc__)
@eid_record_subscriber_attr.initter
def eid_record_subscriber_attr(self):
return SubAttributesDict(self.EidRecordSubscriberAttributes, parent=self)
extranet_inst_id_attr = managedattribute(
name='extranet_inst_id_attr',
read_only=True,
doc=InstanceIdAttributes.__doc__)
@extranet_inst_id_attr.initter
def extranet_inst_id_attr(self):
return SubAttributesDict(self.InstanceIdAttributes, parent=self)
extranet_attr = managedattribute(
name='extranet_attr',
read_only=True,
doc=ExtranetAttributes.__doc__)
@extranet_attr.initter
def extranet_attr(self):
return SubAttributesDict(self.ExtranetAttributes, parent=self)
router_instance_attr = managedattribute(
name='router_instance_attr',
read_only=True,
doc=RouterInstanceAttributes.__doc__)
@router_instance_attr.initter
def router_instance_attr(self):
return SubAttributesDict(self.RouterInstanceAttributes, parent=self)
device_attr = managedattribute(
name='device_attr',
read_only=True,
doc=DeviceAttributes.__doc__)
@device_attr.initter
def device_attr(self):
return SubAttributesDict(self.DeviceAttributes, parent=self)
# ==========================================================================
# MANAGED ATTRIBUTES
# ==========================================================================
# enabled
enabled = managedattribute(
name='enabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# ==========================================================================
# +- DeviceAttributes
# +- InterfaceAttributes
# ==========================================================================
# if_mobility_liveness_test_disabled
if_mobility_liveness_test_disabled = managedattribute(
name='if_mobility_liveness_test_disabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- LocatorSetAttributes
# +- InterfaceAttributes
# +- InterfaceTypeAttributes
# ==========================================================================
# ls_priority
ls_priority = managedattribute(
name='ls_priority',
default=None,
type=(None, managedattribute.test_istype(int)))
# ls_weight
ls_weight = managedattribute(
name='ls_weight',
default=None,
type=(None, managedattribute.test_istype(int)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ServiceAttributes
# ==========================================================================
# itr_enabled
itr_enabled = managedattribute(
name='itr_enabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# etr_enabled
etr_enabled = managedattribute(
name='etr_enabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# ms_enabled
ms_enabled = managedattribute(
name='ms_enabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# mr_enabled
mr_enabled = managedattribute(
name='mr_enabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# proxy_etr_enabled
proxy_etr_enabled = managedattribute(
name='proxy_etr_enabled',
default=None,
type=(None, managedattribute.test_istype(bool)))
# locator_vrf
locator_vrf = managedattribute(
name='locator_vrf',
default=None,
type=(None, managedattribute.test_istype(str)))
# encapsulation
encapsulation = managedattribute(
name='encapsulation',
default=ENCAP.lisp,
type=(None, ENCAP))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- ServiceAttributes
# +- EtrMsAttributes
# ==========================================================================
# etr_auth_key
etr_auth_key = managedattribute(
name='etr_auth_key',
default=None,
type=(None, managedattribute.test_istype(str)))
# etr_auth_key_type
etr_auth_key_type = managedattribute(
name='etr_auth_key_type',
default=ETR_AUTH_KEY_TYPE.none,
type=(None, ETR_AUTH_KEY_TYPE))
# etr_proxy_reply
etr_proxy_reply = managedattribute(
name='etr_proxy_reply',
default=None,
type=(None, managedattribute.test_istype(bool)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- DynamicEidAttributes
# +- DbMappingAttributes
# ==========================================================================
# etr_dyn_eid_rlocs
etr_dyn_eid_rlocs = managedattribute(
name='etr_dyn_eid_rlocs',
default=None,
type=(None, managedattribute.test_istype(str)))
# etr_dyn_eid_loopback_address
etr_dyn_eid_loopback_address = managedattribute(
name='etr_dyn_eid_loopback_address',
default=None,
type=(None, managedattribute.test_istype(str)))
# etr_dyn_eid_priority
etr_dyn_eid_priority = managedattribute(
name='etr_dyn_eid_priority',
default=None,
type=(None, managedattribute.test_istype(int)))
# etr_dyn_eid_weight
etr_dyn_eid_weight = managedattribute(
name='etr_dyn_eid_weight',
default=None,
type=(None, managedattribute.test_istype(int)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# ==========================================================================
# etr_eid_vrf
etr_eid_vrf = managedattribute(
name='etr_eid_vrf',
default=None,
type=(None, managedattribute.test_istype(str)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# +- DbMappingAttributes
# ==========================================================================
# etr_eid_rlocs
etr_eid_rlocs = managedattribute(
name='etr_eid_rlocs',
default=None,
type=(None, managedattribute.test_istype(str)))
# etr_eid_loopback_address
etr_eid_loopback_address = managedattribute(
name='etr_eid_loopback_address',
default=None,
type=(None, managedattribute.test_istype(str)))
# etr_eid_priority
etr_eid_priority = managedattribute(
name='etr_eid_priority',
default=None,
type=(None, managedattribute.test_istype(int)))
# etr_eid_weight
etr_eid_weight = managedattribute(
name='etr_eid_weight',
default=None,
type=(None, managedattribute.test_istype(int)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# +- UsePetrAttributes
# ==========================================================================
# etr_use_petr_priority
etr_use_petr_priority = managedattribute(
name='etr_use_petr_priority',
default=None,
type=(None, managedattribute.test_istype(int)))
# etr_use_petr_weight
etr_use_petr_weight = managedattribute(
name='etr_use_petr_weight',
default=None,
type=(None, managedattribute.test_istype(int)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- InstanceAttributes
# +- ServiceAttributes
# +- MapCacheAttributes
# ==========================================================================
# itr_mc_map_request
itr_mc_map_request = managedattribute(
name='itr_mc_map_request',
default=None,
type=(None, managedattribute.test_istype(bool)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- SiteAttributes
# ==========================================================================
# ms_site_auth_key
ms_site_auth_key = managedattribute(
name='ms_site_auth_key',
default=None,
type=(None, managedattribute.test_istype(str)))
# ==========================================================================
# +- DeviceAttributes
# +- RouterInstanceAttributes
# +- SiteAttributes
# +- InstanceIdAttributes
# +- EidRecordAttributes
# ==========================================================================
# ms_eid_accept_more_specifics
ms_eid_accept_more_specifics = managedattribute(
name='ms_eid_accept_more_specifics',
default=None,
type=(None, managedattribute.test_istype(bool)))
# ==========================================================================
# BUILD_CONFIG & BUILD_UNCONFIG
# ==========================================================================
def build_config(self, devices=None, apply=True, attributes=None,
**kwargs):
cfgs = {}
assert not kwargs, kwargs
attributes = AttributesHelper(self, attributes)
if devices is None:
devices = self.devices
devices = set(devices)
for key, sub, attributes2 in attributes.mapping_items(
'device_attr',
keys=devices, sort=True):
cfgs[key] = sub.build_config(apply=False, attributes=attributes2)
if apply:
self.testbed.config_on_devices(cfgs, fail_invalid=True)
else:
return cfgs
def build_unconfig(self, devices=None, apply=True, attributes=None,
**kwargs):
cfgs = {}
attributes = AttributesHelper(self, attributes)
if devices is None:
devices = self.devices
devices = set(devices)
for key, sub, attributes2 in attributes.mapping_items(
'device_attr',
keys=devices, sort=True):
cfgs[key] = sub.build_unconfig(apply=False, attributes=attributes2)
if apply:
self.testbed.config_on_devices(cfgs, fail_invalid=True)
else:
return cfgs
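# ------------------------------------------------------------------
# Hypothetical usage sketch (not part of the original file). It assumes
# this attribute tree belongs to a Genie feature object, here called
# `lisp`, and that `dev` is a testbed device; the keys and values below
# are illustrative only.
# ------------------------------------------------------------------
# lisp.device_attr[dev].router_instance_attr[1] \
#     .service_attr['ipv4'].itr_enabled = True
# lisp.device_attr[dev].router_instance_attr[1] \
#     .instance_id_attr['101'].inst_service_attr['ipv4'] \
#     .etr_eid_vrf = 'red'
# cfgs = lisp.build_config(apply=False)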
| 36.952096 | 143 | 0.506012 |
4a1df322b84731b89aeae752a25a60dac76d431b | 7,731 | py | Python | demo/DecodeVideoDemo.py | ZoroWang/python-barcode | a5fb175661dda768c5102ddbb236f0f831d708d3 | [
"OLDAP-2.5"
] | 13 | 2020-05-18T13:13:17.000Z | 2021-09-30T12:33:26.000Z | demo/DecodeVideoDemo.py | ZoroWang/python-barcode | a5fb175661dda768c5102ddbb236f0f831d708d3 | [
"OLDAP-2.5"
] | 3 | 2020-08-17T02:16:14.000Z | 2021-10-08T00:59:27.000Z | demo/DecodeVideoDemo.py | ZoroWang/python-barcode | a5fb175661dda768c5102ddbb236f0f831d708d3 | [
"OLDAP-2.5"
] | 10 | 2020-05-11T08:03:58.000Z | 2021-07-17T21:18:29.000Z | import os
import sys
import time
from dbr import *
import cv2
reader = BarcodeReader()
results = None
Template_Settings = {
'1': '{"ImageParameter":{'
'"Name":"BestCoverage",'
'"DeblurLevel":9,'
'"ExpectedBarcodesCount":512,'
'"ScaleDownThreshold":100000,'
'"LocalizationModes":['
'{"Mode":"LM_CONNECTED_BLOCKS"},'
'{"Mode":"LM_SCAN_DIRECTLY"},'
'{"Mode":"LM_STATISTICS"},'
'{"Mode":"LM_LINES"},'
'{"Mode":"LM_STATISTICS_MARKS"}],'
'"GrayscaleTransformationModes":['
'{"Mode":"GTM_ORIGINAL"},'
'{"Mode":"GTM_INVERTED"}]'
'}'
'}',
'2': '{"ImageParameter":{'
'"Name":"BestSpeed",'
'"DeblurLevel":3,'
'"ExpectedBarcodesCount":512,'
'"LocalizationModes":['
'{"Mode":"LM_SCAN_DIRECTLY"}],'
'"TextFilterModes":['
'{"MinImageDimension":262144,"Mode":"TFM_GENERAL_CONTOUR"}]'
'}'
'}',
'3': '{"ImageParameter":{'
'"Name":"Balance",'
'"DeblurLevel":5,'
'"ExpectedBarcodesCount":512,'
'"LocalizationModes":['
'{"Mode":"LM_CONNECTED_BLOCKS"},'
'{"Mode":"LM_STATISTICS"}]'
'}'
'}'
}
class SubTextResultResultCallBack(TextResultResultCallBack):
@staticmethod
def text_results_callback_func(frame_id, t_results, user_data):
print(frame_id)
global results
results = t_results
def init_runtime_settings():
while True:
print()
print("Step 1: Choose a template settings : ")
print("\t 1: Best Coverage Settings")
print("\t 2: Best Speed Settings")
print("\t 3: Balance Settings")
item = input()
if str(item) == 'q' or str(item) == 'Q':
print('Bye, looking forward to your next use.')
exit()
if str(item) not in Template_Settings.keys():
print('Please choose a valid number.')
continue
else:
reader.init_runtime_settings_with_string(Template_Settings[item])
break
def get_video_path():
while True:
print()
print("Step 2: Choose the way to get video : ")
print("\t 1: Camera")
print("\t 2: Video file")
print()
item = input()
        if str(item) == 'q' or str(item) == 'Q':
print('Bye, looking forward to your next use.')
exit()
        elif str(item) != '1' and str(item) != '2':
print('Please choose a valid number.')
continue
else:
            if str(item) == '1':
video_file = 0
return video_file
else:
while True:
print()
print("Step 3: Input the path to the video file : ")
print()
video_file = input()
if video_file == 'q' or video_file == 'Q':
print('Bye, looking forward to your next use.')
exit()
if not os.path.exists(video_file):
print("The video file doesn't exist , please input a valid path.")
continue
else:
return video_file
def read_barcode():
global results
video_width = 0
video_height = 0
video_file = get_video_path()
vc = cv2.VideoCapture(video_file)
video_width = vc.get(cv2.CAP_PROP_FRAME_WIDTH)
video_height = vc.get(cv2.CAP_PROP_FRAME_HEIGHT)
vc.set(3, video_width) #set width
vc.set(4, video_height) #set height
stride = 0
if vc.isOpened():
rval, frame = vc.read()
stride = frame.strides[0]
else:
return
windowName = "Barcode Reader"
parameters = reader.init_frame_decoding_parameters()
# you can modify these following parameters.
parameters.max_queue_length = 30
parameters.max_result_queue_length = 30
parameters.width = video_width
parameters.height = video_height
parameters.stride = stride
parameters.image_pixel_format = EnumImagePixelFormat.IPF_RGB_888
parameters.region_top = 0
parameters.region_bottom = 100
parameters.region_left = 0
parameters.region_right = 100
parameters.region_measured_by_percentage = 1
parameters.threshold = 0.01
parameters.fps = 0
parameters.auto_filter = 1
reader.start_video_mode(parameters, SubTextResultResultCallBack.text_results_callback_func)
while True:
        if results is not None:
thickness = 2
color = (0,255,0)
for result in results:
text_result = TextResult(result)
print("Barcode Format : ")
print(text_result.barcode_format_string)
print("Barcode Text : ")
print(text_result.barcode_text)
print("Localization Points : ")
print(text_result.localization_result.localization_points)
print("Exception : ")
print(text_result.exception)
print("-------------")
points = text_result.localization_result.localization_points
cv2.line(frame, points[0], points[1], color, thickness)
cv2.line(frame, points[1], points[2], color, thickness)
cv2.line(frame, points[2], points[3], color, thickness)
cv2.line(frame, points[3], points[0], color, thickness)
cv2.putText(frame, text_result.barcode_text, points[0], cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,255))
results = None
cv2.imshow(windowName, frame)
rval, frame = vc.read()
        if not rval:
break
        try:
            ret = reader.append_video_frame(frame)
        except Exception:
            # appending may fail (e.g. when the internal frame queue is
            # full); dropping the frame and continuing is fine for a preview
            pass
# 'ESC' for quit
key = cv2.waitKey(1)
if key == 27:
break
reader.stop_video_mode()
cv2.destroyWindow(windowName)
if __name__ == '__main__':
# you can change the following variables' value to your own value.
license_key = "Input your own license"
# Apply for a trial license: https://www.dynamsoft.com/customer/license/trialLicense?product=dbr&utm_source=github
reader.init_license(license_key)
## The code snippet below shows how to use the full license in DBR 8.x:
# connection_paras = BarcodeReader.init_lts_connection_parameters()
## If DBR service is already built on your server, you can fill in the address of your server, or leave this property's default value.
# connection_paras.main_server_url = "Input your own server url"
# connection_paras.handshake_code = "Input your own handshake"
# connection_paras.deployment_type = EnumDMDeploymentType.DM_DT_DESKTOP
# connection_paras.uuid_generation_method = EnumDMUUIDGenerationMethod.DM_UUIDGM_RANDOM
# try:
# error = BarcodeReader.init_license_from_lts(connection_paras)
# if error[0] != EnumErrorCode.DBR_OK:
# print(error[1])
# except BarcodeReaderError as bre:
# print(bre)
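    # Usage sketch (assumed invocation, not part of the original demo):
    #   python DecodeVideoDemo.py
    # Choose a template, pick camera (1) or a video file (2), then press
    # ESC in the preview window to stop decoding.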
print("*************************************************")
print("Welcome to Dynamsoft Barcode Reader DecodeVideoDemo")
print("*************************************************")
print("Hints: Please input 'Q'or 'q' to quit the application.")
print()
init_runtime_settings()
read_barcode()
print("-------------------over------------------------") | 33.323276 | 138 | 0.559436 |
4a1df39d7b14f0de11034afafc11eff2d62bc480 | 3,423 | py | Python | build_tools/gen_hotfix_res.py | xiuzhifu/KSFramework | 87a5f4bd597971517d04b9e60caeb00baed57d21 | [
"Apache-2.0"
] | null | null | null | build_tools/gen_hotfix_res.py | xiuzhifu/KSFramework | 87a5f4bd597971517d04b9e60caeb00baed57d21 | [
"Apache-2.0"
] | null | null | null | build_tools/gen_hotfix_res.py | xiuzhifu/KSFramework | 87a5f4bd597971517d04b9e60caeb00baed57d21 | [
"Apache-2.0"
] | null | null | null | """
Author: zhaoqingqing([email protected])
Date: 2021/3/3 20:01
Desc: Publishes hot-update resources to the CDN.
Zips the Lua and Setting directories into zip archives
and copies the AssetBundle (ab) files into the cdn directory.
Tested with Python 3.7.4 on Windows 10.
"""
# coding=utf-8
import os
import shutil
from shutil import copytree, ignore_patterns
import sys
import zipfile
import gen_filelist
filelist_name = 'filelist.txt'
version_name = 'version.txt'
def isIgnore(name):
if name.endswith(".py"):
return True
if name.endswith(".bat"):
return True
if name.endswith(".py"):
return True
if name.endswith(".meta"):
return True
bname = os.path.basename(name)
if bname.find(".") < 0:
return True
if bname.startswith("."):
return True
return False
def zip_dir(dirname, zipfilename, backup):
if os.path.exists(zipfilename):
if backup:
shutil.copyfile(zipfilename, zipfilename + '.bak')
os.remove(zipfilename)
filelist = []
if os.path.isfile(dirname):
if not isIgnore(dirname):
filelist.append(dirname)
else:
for root, dirs, files in os.walk(dirname):
for dir in dirs:
filelist.append(os.path.join(root, dir))
for name in files:
if not isIgnore(name):
filelist.append(os.path.join(root, name))
zf = zipfile.ZipFile(zipfilename, "w", zipfile.zlib.DEFLATED)
for tar in filelist:
arcname = tar[len(dirname):]
        # second argument: the entry's name inside the zip, i.e. the path
        # with the root directory stripped so the hierarchy below it is kept
zf.write(tar, arcname)
zf.close()
def genVersion(fname, name_list):
f = open(fname, "w")
for name in name_list:
if os.path.exists(name):
bname = os.path.basename(name)
version = gen_filelist.GetFileMd5(name)
size = str(os.path.getsize(name))
line = "{0},{1},{2}{3}".format(bname, version, size, "\n")
f.write(line)
else:
print("genVersion路径不存在", name)
f.close()
print("version更新完成")
if __name__ == "__main__":
try:
# start_path = sys.argv[0]
start_path = r'E:\Code\KSFramework\build_tools\\'
platform = "Windows"
if (len(sys.argv) >= 3):
start_path = sys.argv[1]
platform = sys.argv[2]
dir = os.path.abspath(os.path.dirname(start_path))
dst_root = dir + "\cdn\\"
src_path = dir + r'\..\KSFramework\Product\\'
if not os.path.exists(dst_root):
os.makedirs(dst_root)
print("not exist path,create", dst_root)
# print(dst_root,src_path)
        # zip the directories; zipping the same unchanged directory
        # repeatedly was verified to produce the same md5
zip_dir(src_path + 'Lua', dst_root + 'lua.zip', True)
zip_dir(src_path + 'Setting', dst_root + 'setting.zip', True)
print("生成zip文件完成")
src_ab = src_path + 'Bundles\\' + platform
dst_ab = dst_root + 'Bundles\\' + platform
if os.path.exists(dst_ab):
shutil.rmtree(dst_ab)
print("exist path,delete", dst_ab)
shutil.copytree(src_ab, dst_ab, ignore=ignore_patterns('*.meta', '*.py', '*.bat'))
print("同步ab文件{0}->{1} 完成".format(src_ab, dst_ab))
ver_list = [dst_root + "lua.zip", dst_root + 'setting.zip', dst_ab + '\\filelist.txt']
genVersion(dst_root + platform + "-" + version_name, ver_list)
except Exception as ex:
        print('Exception:\r\n')
        print(ex)
os.system("pause")
| 29.508621 | 94 | 0.584575 |
4a1df4f040b4670f858d2d577c1a2b66e7866764 | 2,839 | py | Python | bin/make-book.py | tsibley/software-carpentry-2015-01-15-uw | 1bd3eab80b5058af2111d2f922bd4ea6038117e1 | [
"CC-BY-3.0"
] | null | null | null | bin/make-book.py | tsibley/software-carpentry-2015-01-15-uw | 1bd3eab80b5058af2111d2f922bd4ea6038117e1 | [
"CC-BY-3.0"
] | null | null | null | bin/make-book.py | tsibley/software-carpentry-2015-01-15-uw | 1bd3eab80b5058af2111d2f922bd4ea6038117e1 | [
"CC-BY-3.0"
] | null | null | null | from __future__ import print_function
import sys
import os.path
import re
# Header required to make this a Jekyll file.
HEADER = '''---
layout: book
title: "Software Carpentry Volume 1: Basics"
root: .
---'''
def main():
print(HEADER)
for filename in sys.argv[1:]:
with open(filename, 'r') as reader:
lines = reader.readlines()
title = None
if lines[0].startswith('---'):
lines, skipped = skip(filename, lines, '---', '---')
title = extract_title(filename, skipped)
lines, _ = skip(filename, lines, '<div class="toc"', '</div>')
lines = fix_image_paths(filename, lines)
lines = fix_gloss(filename, lines)
if title:
print(format_title(filename, title))
for line in lines:
print(line.rstrip())
print()
def skip(filename, lines, open, close):
'''Skip a block of lines starting with open and ending with close.'''
i_open = None
i_close = None
for (i, ln) in enumerate(lines):
if (i_open is None) and ln.startswith(open):
i_open = i
elif (i_open is not None) and ln.startswith(close):
i_close = i
return lines[:i_open] + lines[i_close+1:], lines[i_open:i_close]
else:
return lines, None
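# Example (illustrative only): for a Jekyll page,
#   skip(fn, ['---', 'title: Intro', '---', 'body'], '---', '---')
# returns (['body'], ['---', 'title: Intro']): the YAML front matter is
# removed and handed back separately so extract_title() can scan it.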
def fix_image_paths(filename, lines):
'''Modify image paths to include directory.'''
front, _ = os.path.split(filename)
front = front.replace('cached/', '')
# Regex for Markdown
md_regex = r'(!\[.*\]\()(.*\))'
md_regex_replace = r'\1{0}/\2'.format(front)
# "Regex" for img HTML tag
src = '<img src="'
dst = '<img src="{0}/'.format(front)
for (i, ln) in enumerate(lines):
# If using Markdown extension
ln = re.sub(md_regex, md_regex_replace, ln)
# If using img HTML tag
lines[i] = ln.replace(src, dst)
return lines
def fix_gloss(filename, lines):
'''Fix up glossary entries.'''
is_glossary = 'gloss.md' in filename
for (i, ln) in enumerate(lines):
lines[i] = ln.replace('href="../../gloss.html#', 'href="#g:')
if is_glossary:
lines[i] = ln.replace('](#', '](#g:').replace('<a name="', '<a name="g:')
return lines
def extract_title(filename, lines):
'''Extract title from YAML header.'''
for ln in lines:
if ln.startswith('title:'):
return ln.split(':', 1)[1].strip()
return None
def format_title(filename, title):
title = '## {0}\n'.format(title)
f = os.path.split(filename)[-1]
if f in ('index.md', 'intro.md'):
return '\n'.join(['<div class="chapter" markdown="1">', title, '</div>'])
else:
return title
if __name__ == '__main__':
main()
| 29.884211 | 86 | 0.557943 |
4a1df7c8c4bb4c2ddaaf5d9d39b33e63504dbf91 | 2,172 | py | Python | bingo/bingo-elastic/python/tests/model/test_helpers.py | f1nzer/Indigo | 59efbd0be0b42f449f706c3a3c8d094e483e5ef4 | [
"Apache-2.0"
] | null | null | null | bingo/bingo-elastic/python/tests/model/test_helpers.py | f1nzer/Indigo | 59efbd0be0b42f449f706c3a3c8d094e483e5ef4 | [
"Apache-2.0"
] | null | null | null | bingo/bingo-elastic/python/tests/model/test_helpers.py | f1nzer/Indigo | 59efbd0be0b42f449f706c3a3c8d094e483e5ef4 | [
"Apache-2.0"
] | null | null | null | from pathlib import Path
import bingo_elastic.model.helpers as helpers
from bingo_elastic.model.record import (
IndigoRecordMolecule,
IndigoRecordReaction,
)
def test_iterate_sdf(resource_loader):
results = []
for step in range(0, 2):
if 0 == step:
sdf = helpers.iterate_sdf(
resource_loader("molecules/rand_queries_small.sdf")
)
else:
sdf = helpers.iterate_file(
Path(resource_loader("molecules/rand_queries_small.sdf"))
)
i = 0
for i, _ in enumerate(sdf, start=1):
pass
results.append(i)
assert results[0] == results[1]
def test_iterate_smiles(resource_loader):
results = []
for step in range(0, 2):
if 0 == step:
smiles = helpers.iterate_smiles(
resource_loader("molecules/pubchem_slice_50.smi")
)
else:
smiles = helpers.iterate_file(
Path(resource_loader("molecules/pubchem_slice_50.smi"))
)
i = 0
for i, _ in enumerate(smiles, start=1):
pass
results.append(i)
assert results[0] == results[1]
def test_iterate_cml(resource_loader):
results = []
for step in range(0, 2):
if 0 == step:
cml = helpers.iterate_cml(
resource_loader("molecules/tetrahedral-all.cml")
)
else:
cml = helpers.iterate_file(
Path(resource_loader("molecules/tetrahedral-all.cml"))
)
i = 0
for i, _ in enumerate(cml, start=1):
pass
results.append(i)
assert results[0] == results[1]
def test_load_reaction(indigo_fixture, resource_loader) -> None:
reaction = helpers.load_reaction(
resource_loader("reactions/rheadb/58029.rxn"), indigo_fixture
)
assert isinstance(reaction, IndigoRecordReaction)
def test_load_molecule(indigo_fixture, resource_loader) -> None:
molecule = helpers.load_molecule(
resource_loader("molecules/composition1.mol"), indigo_fixture
)
assert isinstance(molecule, IndigoRecordMolecule)
| 28.578947 | 73 | 0.603591 |
4a1df8067e1c0494761885beb2ee57876a743c0d | 2,855 | py | Python | release/stubs.min/Autodesk/Revit/DB/__init___parts/ScheduleSheetInstance.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/Autodesk/Revit/DB/__init___parts/ScheduleSheetInstance.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | release/stubs.min/Autodesk/Revit/DB/__init___parts/ScheduleSheetInstance.py | YKato521/ironpython-stubs | b1f7c580de48528490b3ee5791b04898be95a9ae | [
"MIT"
] | null | null | null | class ScheduleSheetInstance(Element, IDisposable):
""" An element that represents a particular placement of a schedule on a sheet. """
@staticmethod
def Create(document, viewSheetId, scheduleId, origin):
"""
Create(document: Document,viewSheetId: ElementId,scheduleId: ElementId,origin: XYZ) -> ScheduleSheetInstance
Create an instance of a schedule on a sheet.
document: The document
viewSheetId: The id of the sheet where the schedule will be placed.
scheduleId: The id of the schedule view.
origin: Location on the sheet where the schedule will be placed.
Returns: The new ScheduleInstance.
"""
pass
def Dispose(self):
""" Dispose(self: Element,A_0: bool) """
pass
def getBoundingBox(self, *args):
""" getBoundingBox(self: Element,view: View) -> BoundingBoxXYZ """
pass
def ReleaseUnmanagedResources(self, *args):
""" ReleaseUnmanagedResources(self: Element,disposing: bool) """
pass
def setElementType(self, *args):
""" setElementType(self: Element,type: ElementType,incompatibleExceptionMessage: str) """
pass
def __enter__(self, *args):
""" __enter__(self: IDisposable) -> object """
pass
def __exit__(self, *args):
""" __exit__(self: IDisposable,exc_type: object,exc_value: object,exc_back: object) """
pass
def __init__(self, *args):
""" x.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signaturex.__init__(...) initializes x; see x.__class__.__doc__ for signature """
pass
IsTitleblockRevisionSchedule = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Identifies if this ScheduleSheetInstance is a revision schedule in a titleblock family.
Get: IsTitleblockRevisionSchedule(self: ScheduleSheetInstance) -> bool
"""
Point = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Location on the sheet where the ScheduleInstance is placed (in sheet coordinates).
Get: Point(self: ScheduleSheetInstance) -> XYZ
Set: Point(self: ScheduleSheetInstance)=value
"""
Rotation = property(lambda self: object(), lambda self, v: None, lambda self: None)
"""Rotation of the ScheduleInstance.
Get: Rotation(self: ScheduleSheetInstance) -> ViewportRotation
Set: Rotation(self: ScheduleSheetInstance)=value
"""
ScheduleId = property(
lambda self: object(), lambda self, v: None, lambda self: None
)
"""Id of the "master" schedule that generates this ScheduleInstance.
Get: ScheduleId(self: ScheduleSheetInstance) -> ElementId
"""
| 26.933962 | 221 | 0.656392 |
4a1df8aa137f0d0505f1c4cd297efe3613d41362 | 13,659 | py | Python | cnn_models/convolutional_sutskever.py | maburto00/ndsgo | 9cd27adcdf937cdf9863c158e039ad131d6b24eb | [
"MIT"
] | 1 | 2018-02-20T15:51:05.000Z | 2018-02-20T15:51:05.000Z | cnn_models/convolutional_sutskever.py | maburto00/ndsgo | 9cd27adcdf937cdf9863c158e039ad131d6b24eb | [
"MIT"
] | 2 | 2020-02-11T13:11:08.000Z | 2020-02-12T16:59:11.000Z | cnn_models/convolutional_sutskever.py | maburto00/ndsgo | 9cd27adcdf937cdf9863c158e039ad131d6b24eb | [
"MIT"
] | null | null | null | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import sys
import time
import struct
import numpy
from six.moves import urllib
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
IMAGE_SIZE = 9
NUM_LABELS = IMAGE_SIZE * IMAGE_SIZE
NUM_CHANNELS = 4
NUM_FILTERS = 32 #use 64,128,....
#VALIDATION_SIZE = 5000 # Size of the validation set.
SEED = 66478 # Set to None for random seed.
BATCH_SIZE = 64
NUM_EPOCHS = 10000
EVAL_BATCH_SIZE = 64
EVAL_FREQUENCY = 10 # Number of steps between evaluations.
tf.app.flags.DEFINE_boolean("self_test", False, "True if running a self test.")
tf.app.flags.DEFINE_boolean('use_fp16', False,
"Use half floats instead of full floats if True.")
FLAGS = tf.app.flags.FLAGS
def data_type():
"""Return the type of the activations, weights, and placeholder variables."""
if FLAGS.use_fp16:
return tf.float16
else:
return tf.float32
def extract_data(filename):
"""Extract the images into a 4D tensor [image index, y, x, channels].
Values are rescaled from [0, 255] down to [-0.5, 0.5].
"""
# TODO: read magic number from file and determine attributes from there (NUM_IMAGES, ETC)
print('Extracting', filename)
with gzip.open(filename) as bytestream:
# read header...
bytestream.read(4) #magic number
num_images = struct.unpack('>I',bytestream.read(4))[0]
global NUM_CHANNELS
NUM_CHANNELS = struct.unpack('>I',bytestream.read(4))[0] #
global IMAGE_SIZE
IMAGE_SIZE = struct.unpack('>I',bytestream.read(4))[0]
global NUM_LABELS
NUM_LABELS = IMAGE_SIZE * IMAGE_SIZE
bytestream.read(4) # it is the IMAGE SIZE AGAIN
# bytestream.read(16)
buf = bytestream.read(num_images * NUM_CHANNELS * IMAGE_SIZE * IMAGE_SIZE)
data = numpy.frombuffer(buf, dtype=numpy.uint8)
        print('buf len {} data len:{} IMAGE_SIZE:{} NUM_CHANNELS:{}'.format(len(buf),
len(data),IMAGE_SIZE,NUM_CHANNELS))
# data = (data - (PIXEL_DEPTH / 2.0)) / PIXEL_DEPTH
# Convert from [depth, height, width] to [height, width, depth].
# result.uint8image = tf.transpose(depth_major, [1, 2, 0])
# data = data.reshape(num_images, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS)
data = data.reshape(num_images, NUM_CHANNELS, IMAGE_SIZE, IMAGE_SIZE)
data = data.transpose(0,2,3,1)
#numpy.transpose()
return data
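# Assumed idx-style header layout for the vector files read above
# (big-endian uint32 fields, inferred from the reads in extract_data):
#   magic | num_images | num_channels | image_size | image_size
# so the whole header could equivalently be parsed in one call:
#   magic, n, c, s, _ = struct.unpack('>5I', bytestream.read(20))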
def extract_labels(filename):
"""Extract the labels into a vector of int64 label IDs."""
print('Extracting', filename)
with gzip.open(filename) as bytestream:
#bytestream.read(8)
bytestream.read(4) #magic number
        num_images = struct.unpack('>I', bytestream.read(4))[0]
buf = bytestream.read(2 * num_images)
dt = numpy.dtype(numpy.uint16)
dt = dt.newbyteorder('>')
labels = numpy.frombuffer(buf, dtype=dt)
return labels
#def fake_data(num_images):
# """Generate a fake dataset that matches the dimensions of MNIST."""
# data = numpy.ndarray(
# shape=(num_images, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS),
# dtype=numpy.float32)
# labels = numpy.zeros(shape=(num_images,), dtype=numpy.int64)
# for image in xrange(num_images):
# label = image % 2
# data[image, :, :, 0] = label - 0.5
# labels[image] = label
# return data, labels
def error_rate(predictions, labels):
"""Return the error rate based on dense predictions and sparse labels."""
return 100.0 - (
100.0 *
numpy.sum(numpy.argmax(predictions, 1) == labels) /
predictions.shape[0])
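# e.g. if 3 of 4 rows in `predictions` argmax-match their labels,
# error_rate() returns 25.0 (percent).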
def main(argv=None): # pylint: disable=unused-argument
# if FLAGS.self_test:
# print('Running self-test.')
# train_data, train_labels = fake_data(256)
# validation_data, validation_labels = fake_data(EVAL_BATCH_SIZE)
# test_data, test_labels = fake_data(EVAL_BATCH_SIZE)
# num_epochs = 1
#else:
# Get the data.
#filename='9x9_10games'
filename = 'gogod_9x9_games'
#filename = 'KGS2001'
train_data_filename = filename + '-train-vectors-idx4-ubyte.gz'
train_labels_filename = filename + '-train-labels-idx1-ubyte.gz'
test_data_filename = filename + '-test-vectors-idx4-ubyte.gz'
test_labels_filename = filename + '-test-labels-idx1-ubyte.gz'
# Extract it into numpy arrays.
train_data = extract_data(train_data_filename)
train_labels = extract_labels(train_labels_filename)
test_data = extract_data(test_data_filename)
test_labels = extract_labels(test_labels_filename)
# Generate a validation set.
global VALIDATION_SIZE
    VALIDATION_SIZE = int(len(train_data) * 0.15)
validation_data = train_data[:VALIDATION_SIZE, ...]
validation_labels = train_labels[:VALIDATION_SIZE]
train_data = train_data[VALIDATION_SIZE:, ...]
train_labels = train_labels[VALIDATION_SIZE:]
num_epochs = NUM_EPOCHS
train_size = train_labels.shape[0]
# This is where training samples and labels are fed to the graph.
# These placeholder nodes will be fed a batch of training data at each
# training step using the {feed_dict} argument to the Run() call below.
train_data_node = tf.placeholder(
data_type(),
#shape=(BATCH_SIZE, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS))
shape=(BATCH_SIZE, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS))
train_labels_node = tf.placeholder(tf.int64, shape=(BATCH_SIZE,))
eval_data = tf.placeholder(
data_type(),
shape=(EVAL_BATCH_SIZE, IMAGE_SIZE, IMAGE_SIZE, NUM_CHANNELS))
# The variables below hold all the trainable weights. They are passed an
# initial value which will be assigned when we call:
# {tf.initialize_all_variables().run()}
conv1_weights = tf.Variable(
tf.truncated_normal([5, 5, NUM_CHANNELS, 32], # 5x5 filter, depth 32.
stddev=0.1,
seed=SEED, dtype=data_type()))
conv1_biases = tf.Variable(tf.zeros([32], dtype=data_type()))
conv2_weights = tf.Variable(tf.truncated_normal(
[1, 1, 32, 1], stddev=0.1,
seed=SEED, dtype=data_type()))
conv2_biases = tf.Variable(tf.constant(0.1, shape=[IMAGE_SIZE*IMAGE_SIZE], dtype=data_type()))
# We will replicate the model structure for the training subgraph, as well
# as the evaluation subgraphs, while sharing the trainable parameters.
def model(data, train=False):
"""The Model definition."""
# 2D convolution, with 'SAME' padding (i.e. the output feature map has
# the same size as the input). Note that {strides} is a 4D array whose
# shape matches the data layout: [image index, y, x, depth].
conv = tf.nn.conv2d(data,
conv1_weights,
strides=[1, 1, 1, 1],
padding='SAME')
# Bias and rectified linear non-linearity.
relu = tf.nn.relu(tf.nn.bias_add(conv, conv1_biases))
# Max pooling. The kernel size spec {ksize} also follows the layout of
# the data. Here we have a pooling window of 2, and a stride of 2.
conv = tf.nn.conv2d(relu,
conv2_weights,
strides=[1, 1, 1, 1],
padding='SAME')
#relu = tf.nn.relu(tf.nn.bias_add(conv, conv2_biases))
# Reshape the feature map cuboid into a 2D matrix to feed it to the
# fully connected layers.
conv_shape = conv.get_shape().as_list()
reshape = tf.reshape(
conv,
[conv_shape[0], conv_shape[1] * conv_shape[2] * conv_shape[3]])
# Fully connected layer. Note that the '+' operation automatically
# broadcasts the biases.
hidden = tf.nn.relu(reshape + conv2_biases)
# Add a 50% dropout during training only. Dropout also scales
# activations such that no rescaling is needed at evaluation time.
#if train:
#hidden = tf.nn.dropout(hidden, 0.5, seed=SEED)
return hidden
# Training computation: logits + cross-entropy loss.
logits = model(train_data_node, True)
loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits, train_labels_node))
    # L2 regularization for the trainable parameters. (The original MNIST
    # example regularized fc1/fc2 weights, which this model no longer has,
    # so the conv-layer parameters are regularized instead.)
    regularizers = (tf.nn.l2_loss(conv1_weights) + tf.nn.l2_loss(conv1_biases) +
                    tf.nn.l2_loss(conv2_weights) + tf.nn.l2_loss(conv2_biases))
    # Add the regularization term to the loss.
    loss += 5e-4 * regularizers
# Optimizer: set up a variable that's incremented once per batch and
# controls the learning rate decay.
batch = tf.Variable(0, dtype=data_type())
# Decay once per epoch, using an exponential schedule starting at 0.01.
learning_rate = tf.train.exponential_decay(
0.1, # Base learning rate.
batch * BATCH_SIZE, # Current index into the dataset.
train_size, # Decay step.
0.95, # Decay rate.
staircase=True)
# Use simple momentum for the optimization.
#optimizer = tf.train.MomentumOptimizer(learning_rate,
# 0.9).minimize(loss,
# global_step=batch)
    optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss, global_step=batch)
# Predictions for the current training minibatch.
train_prediction = tf.nn.softmax(logits)
# Predictions for the test and validation, which we'll compute less often.
eval_prediction = tf.nn.softmax(model(eval_data))
saver = tf.train.Saver(tf.all_variables())
# Small utility function to evaluate a dataset by feeding batches of data to
# {eval_data} and pulling the results from {eval_predictions}.
# Saves memory and enables this to run on smaller GPUs.
def eval_in_batches(data, sess):
"""Get all predictions for a dataset by running it in small batches."""
size = data.shape[0]
if size < EVAL_BATCH_SIZE:
raise ValueError("batch size for evals larger than dataset: %d" % size)
predictions = numpy.ndarray(shape=(size, NUM_LABELS), dtype=numpy.float32)
for begin in xrange(0, size, EVAL_BATCH_SIZE):
end = begin + EVAL_BATCH_SIZE
if end <= size:
predictions[begin:end, :] = sess.run(
eval_prediction,
feed_dict={eval_data: data[begin:end, ...]})
else:
batch_predictions = sess.run(
eval_prediction,
feed_dict={eval_data: data[-EVAL_BATCH_SIZE:, ...]})
predictions[begin:, :] = batch_predictions[begin - size:, :]
return predictions
# Create a local session to run the training.
start_time = time.time()
with tf.Session() as sess:
# Run all the initializers to prepare the trainable parameters.
tf.initialize_all_variables().run()
# saver.restore(sess,'model.ckpt-77100')
print('Initialized!')
# Loop through training steps.
for step in xrange(int(num_epochs * train_size) // BATCH_SIZE):
# Compute the offset of the current minibatch in the data.
# Note that we could use better randomization across epochs.
offset = (step * BATCH_SIZE) % (train_size - BATCH_SIZE)
batch_data = train_data[offset:(offset + BATCH_SIZE), ...]
batch_labels = train_labels[offset:(offset + BATCH_SIZE)]
# This dictionary maps the batch data (as a numpy array) to the
# node in the graph it should be fed to.
feed_dict = {train_data_node: batch_data,
train_labels_node: batch_labels}
# Run the graph and fetch some of the nodes.
_, l, lr, predictions = sess.run(
[optimizer, loss, learning_rate, train_prediction],
feed_dict=feed_dict)
if step % EVAL_FREQUENCY == 0:
elapsed_time = time.time() - start_time
start_time = time.time()
print('Step %d (epoch %.2f), %.1f ms' %
(step, float(step) * BATCH_SIZE / train_size,
1000 * elapsed_time / EVAL_FREQUENCY))
print('Minibatch loss: %.3f, learning rate: %.6f' % (l, lr))
print('Minibatch error: %.1f%%' % error_rate(predictions, batch_labels))
print('Validation error: %.1f%%' % error_rate(
eval_in_batches(validation_data, sess), validation_labels))
sys.stdout.flush()
# Save the model checkpoint periodically.
            if step % (EVAL_FREQUENCY * 10) == 0:
checkpoint_path = os.path.join('','model.ckpt')
saver.save(sess, checkpoint_path, global_step=step)
#save final parameters
checkpoint_path = os.path.join('', 'model.ckpt')
saver.save(sess, checkpoint_path, global_step=step)
# Finally print the result!
test_error = error_rate(eval_in_batches(test_data, sess), test_labels)
print('Test error: %.1f%%' % test_error)
if FLAGS.self_test:
print('test_error', test_error)
assert test_error == 0.0, 'expected 0.0 test_error, got %.2f' % (
test_error,)
if __name__ == '__main__':
tf.app.run()
| 42.287926 | 111 | 0.63028 |
4a1dfa4b88f84d7416e86488890e925cc43bcaa6 | 9,685 | py | Python | search/routes/classic_api/tests/test_classic.py | ID2797370/arxiv-search | 889402e8eef9a2faaa8e900978cd27ff2784ce33 | [
"MIT"
] | 35 | 2018-12-18T02:51:09.000Z | 2022-03-30T04:43:20.000Z | search/routes/classic_api/tests/test_classic.py | ID2797370/arxiv-search | 889402e8eef9a2faaa8e900978cd27ff2784ce33 | [
"MIT"
] | 172 | 2018-02-02T14:35:11.000Z | 2018-12-04T15:35:30.000Z | search/routes/classic_api/tests/test_classic.py | ID2797370/arxiv-search | 889402e8eef9a2faaa8e900978cd27ff2784ce33 | [
"MIT"
] | 13 | 2019-01-10T22:01:48.000Z | 2021-11-05T12:25:08.000Z | """Tests for API routes."""
import os
from http import HTTPStatus
from xml.etree import ElementTree
from unittest import TestCase, mock, skip
from arxiv.users import helpers, auth
from arxiv.users.domain import Scope
from search import consts
from search import factory
from search import domain
from search.tests import mocks
class TestClassicAPISearchRequests(TestCase):
"""Requests against the classic search API."""
def setUp(self):
"""Instantiate and configure an API app."""
jwt_secret = "foosecret"
os.environ["JWT_SECRET"] = jwt_secret
self.app = factory.create_classic_api_web_app()
self.app.config["JWT_SECRET"] = jwt_secret
self.client = self.app.test_client()
self.auth_header = {
"Authorization": helpers.generate_token(
"1234",
"[email protected]",
"foouser",
scope=[auth.scopes.READ_PUBLIC],
)
}
@staticmethod
def mock_classic_controller(controller, method="query", **kwargs):
docs: domain.DocumentSet = {
"results": [mocks.document()],
"metadata": {"start": 0, "end": 1, "size": 50, "total": 1},
}
r_data = domain.ClassicSearchResponseData(
results=docs,
query=domain.ClassicAPIQuery(
**(kwargs or {"search_query": "all:electron"})
),
)
getattr(controller, method).return_value = r_data, HTTPStatus.OK, {}
@skip("auth scope currently disabled for classic API")
def test_request_without_token(self):
"""No auth token is provided on the request."""
response = self.client.get("/query?search_query=au:copernicus")
self.assertEqual(response.status_code, HTTPStatus.UNAUTHORIZED)
@skip("auth scope currently disabled for classic API")
def test_with_token_lacking_scope(self):
"""Client auth token lacks required public read scope."""
token = helpers.generate_token(
"1234",
"[email protected]",
"foouser",
scope=[Scope("something", "read")],
)
response = self.client.get(
"/query?search_query=au:copernicus",
headers={"Authorization": token},
)
self.assertEqual(response.status_code, HTTPStatus.FORBIDDEN)
@mock.patch(f"{factory.__name__}.classic_api.classic_api")
def test_with_valid_token(self, mock_controller):
"""Client auth token has required public read scope."""
self.mock_classic_controller(mock_controller, id_list=["1234.5678"])
response = self.client.get(
"/query?search_query=au:copernicus", headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.OK)
@mock.patch(f"{factory.__name__}.classic_api.classic_api")
def test_paper_retrieval(self, mock_controller):
"""Test single-paper retrieval."""
self.mock_classic_controller(mock_controller, method="paper")
response = self.client.get("/1234.56789v6", headers=self.auth_header)
self.assertEqual(response.status_code, HTTPStatus.OK)
# Validation errors
def _fix_path(self, path):
return "/".join(
[
"{{http://www.w3.org/2005/Atom}}{}".format(p)
for p in path.split("/")
]
)
def _node(self, et: ElementTree, path: str):
"""Return the node."""
return et.find(self._fix_path(path))
def _text(self, et: ElementTree, path: str):
"""Return the text content of the node."""
return et.findtext(self._fix_path(path))
def check_validation_error(self, response, error, link):
et = ElementTree.fromstring(response.get_data(as_text=True))
self.assertEqual(self._text(et, "entry/id"), link)
self.assertEqual(self._text(et, "entry/title"), "Error")
self.assertEqual(self._text(et, "entry/summary"), error)
link_attrib = self._node(et, "entry/link").attrib
self.assertEqual(link_attrib["href"], link)
def test_start_not_a_number(self):
response = self.client.get(
"/query?search_query=au:copernicus&start=non_number",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
"start must be an integer",
"https://arxiv.org/api/errors#start_must_be_an_integer",
)
def test_start_negative(self):
response = self.client.get(
"/query?search_query=au:copernicus&start=-1",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
"start must be non-negative",
"https://arxiv.org/api/errors#start_must_be_non-negative",
)
def test_max_results_not_a_number(self):
response = self.client.get(
"/query?search_query=au:copernicus&" "max_results=non_number",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
"max_results must be an integer",
"https://arxiv.org/api/errors#max_results_must_be_an_integer",
)
def test_max_results_negative(self):
response = self.client.get(
"/query?search_query=au:copernicus&max_results=-1",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
"max_results must be non-negative",
"https://arxiv.org/api/errors#max_results_must_be_non-negative",
)
@mock.patch(f"{factory.__name__}.classic_api.classic_api")
def test_sort_by_valid_values(self, mock_controller):
self.mock_classic_controller(mock_controller)
for value in domain.SortBy:
response = self.client.get(
f"/query?search_query=au:copernicus&" f"sortBy={value}",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.OK)
def test_sort_by_invalid_values(self):
response = self.client.get(
"/query?search_query=au:copernicus&sortBy=foo",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
f"sortBy must be in: {', '.join(domain.SortBy)}",
"https://arxiv.org/help/api/user-manual#sort",
)
@mock.patch(f"{factory.__name__}.classic_api.classic_api")
def test_sort_direction_valid_values(self, mock_controller):
self.mock_classic_controller(mock_controller)
for value in domain.SortDirection:
response = self.client.get(
f"/query?search_query=au:copernicus&" f"sortOrder={value}",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.OK)
def test_sort_direction_invalid_values(self):
response = self.client.get(
"/query?search_query=au:copernicus&sortOrder=foo",
headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
f"sortOrder must be in: {', '.join(domain.SortDirection)}",
"https://arxiv.org/help/api/user-manual#sort",
)
def test_sort_order(self):
# Default
sort_order = domain.SortOrder(by=None)
self.assertEqual(sort_order.to_es(), consts.DEFAULT_SORT_ORDER)
# Relevance/Score
sort_order = domain.SortOrder(by=domain.SortBy.relevance)
self.assertEqual(sort_order.to_es(), [{"_score": {"order": "desc"}}])
sort_order = domain.SortOrder(
by=domain.SortBy.relevance,
direction=domain.SortDirection.ascending,
)
self.assertEqual(sort_order.to_es(), [{"_score": {"order": "asc"}}])
# Submitted date/Publication date
sort_order = domain.SortOrder(by=domain.SortBy.submitted_date)
self.assertEqual(
sort_order.to_es(), [{"submitted_date": {"order": "desc"}}]
)
sort_order = domain.SortOrder(
by=domain.SortBy.submitted_date,
direction=domain.SortDirection.ascending,
)
self.assertEqual(
sort_order.to_es(), [{"submitted_date": {"order": "asc"}}]
)
# Last update date/Update date
sort_order = domain.SortOrder(by=domain.SortBy.last_updated_date)
self.assertEqual(
sort_order.to_es(), [{"updated_date": {"order": "desc"}}]
)
sort_order = domain.SortOrder(
by=domain.SortBy.last_updated_date,
direction=domain.SortDirection.ascending,
)
self.assertEqual(
sort_order.to_es(), [{"updated_date": {"order": "asc"}}]
)
def test_invalid_arxiv_id(self):
response = self.client.get(
"/query?id_list=cond—mat/0709123", headers=self.auth_header,
)
self.assertEqual(response.status_code, HTTPStatus.BAD_REQUEST)
self.check_validation_error(
response,
"incorrect id format for cond—mat/0709123",
"https://arxiv.org/api/errors#"
"incorrect_id_format_for_cond—mat/0709123",
)
| 37.832031 | 77 | 0.621786 |
4a1dfa6f946e4eac519d3991b0c76b21a0eaf60a | 754 | py | Python | release/stubs.min/System/Windows/Forms/__init___parts/ToolStripDropDownClosingEventArgs.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | release/stubs.min/System/Windows/Forms/__init___parts/ToolStripDropDownClosingEventArgs.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | release/stubs.min/System/Windows/Forms/__init___parts/ToolStripDropDownClosingEventArgs.py | tranconbv/ironpython-stubs | a601759e6c6819beff8e6b639d18a24b7e351851 | [
"MIT"
] | null | null | null | class ToolStripDropDownClosingEventArgs(CancelEventArgs):
"""
Provides data for the System.Windows.Forms.ToolStripDropDown.Closing event.
ToolStripDropDownClosingEventArgs(reason: ToolStripDropDownCloseReason)
"""
def Instance(self):
""" This function has been arbitrarily put into the stubs"""
return ToolStripDropDownClosingEventArgs()
@staticmethod
def __new__(self,reason):
""" __new__(cls: type,reason: ToolStripDropDownCloseReason) """
pass
CloseReason=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the reason that the System.Windows.Forms.ToolStripDropDown is closing.
Get: CloseReason(self: ToolStripDropDownClosingEventArgs) -> ToolStripDropDownCloseReason
"""
| 32.782609 | 91 | 0.762599 |
4a1dfaed85ec187b98ffe39338c11d7ff497edba | 7,064 | py | Python | lib/cell_subtype_hierarchy.py | calico/2019_murine_cell_aging | 11ab15b82df886c4a51f73b25c061a9360862c2e | [
"Apache-2.0"
] | 1 | 2020-12-03T21:28:05.000Z | 2020-12-03T21:28:05.000Z | lib/cell_subtype_hierarchy.py | calico/2019_murine_cell_aging | 11ab15b82df886c4a51f73b25c061a9360862c2e | [
"Apache-2.0"
] | null | null | null | lib/cell_subtype_hierarchy.py | calico/2019_murine_cell_aging | 11ab15b82df886c4a51f73b25c061a9360862c2e | [
"Apache-2.0"
] | 1 | 2021-04-16T12:01:16.000Z | 2021-04-16T12:01:16.000Z | '''Define cell type subtype hierarchy'''
import numpy as np
import torch
'''
The following hash table provides a formal description of our
cell type::cell state ontology, which we combine to describe
unique cell identities.
We name canonical cell states (e.g. CD4 T cell) where possible,
and otherwise name transcriptionally distinct cell states based
on their most prominent marker gene (e.g. Gucy1a3 stromal cell).
'''
# names too long to type
tub_name = 'kidney proximal straight tubule epithelial cell'
# Key: Cell Type
# Value: tuple of valid subtypes
cell_types = {
'macrophage': (
'CD4 macrophage',
'CD8 macrophage',
'activated macrophage'),
'T cell': (
'CD4 T cell',
'CD8 T cell',
'memory T cell'),
'stromal cell': (
'Npnt stromal cell',
'Dcn stromal cell',
'Hhip stromal cell',
        'Gucy1a3 stromal cell'),
'kidney collecting duct epithelial cell': (
'Cald1 kidney collecting duct epithelial cell',
'Aqp3 kidney collecting duct epithelial cell',
'Slc12a3 kidney collecting duct epithelial cell'),
'kidney capillary endothelial cell': (
'Pvlap kidney capillary endothelial cell',
'Vim kidney capillary endothelial cell',
'Ehd3 kidney capillary endothelial cell',),
tub_name: (
'Fabp3 %s' % tub_name,
'Prlr %s' % tub_name,
'Kap %s' % tub_name,),
}
def find_likely_subtype(
cell_type_assignment: np.ndarray,
subtype_order: np.ndarray,
scores: np.ndarray,
hierarchy: dict,
) -> np.ndarray:
'''Find the most likely valid subtype for each cell using
given subtype classifier output and a type::subtype hierarchy.
Parameters
----------
cell_type_assignment : np.ndarray
[Cells,] cell type assignments. must match keys in `hierarchy`.
subtype_order : np.ndarray
[Subtypes,] subtype labels represented by each column of `scores`.
must match values listed in `hierarchy`.
scores : np.ndarray
[Cells, Subtypes] scores from a classification model.
NOT softmax scaled. A softmax activation is applied to only
the scores of valid subtypes when identifying likely subtypes.
hierarchy : dict
keys are cell types, values are tuples of valid subtype names.
defines which subtypes are valid for a cell type.
Returns
-------
predicted_subtypes : np.ndarray
[Cells,] subtype assignments with only valid subtypes allowed.
'''
uniq_cell_types = np.unique(cell_type_assignment)
uniq_cell_types_w_subtypes = np.array(
[x for x in uniq_cell_types if x in hierarchy.keys()])
    # object dtype prevents longer subtype names from being truncated
    # when assigned into an array of (shorter) cell type strings
    predicted_subtypes = np.array(cell_type_assignment, dtype=object).copy()
if not subtype_order.shape[0] == scores.shape[1]:
msg = f'{subtype_order.shape[0]} subtypes does not match {scores.shape[1]} scores.'
raise ValueError(msg)
for i, ct in enumerate(uniq_cell_types_w_subtypes):
ct_bidx = cell_type_assignment == ct
# find valid subtype columns
valid_subtypes = hierarchy[ct]
st_bidx = np.array(
[True if x in valid_subtypes else False for x in subtype_order])
ct_valid_scores = scores[ct_bidx, :][:, st_bidx]
# softmax transform
sm_valid_scores = torch.nn.functional.softmax(
torch.from_numpy(ct_valid_scores),
dim=1)
# likeliest subtype idx
_, pred = torch.max(sm_valid_scores, dim=1)
# order the subtypes in the same order they appear in the scores
subtypes_ordered = subtype_order[st_bidx]
        pred_idx = pred.detach().squeeze().numpy().astype(int)
subtype_name = subtypes_ordered[pred_idx]
predicted_subtypes[ct_bidx] = subtype_name
return predicted_subtypes
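# Illustrative example (hypothetical arrays, not data from the paper):
#   cts = np.array(['T cell', 'T cell'])
#   order = np.array(['CD4 T cell', 'CD8 T cell', 'memory T cell'])
#   raw = np.array([[2.0, 0.1, 0.3], [0.2, 3.0, 0.1]])
#   find_likely_subtype(cts, order, raw, cell_types)
#   # -> array(['CD4 T cell', 'CD8 T cell'], dtype=object)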
'''Short naming scheme
These hash tables describe the mapping from official OBO cell ontology classes
to short, colloquial names we use for cell types in our paper.
`tissue_shortener` removes tissue names in contexts where the tissue can be
assumed (e.g. a UMAP plot of the kidney can omit kidney prefixes).
'''
shortener = {'natural killer cell': 'NK cell',
'kidney collecting duct epithelial cell': 'kidney duct epi.',
'kidney capillary endothelial cell': 'kidney cap. endo.',
'kidney loop of Henle ascending limb epithelial cell': 'kidney loop. epi.',
'kidney proximal straight tubule epithelial cell': 'kidney tub. epi.',
'ciliated columnar cell of tracheobronchial tree': 'columnar tracheo.',
'stromal cell': 'stromal',
'lung endothelial cell': 'lung endothelial',
}
tissue_shortener = {
'natural killer cell': 'NK cell',
'kidney collecting duct epithelial cell': 'duct epi.',
'kidney capillary endothelial cell': 'cap. endo.',
'kidney loop of Henle ascending limb epithelial cell': 'loop. epi.',
'kidney proximal straight tubule epithelial cell': 'tub. epi.',
'ciliated columnar cell of tracheobronchial tree': 'ciliated tracheobronch.',
'stromal cell': 'stromal',
'lung endothelial cell': 'endothelial',
}
def cellonto2shortname(long_names: list, shortener: dict) -> list:
'''Convert cell ontology classes to short names
Parameters
----------
long_names : list
str long names in the cell ontology class hierarchy.
shortener : dict
        Keyed by strings to replace in `long_names`, with values giving their
        short replacements,
        e.g. 'natural killer cell' : 'NK cell'.
Returns
-------
names : list
shortened list of names.
'''
for k in shortener:
long_names = [x.replace(k, shortener[k]) for x in long_names]
return long_names
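# --- Illustrative usage (editor's sketch) ---
# Replacement is substring-based and applied in dict order, so e.g.:
if __name__ == '__main__':
    print(cellonto2shortname(['natural killer cell', 'stromal cell'],
                             shortener))  # -> ['NK cell', 'stromal']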
'''Marker genes for cell types.
Garnered from the Tabula Muris supplement.
https://static-content.springer.com/esm/art%3A10.1038%2Fs41586-018-0590-4/MediaObjects/41586_2018_590_MOESM3_ESM.pdf
This hash table formally describes the marker genes we used to check our cell
type identifications against cell types in the Tabula Muris.
'''
marker_genes = {
'T cell': ['Cd3e', 'Cd4', 'Cd8a', ],
'B cell': ['Cd79a', 'Cd22', 'Cd19'],
'natural killer cell': ['Nkg7', 'Klrb1c'],
'classical monocyte': ['Cd14', 'Fcgr1'],
'non-classical monocyte': ['Fcgr3', 'Tnfrsf1b'],
'macrophage': ['Cd68', 'Emr1'],
'kidney collecting duct epithelial cell': ['Slc12a3', 'Pvalb'],
'kidney capillary endothelial cell': ['Pecam1', 'Podxl', ],
'kidney proximal straight tubule epithelial cell': ['Vil1', 'Slc22a12'],
'mesangial cell': ['Des', 'Gucy1a3', 'Cspg4', 'Pdgfrb', 'Acta2', 'Vim', 'Gsn', 'Dcn'],
'kidney cell': ['Itgam'],
'type II pneumocyte': ['Ager', 'Epcam', 'Sftpb'],
'stromal cell': ['Npnt', 'Gucy1a3', 'Pdpn', 'Col1a1'],
'mast cell': ['Cd200r3', ],
'lung endothelial cell': ['Pecam1'],
'alveolar macrophage': ['Mrc1', 'Itgax'],
'ciliated columnar cell of tracheobronchial tree': ['Cd24a'], }
| 36.791667 | 116 | 0.659117 |
4a1dfb620aa87b790969554d8674dd1580828a8f | 2,800 | py | Python | airflow/utils/state.py | diggzhang/airflow-dingit | 41482b83130d5815b772840681fb36eb9bfa69b9 | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | 5 | 2019-05-17T01:30:27.000Z | 2021-06-17T21:03:30.000Z | airflow/utils/state.py | curest0x1021/incubator-airflow | e6d3160a061dbaa6042d524095dcd1cbc15e0bcd | [
"Apache-2.0"
] | 4 | 2020-12-08T05:25:08.000Z | 2021-06-25T15:40:57.000Z | airflow/utils/state.py | curest0x1021/incubator-airflow | e6d3160a061dbaa6042d524095dcd1cbc15e0bcd | [
"Apache-2.0"
] | 9 | 2017-08-24T15:47:44.000Z | 2022-02-14T03:30:49.000Z | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from builtins import object
class State(object):
"""
Static class with task instance states constants and color method to
avoid hardcoding.
"""
# scheduler
NONE = None
REMOVED = "removed"
SCHEDULED = "scheduled"
# set by the executor (t.b.d.)
# LAUNCHED = "launched"
# set by a task
QUEUED = "queued"
RUNNING = "running"
SUCCESS = "success"
SHUTDOWN = "shutdown" # External request to shut down
FAILED = "failed"
UP_FOR_RETRY = "up_for_retry"
UPSTREAM_FAILED = "upstream_failed"
SKIPPED = "skipped"
task_states = (
SUCCESS,
RUNNING,
FAILED,
UPSTREAM_FAILED,
UP_FOR_RETRY,
QUEUED,
)
dag_states = (
SUCCESS,
RUNNING,
FAILED,
)
state_color = {
QUEUED: 'gray',
RUNNING: 'lime',
SUCCESS: 'green',
SHUTDOWN: 'blue',
FAILED: 'red',
UP_FOR_RETRY: 'gold',
UPSTREAM_FAILED: 'orange',
SKIPPED: 'pink',
REMOVED: 'lightgrey',
SCHEDULED: 'white',
}
@classmethod
def color(cls, state):
if state in cls.state_color:
return cls.state_color[state]
else:
return 'white'
@classmethod
def color_fg(cls, state):
color = cls.color(state)
if color in ['green', 'red']:
return 'white'
else:
return 'black'
@classmethod
def finished(cls):
"""
A list of states indicating that a task started and completed a
run attempt. Note that the attempt could have resulted in failure or
have been interrupted; in any case, it is no longer running.
"""
return [
cls.SUCCESS,
cls.SHUTDOWN,
cls.FAILED,
cls.SKIPPED,
]
@classmethod
def unfinished(cls):
"""
A list of states indicating that a task either has not completed
a run or has not even started.
"""
return [
cls.NONE,
cls.SCHEDULED,
cls.QUEUED,
cls.RUNNING,
cls.UP_FOR_RETRY
]
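# --- Illustrative usage (editor's sketch; not part of the original module) ---
if __name__ == '__main__':
    assert State.color(State.SUCCESS) == 'green'
    assert State.color('no_such_state') == 'white'  # unknown states fall back
    assert State.RUNNING in State.unfinished()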
| 24.561404 | 76 | 0.584286 |
4a1dfc63120f0b1cbfb3b2dda2a35d15b946ba45 | 4,693 | py | Python | src/utils.py | kk19990709/pytorch-AAPR | 1bd1cbae704f6cb6313da015cbfd2cba80b06638 | [
"OML"
] | null | null | null | src/utils.py | kk19990709/pytorch-AAPR | 1bd1cbae704f6cb6313da015cbfd2cba80b06638 | [
"OML"
] | null | null | null | src/utils.py | kk19990709/pytorch-AAPR | 1bd1cbae704f6cb6313da015cbfd2cba80b06638 | [
"OML"
] | null | null | null | import nltk
import re
# nltk.download(['wordnet', "stopwords", "tagsets", "averaged_perceptron_tagger", "punkt", "vader_lexicon"])
english_stopwords = nltk.corpus.stopwords.words("english")
def tokenizer_author(text):
return text.split(',')
def tokenizer_category(text):
return text.split(',')
def clean_math(string):
while string.count('$') > 1:
pos0 = string.find('$')
pos1 = string.find('$', pos0+1)
string = (string[:pos0] + string[pos1+1:]).strip()
return string
def clean_str(string):
"""
Input:
string: One line in a latex file.
Return:
string cleaned.
"""
# Remove mathematical formulas between $$
string = clean_math(string)
# Remove "ref"
string = re.sub(r'~(.*)}', '', string)
string = re.sub(r'\\cite(.*)}', '', string)
string = re.sub(r'\\newcite(.*)}', '', string)
string = re.sub(r'\\ref(.*)}', '', string)
# Remove stopwords
texts_tokenized = [word.lower() for word in nltk.tokenize.word_tokenize(string)]
texts_filtered_stopwords = [word for word in texts_tokenized if word not in english_stopwords]
string = ' '.join(texts_filtered_stopwords)
string = string.replace(',', '')
string = string.replace('.', '')
string = string.replace('?', '')
string = string.replace('!', '')
string = string.replace('/', '')
string = string.replace('$', '')
string = string.replace('~', '')
string = string.replace('\\', '')
string = string.replace('{', '')
string = string.replace('}', '')
string = string.replace('#', '')
string = string.replace('&', '')
string = string.replace('@', '')
string = string.replace('%', '')
string = string.replace('^', '')
string = string.replace('*', '')
string = string.replace('-', '')
string = string.replace('=', '')
string = string.replace('[', '')
string = string.replace(']', '')
string = string.replace('+', '')
string = string.replace('(', '')
string = string.replace(')', '')
return string
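# --- Illustrative usage (editor's sketch; requires the nltk 'punkt' and
# 'stopwords' corpora to be downloaded, see the commented download above) ---
if __name__ == '__main__':
    demo_line = 'We evaluate $f(x)$ on the test set~\\cite{smith2020} (Table 2).'
    # prints roughly: 'evaluate test set  table 2'
    print(clean_str(demo_line))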
def process_text_list(text_list):
"""
Input:
text_list: Content of a latex file and each element represents a line.
Return:
A list, which is the cleaned content of a latex file.
"""
result = ''
for line in text_list:
line = line.strip()
        if line.startswith('%') or line.startswith('\\') or line == '':  # TODO: drop comment lines; unclear when the leading '\\' case applies
pass
        elif line[0].isdigit():  # TODO: unclear why lines starting with a digit are dropped
pass
else:
result += clean_str(line)
return result
# Extract Introduction, related work, etc.================================================================
def extract_low(text):
text = ' '.join(text)
sections = []
for section in ('Intro', 'Relat', 'Concl'):
temp = re.search(r'\\Section({' + section + '.*?)\\Section', text, flags=re.S | re.I)
if temp is None:
sections.append('')
else:
sections.append(temp.group(1))
intro, relat, concl = sections
methods = text.replace(intro, '').replace(relat, '').replace(concl, '')
return list(map(process_text_list, [intro.split('\n'), relat.split('\n'), methods.split('\n'), concl.split('\n')]))
# def resolve(text):
# text = ' '.join(text)
# text = text.lower()
# items = re.findall(r'(?<=\\Section{)(.*?)}', text, flags=re.S | re.I)
# return items
def extract(text):
text = ' '.join(text)
intro_flag = ['intro', 'preli', 'backg', 'overv']
relat_flag = ['relat', 'prior', 'previo']
concl_flag = ['concl', 'summa', 'discu', 'futur', 'append', 'applica']
# model_flag = ''
# exper_flag = ''
# resul_flag = '(resul|perfo|)'
relat = ''
for flag in relat_flag:
temp = re.search(r'(?<=\\Section){.*?'+flag+'.*?}(.*?)(?=\\Section|$)', text, flags=re.S | re.I)
if temp:
relat += temp.group(0) + ' '
intro = ''
for flag in intro_flag:
temp = re.search(r'(?<=\\Section){.*?'+flag+'.*?}(.*?)(?=\\Section|$)', text, flags=re.S | re.I)
if temp:
intro += temp.group(0) + ' '
concl = ''
for flag in concl_flag:
temp = re.search(r'(?<=\\Section){.*?'+flag+'.*?}(.*?)(?=\\Section|$)', text, flags=re.S | re.I)
if temp:
concl += temp.group(0) + ' '
metho = text.replace(intro, '').replace(relat, '').replace(concl, '')
return list(map(process_text_list, [intro.split('\n'), relat.split('\n'), metho.split('\n'), concl.split('\n')]))
| 33.049296 | 120 | 0.530791 |
4a1dfc84d9c48088cfddf9ea4d6b71f241f4d363 | 1,131 | py | Python | applePicker.py | DarkTheCross/rl_experiments | 3b448d946e18b8d8e40b45b71f4da2fba4e6eb66 | [
"MIT"
] | null | null | null | applePicker.py | DarkTheCross/rl_experiments | 3b448d946e18b8d8e40b45b71f4da2fba4e6eb66 | [
"MIT"
] | null | null | null | applePicker.py | DarkTheCross/rl_experiments | 3b448d946e18b8d8e40b45b71f4da2fba4e6eb66 | [
"MIT"
] | 1 | 2021-02-27T07:40:48.000Z | 2021-02-27T07:40:48.000Z | import numpy as np
import random
class ApplePicker:
def __init__(self):
self.gameState = {"board": np.random.choice(5, 1), "y": 0, "x": np.random.choice(7, 1)}
def render(self):
gameScene = np.zeros((7,7), dtype=np.float32)
gameScene[6, int(self.gameState["board"]):int(self.gameState["board"])+1] = 1
gameScene[self.gameState["y"], self.gameState["x"]] = 1
return gameScene
def reset(self):
self.gameState["y"] = 0
self.gameState["x"] = random.randint(0, 6)
return self.render()
def step(self, act):
rwd = 0
done = False
self.gameState["board"] += act-1
self.gameState["board"] = max(self.gameState["board"], 0)
self.gameState["board"] = min(self.gameState["board"], 6)
self.gameState["y"] += 1
if self.gameState["y"] == 6:
if self.gameState["x"] >= self.gameState["board"] and self.gameState["x"] < self.gameState["board"] + 1:
rwd = 1
else:
rwd = -1
done = True
obs = self.render()
return obs, rwd, done
| 33.264706 | 116 | 0.543767 |
4a1dfddf96857edb95f960f83bf9b8a930f51071 | 1,862 | py | Python | tempest/api_schema/compute/parameter_types.py | rcbops-qe/tempest | 88960aa32c473b64072671541a136dbae41b1d4c | [
"Apache-2.0"
] | 3 | 2015-03-03T15:43:06.000Z | 2016-10-24T06:12:40.000Z | tempest/api_schema/compute/parameter_types.py | rcbops-qe/tempest | 88960aa32c473b64072671541a136dbae41b1d4c | [
"Apache-2.0"
] | null | null | null | tempest/api_schema/compute/parameter_types.py | rcbops-qe/tempest | 88960aa32c473b64072671541a136dbae41b1d4c | [
"Apache-2.0"
] | 1 | 2019-02-14T23:36:55.000Z | 2019-02-14T23:36:55.000Z | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
links = {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'href': {
'type': 'string',
'format': 'uri'
},
'rel': {'type': 'string'}
},
'required': ['href', 'rel']
}
}
mac_address = {
'type': 'string',
'pattern': '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
}
access_ip_v4 = {
'type': 'string',
'anyOf': [{'format': 'ipv4'}, {'enum': ['']}]
}
access_ip_v6 = {
'type': 'string',
'anyOf': [{'format': 'ipv6'}, {'enum': ['']}]
}
addresses = {
'type': 'object',
'patternProperties': {
# NOTE: Here is for 'private' or something.
'^[a-zA-Z0-9-_.]+$': {
'type': 'array',
'items': {
'type': 'object',
'properties': {
'version': {'type': 'integer'},
'addr': {
'type': 'string',
'anyOf': [
{'format': 'ipv4'},
{'format': 'ipv6'}
]
}
},
'required': ['version', 'addr']
}
}
}
}
| 27.382353 | 78 | 0.459184 |
4a1dfe6ceb6fbba544f7a14479bb135a89eab35b | 9,937 | py | Python | scripts/Odometry_class.py | MostafaOsman144/moving_horizon_estimation_localization | 5d8ba1575b54a867bc8c1b9b1e482b1fbc2799ef | [
"MIT"
] | 5 | 2021-07-09T08:01:00.000Z | 2021-12-30T12:26:16.000Z | scripts/Odometry_class.py | MostafaOsman144/moving_horizon_estimation_localization | 5d8ba1575b54a867bc8c1b9b1e482b1fbc2799ef | [
"MIT"
] | null | null | null | scripts/Odometry_class.py | MostafaOsman144/moving_horizon_estimation_localization | 5d8ba1575b54a867bc8c1b9b1e482b1fbc2799ef | [
"MIT"
] | 3 | 2021-11-14T06:33:26.000Z | 2021-12-30T12:26:36.000Z | #!/usr/bin/env python
"""
Moving Horizon Estimation Localization
Copyright © 2020 Mostafa Osman
Permission is hereby granted, free of charge,
to any person obtaining a copy of this software
and associated documentation files (the “Software”),
to deal in the Software without restriction,
including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom
the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import rospy
import math
import numpy as np
from tf.transformations import euler_from_quaternion
# ROS messages
from std_msgs.msg import Float64
from geometry_msgs.msg import Twist
from geometry_msgs.msg import Pose
from nav_msgs.msg import Odometry
class OdometryInterface:
def __init__(self):
self._Id = 0
self._topic_name = ''
self._odom_config = []
self._number_of_measurements = 0
self._C_matrix = None
self._measurement_memory = None
self.first_call = True
self._prev_measurement = None
self._measurement = 0
self.updated = False
self.type = ""
self._frame_id = ''
self._timestamp = 0
self._measurement_information = None
self._measurement_covariance = None
self.measurement_type = ''
self.mahalanobis_threshold = 0
def Odometry_initialize(self, Id, topic_name, odom_config, N_mhe, mahalanobis_threshold):
# Initializing the Odometry with its topic name and Id number
self._Id = Id
self._topic_name = topic_name
self._odom_config = odom_config
self._number_of_measurements = int(np.sum(odom_config[0:3])) + int(np.sum(odom_config[6:15]))
if odom_config[3] or odom_config[4] or odom_config[5]:
self._number_of_measurements += 3
self._C_matrix = np.zeros((self._number_of_measurements, 15))
self.form_C_matrix()
self._measurement_memory = np.zeros((self._number_of_measurements, N_mhe))
self.first_call = True
self._prev_measurement = np.zeros(self._number_of_measurements)
self._measurement = np.zeros(self._number_of_measurements)
self.updated = False
self.mahalanobis_threshold = mahalanobis_threshold
self.measurement_type = 'odom'
def Imu_initialize(self, Id, topic_name, odom_config, N_mhe, mahalanobis_threshold):
# Initializing the Odometry with its topic name and Id number
self._Id = Id
self._topic_name = topic_name
self._odom_config = odom_config
self._number_of_measurements = int(np.sum(odom_config))
self._C_matrix = np.zeros((self._number_of_measurements, 15))
self.form_C_matrix()
self._measurement_memory = np.zeros((self._number_of_measurements, N_mhe))
self.first_call = True
self._prev_measurement = np.zeros(self._number_of_measurements)
self._measurement = np.zeros(self._number_of_measurements)
self.updated = False
self.mahalanobis_threshold = mahalanobis_threshold
self.measurement_type = 'imu'
def form_C_matrix(self):
C_matrix_dummy = np.zeros((15, 15))
for i in range(0, 15):
C_matrix_dummy[i, i] = self._odom_config[i]
if self._odom_config[3] or self._odom_config[4] or self._odom_config[5]:
C_matrix_dummy[3, 3] = 1.0
C_matrix_dummy[4, 4] = 1.0
C_matrix_dummy[5, 5] = 1.0
k = 0
for i in range(0, 15):
if not np.sum(C_matrix_dummy[i, :]) == 0:
self._C_matrix[k, :] = C_matrix_dummy[i, :]
k += 1
    # Wrap an angle into the interval (-pi, pi]
@staticmethod
def limit_angles(unlimited_angle):
return math.atan2(math.sin(unlimited_angle), math.cos(unlimited_angle))
def odometryCb(self, data):
# Reading the Odometry Data and pushing them to the data vectors
# Making a tuple in order to be able to use the euler_from_quaternion function
states = []
if self.measurement_type == 'odom':
quat = (data.pose.pose.orientation.x,
data.pose.pose.orientation.y,
data.pose.pose.orientation.z,
data.pose.pose.orientation.w)
(roll, pitch, yaw) = euler_from_quaternion(quat)
if self._odom_config[3] == 0:
roll = 0
if self._odom_config[4] == 0:
pitch = 0
if self._odom_config[5] == 0:
yaw = 0
# Need to build the measurement matrix using the odometry configuration
states = [data.pose.pose.position.x, data.pose.pose.position.y, data.pose.pose.position.z,
self.limit_angles(roll), self.limit_angles(pitch), self.limit_angles(yaw),
data.twist.twist.linear.x, data.twist.twist.linear.y, data.twist.twist.linear.z,
data.twist.twist.angular.x, data.twist.twist.angular.y, data.twist.twist.angular.z,
0, 0, 0]
elif self.measurement_type == 'imu':
quat = (data.orientation.x,
data.orientation.y,
data.orientation.z,
data.orientation.w)
(roll, pitch, yaw) = euler_from_quaternion(quat)
if self._odom_config[3] == 0:
roll = 0
if self._odom_config[4] == 0:
pitch = 0
if self._odom_config[5] == 0:
yaw = 0
# Need to build the measurement matrix using the odometry configuration
states = [0, 0, 0,
self.limit_angles(roll), self.limit_angles(pitch), self.limit_angles(yaw),
0, 0, 0,
data.angular_velocity.x, data.angular_velocity.y, data.angular_velocity.z,
data.linear_acceleration.x, data.linear_acceleration.y, data.linear_acceleration.z]
self._measurement = np.matmul(self._C_matrix, states)
# Storing the parent frame_id and the timestamp
self._frame_id = data.header.frame_id
self._timestamp = data.header.stamp
# self._timestamp = rospy.get_time()
if self.measurement_type == 'odom':
# Need to build the covariance matrix using the odometry configuration
covariance = np.asarray(data.pose.covariance)
covariance = np.reshape(covariance, (6, 6))
velocity_covariance = np.asarray(data.twist.covariance)
velocity_covariance = np.reshape(velocity_covariance, (6, 6))
acceleration_covariance = np.identity(3)
# covariance = np.concatenate((covariance, np.zeros((6, 6))), axis=0)
# velocity_covariance = np.concatenate((np.zeros((6, 6)), velocity_covariance), axis=0)
full_covariance = np.identity(15)
full_covariance[0:6, 0:6] = covariance
full_covariance[6:12, 6:12] = velocity_covariance
full_covariance[12:15, 12:15] = acceleration_covariance
# full_covariance = casadi.horzcat(covariance, velocity_covariance)
# full_covariance = np.concatenate((covariance, velocity_covariance), axis=1)
self._measurement_covariance = np.matmul(self._C_matrix, np.matmul(full_covariance,
np.transpose(self._C_matrix)))
#
# for i in range(0, self._number_of_measurements):
# for j in range(0, self._number_of_measurements):
# if not i == j:
# self._measurement_covariance[i, j] = 0
# print self._measurement_covariance
elif self.measurement_type == 'imu':
# Need to build the covariance matrix using the odometry configuration
orientation_covariance = np.asarray(data.orientation_covariance)
orientation_covariance = np.reshape(orientation_covariance, (3, 3))
angular_velocity_covariance = np.asarray(data.angular_velocity_covariance)
angular_velocity_covariance = np.reshape(angular_velocity_covariance, (3, 3))
acceleration_covariance = np.asarray(data.linear_acceleration_covariance)
acceleration_covariance = np.reshape(acceleration_covariance, (3, 3))
full_covariance = np.identity(15)
full_covariance[3:6, 3:6] = orientation_covariance
full_covariance[9:12, 9:12] = angular_velocity_covariance
full_covariance[12:15, 12:15] = acceleration_covariance
self._measurement_covariance = np.matmul(self._C_matrix, np.matmul(full_covariance,
np.transpose(self._C_matrix)))
for i in range(0, self._number_of_measurements):
if self._measurement_covariance[i, i] < 1e-15:
self._measurement_covariance[i, i] = 1e-4
self._measurement_information = np.linalg.inv(self._measurement_covariance)
self.updated = True
# Change the first_cb flag
self.first_call = False
| 41.577406 | 109 | 0.633592 |
4a1dffc1258dd80c57f5b288ff0a0fb55525e32f | 23,557 | py | Python | lingvo/tasks/car/pillars.py | HubBucket-Team/lingvo | fb929def2f27cf73a6ee1b1eaa8bee982bd92987 | [
"Apache-2.0"
] | 1 | 2019-10-10T06:08:35.000Z | 2019-10-10T06:08:35.000Z | lingvo/tasks/car/pillars.py | VonRosenchild/lingvo | fb929def2f27cf73a6ee1b1eaa8bee982bd92987 | [
"Apache-2.0"
] | null | null | null | lingvo/tasks/car/pillars.py | VonRosenchild/lingvo | fb929def2f27cf73a6ee1b1eaa8bee982bd92987 | [
"Apache-2.0"
] | 1 | 2019-10-10T06:08:39.000Z | 2019-10-10T06:08:39.000Z | # Lint as: python2, python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""PointPillars implementation.
[1] PointPillars. https://arxiv.org/abs/1812.05784
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import enum
from lingvo import compat as tf
from lingvo.core import base_layer
from lingvo.core import layers
from lingvo.core import optimizer
from lingvo.core import py_utils
from lingvo.tasks.car import builder_lib
from lingvo.tasks.car import detection_3d_lib
from lingvo.tasks.car import point_detector
import numpy as np
def SparseToDense(grid_shape, locations, feats):
"""Converts a sparse representation back to the dense grid.
Args:
grid_shape: (nx, ny, nz). The shape of the grid.
locations: [b, p, 3]. Locations of the pillars.
feats: [b, p, fdims]. Extracted features for pillars.
Returns:
grid_feats of shape [b, nx, ny, nz * fdims].
"""
nx, ny, nz = grid_shape
b, p, _ = py_utils.GetShape(locations, 3)
feats = py_utils.HasShape(feats, [b, p, -1])
_, _, fdims = py_utils.GetShape(feats, 3)
indices = tf.concat(
[tf.tile(tf.range(b)[:, tf.newaxis, tf.newaxis], [1, p, 1]), locations],
axis=2)
grid = tf.scatter_nd(indices, feats, [b, nx, ny, nz, fdims])
return tf.reshape(grid, [b, nx, ny, nz * fdims])
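# --- Illustrative usage (editor's sketch; toy shapes, not part of lingvo) ---
# Scatters two pillars with 2-dim features onto a 2x2x1 grid; the returned
# tensor has shape [1, 2, 2, 2] with features at cells (x=0, y=1) and
# (x=1, y=0) and zeros elsewhere.
def _sparse_to_dense_demo():
  locations = tf.constant([[[0, 1, 0], [1, 0, 0]]])  # [batch=1, pillars=2, 3]
  feats = tf.constant([[[1., 2.], [3., 4.]]])  # [batch=1, pillars=2, fdims=2]
  return SparseToDense((2, 2, 1), locations, feats)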
class PointsToGridFeaturizer(base_layer.BaseLayer):
"""Layer for processing points to grid outputs."""
@classmethod
def Params(cls, num_laser_features):
p = super(PointsToGridFeaturizer, cls).Params()
p.Define('num_laser_features', num_laser_features,
'The number of (non-xyz) laser features of the input.')
builder = Builder()
total_num_laser_features = 9 + num_laser_features
p.Define('featurizer',
builder.Featurizer('feat', [total_num_laser_features, 64]),
'Point cloud feature extractor.')
return p
@base_layer.initializer
def __init__(self, params):
super(PointsToGridFeaturizer, self).__init__(params)
p = self.params
with tf.variable_scope(p.name):
self.CreateChild('featurizer', p.featurizer)
def FProp(self, theta, input_batch):
# pyformat: disable
"""Compute features for the pillars and convert them back to a dense grid.
Args:
theta: A `.NestedMap` object containing variable values of this task.
input_batch: A `.NestedMap` object containing input tensors. Following
keys are required:
- grid_num_points: Integer tensor with shape [batch size, nx, ny, nz],
where nx, ny, nz corresponds to the grid sizes (i.e., number of voxels
in each axis dimension).
- pillar_points: Float tensor with shape [batch size, num_pillars,
num_points_per_pillar, 3 + num_laser_features]
- pillar_centers: Float tensor with shape [batch size, num_pillars,
num_points_per_pillar, 3]
- pillar_locations: Float tensor with shape [batch size, num_pillars, 3]
Returns:
The dense features with shape [b, nx, ny, nz * fdims].
"""
# pyformat: enable
p = self.params
bs, nx, ny, nz = py_utils.GetShape(input_batch.grid_num_points, 4)
# Process points to concatenate a set of fixed features (e.g.,
# add means, centers, normalize points to means).
num_features = 3 + p.num_laser_features
pillar_points = py_utils.HasShape(input_batch.pillar_points,
[bs, -1, -1, num_features])
_, npillars, npoints, _ = py_utils.GetShape(pillar_points, 4)
pillar_xyz = pillar_points[..., :3]
# Compute number of points per pillar and prepare for broadcasting.
pillar_num_points = tf.gather_nd(
input_batch.grid_num_points, input_batch.pillar_locations, batch_dims=1)
pillar_num_points = pillar_num_points[..., tf.newaxis, tf.newaxis]
# Compute mean by computing sum and dividing by number of points. Clip the
# denominator by 1.0 to gracefully handle empty pillars.
pillar_sum = tf.reduce_sum(pillar_xyz, axis=2, keep_dims=True)
pillar_means = pillar_sum / tf.maximum(tf.to_float(pillar_num_points), 1.0)
pillar_feats = pillar_points[..., 3:]
pillar_centers = py_utils.HasShape(input_batch.pillar_centers,
[bs, -1, 1, 3])
pillar_concat = tf.concat(
axis=3,
values=[
pillar_xyz - pillar_means, pillar_feats,
tf.tile(pillar_means, [1, 1, npoints, 1]),
tf.tile(pillar_centers, [1, 1, npoints, 1])
])
# Featurize pillars.
pillar_features = self.featurizer.FProp(theta.featurizer, pillar_concat)
# Convert back to the dense grid.
pillar_locations = py_utils.HasShape(input_batch.pillar_locations,
[bs, npillars, 3])
dense_features = SparseToDense(
grid_shape=(nx, ny, nz),
locations=pillar_locations,
feats=pillar_features)
return dense_features
# pyformat: disable
class Builder(builder_lib.ModelBuilderBase):
"""Builder for the Pillars model."""
def __init__(self):
super(Builder, self).__init__()
self.conv_init_method = builder_lib.KaimingUniformFanInRelu
self.linear_params_init = py_utils.WeightInit.KaimingUniformFanInRelu()
self.bn_params_init = py_utils.WeightInit.UniformPositive()
def Featurizer(self, name, dims):
return self._Seq(
name,
self._MLP('mlp', dims),
self._Max('max'))
def _Deconv(self, name, filter_shape, stride):
return layers.DeconvLayer.Params().Set(
name=name,
filter_shape=filter_shape,
filter_stride=(stride, stride))
def _Block(self, name, stride, repeats, idims, odims):
"""[1]. Sec 2.2."""
return self._Seq(
name,
self._Conv('c3x3', (3, 3, idims, odims), stride),
self._Rep(
'rep',
repeats,
self._Conv('c3x3', (3, 3, odims, odims))),
self._Fetch('final'))
def _TopDown(self, name, strides=(2, 2, 2)):
"""[1]. Sec 2.2."""
if len(strides) != 3:
raise ValueError('`strides` expected to be list/tuple of len 3.')
return self._Seq(
name,
self._Block('b0', strides[0], 3, 64, 64),
self._Block('b1', strides[1], 5, 64, 128),
self._Block('b2', strides[2], 5, 128, 256))
def _Upsample(self, name, stride, idims, odims):
"""[1]. Sec 2.2."""
# Match the kernel size to the stride in order to ensure that the output
# activation map has no holes and to minimize any checkerboard artifacts.
# TODO(shlens): Consider replacing this in the future with a bilinear
# interpolation followed by a 3x3 convolution.
kernel = stride
return self._Seq(
name,
self._Deconv('deconv', (kernel, kernel, odims, idims), stride),
self._BN('bn', odims),
self._Relu('relu'))
def Contract(self, down_strides=(2, 2, 2)):
"""Contracting part of [1] Sec 2.2."""
return self._Branch(
'branch',
self._TopDown('topdown', strides=down_strides),
['b1.final', 'b0.final'])
def Expand(self, odims):
"""Expanding part of [1] Sec 2.2."""
# Note that the resulting output will be 3*odims
return self._Concat(
'concat',
self._Seq(
'b2',
self._ArgIdx('idx', [0]),
self._Upsample('ups', 4, 256, odims)),
self._Seq(
'b1',
self._ArgIdx('idx', [1]),
self._Upsample('ups', 2, 128, odims)),
self._Seq(
'b0',
self._ArgIdx('idx', [2]),
self._Upsample('ups', 1, 64, odims)))
def Backbone(self, odims, down_strides=(2, 2, 2)):
"""[1]. Sec 2.2."""
# We assume (H, W) are multiple of 8. So that we can concat
# multiple-scale feature maps together after upsample.
return self._Seq(
'backbone',
self.Contract(down_strides),
self.Expand(odims))
def Detector(self, name, idims, odims, bias_params_init=None):
# Implemented according to VoxelNet
# https://arxiv.org/pdf/1711.06396.pdf
# May add more Conv2D layers before predictor for better performance.
return self._Seq(
name,
self._ConvPlain('predict', (3, 3, idims, odims)),
self._Bias('predict_bias', odims, bias_params_init))
# pyformat: enable
class LossNormType(enum.Enum):
NO_NORM = 0
NORM_BY_NUM_POSITIVES = 1
class ModelV1(point_detector.PointDetectorBase):
"""PointPillars model.
Base class implements common Decoder functions, though they can be
overridden if desired.
"""
NUM_OUTPUT_CHANNELS = 128
@classmethod
def Params(cls,
grid_size_z=1,
num_anchors=2,
num_classes=1,
num_laser_features=1):
p = super(ModelV1, cls).Params(num_classes=num_classes)
p.Define('grid_size_z', grid_size_z, 'The grid size along the z-axis.')
p.Define('num_anchors', num_anchors, 'The number of anchor boxes.')
p.Define('num_laser_features', num_laser_features,
'The number of (non-xyz) laser features of the input.')
p.Define('input_featurizer',
PointsToGridFeaturizer.Params(num_laser_features),
'Point cloud feature extractor.')
builder = Builder()
p.Define('backbone', builder.Backbone(cls.NUM_OUTPUT_CHANNELS),
'Dense features pyramid.')
# Backbone() concatenates 3 different scales of features.
idims = 3 * cls.NUM_OUTPUT_CHANNELS
# 7: predicted (dx, dy, dz, dw, dl, dh, dt).
class_odims = grid_size_z * num_anchors * num_classes
reg_odims = grid_size_z * num_anchors * 7
rot_odims = grid_size_z * num_anchors * 2
# Although theoretically a single conv layer can generate both the
# regression and classification logits, we try to implement the paper
# faithfully, which uses two different layers.
p.Define('class_detector', builder.Detector('class', idims, class_odims),
'Dense features to class logits.')
p.Define('regression_detector', builder.Detector('reg', idims, reg_odims),
'Dense features to regression logits.')
p.Define('direction_classifier', builder.Detector('dir', idims, rot_odims),
'Dense features to rotation direction classifier.')
# We disable the direction classifier by default since it has
    # weird discontinuous optimization objectives around the threshold
# and it doesn't improve mAP.
p.Define(
'direction_classifier_weight', 0.0,
'If > 0, adds a direction classifier to the model and adds '
'to the total loss with this weight.')
p.Define(
'squash_rotation_predictions', False,
'Apply tanh squashing to rotation predictions to ensure outputs '
'are between (-pi, pi).')
p.Define('focal_loss_alpha', 0.25, 'The alpha parameter in focal loss '
'(see paper eq. 4).')
p.Define('focal_loss_gamma', 2.0, 'The gamma parameter in focal loss '
'(see paper eq. 4).')
p.Define(
'localization_loss_weight', 2.0,
'Localization loss weight factor between localization and '
'class loss contributions.')
p.Define(
'classification_loss_weight', 1.0,
'Classification loss weight factor between localization and '
'class loss contributions.')
p.Define(
'location_loss_weight', 1.0,
'Weight multiplier for contribution of location loss '
'to full localization/regression loss')
p.Define(
'dimension_loss_weight', 1.0,
'Weight multiplier for contribution of dimension loss '
'to full localization/regression loss')
p.Define(
'rotation_loss_weight', 1.0,
'Weight multiplier for contribution of rotation loss '
'to full localization/regression loss')
p.Define('loss_norm_type', LossNormType.NORM_BY_NUM_POSITIVES,
'Normalization function for class and regularization weights.')
p.Define('oracle_location', False,
'If true, the model predicts the ground truth for location.')
p.Define('oracle_dimension', False,
'If true, the model predicts the ground truth for dimension.')
p.Define('oracle_rotation', False,
'If true, the model predicts the ground truth for rotation.')
tp = p.train
tp.learning_rate = 0.001
tp.optimizer = optimizer.Momentum.Params().Set(alpha=0.9)
return p
@base_layer.initializer
def __init__(self, params):
super(ModelV1, self).__init__(params)
p = self.params
self._utils = detection_3d_lib.Utils3D()
with tf.variable_scope(p.name):
self.CreateChild('input_featurizer', p.input_featurizer)
self.CreateChild('backbone', p.backbone)
self.CreateChild('class_detector', p.class_detector)
self.CreateChild('regression_detector', p.regression_detector)
if p.direction_classifier_weight > 0.0:
self.CreateChild('direction_classifier', p.direction_classifier)
def ComputePredictions(self, theta, input_batch):
"""Computes predictions for `input_batch`.
Args:
theta: A `.NestedMap` object containing variable values of this task.
input_batch: A `.NestedMap` object containing input tensors to this tower.
Returns:
A `.NestedMap` contains
logits - [b, nx, ny, nz, na, 7 + num_classes]
"""
p = self.params
input_batch.Transform(lambda x: (x.shape, x.shape.num_elements())).VLog(
0, 'input_batch shapes: ')
# Make pillars representation from input_batch.
dense_features = self.input_featurizer.FProp(theta.input_featurizer,
input_batch)
# Backbone
tf.logging.vlog(1, 'dense_features.shape = %s', dense_features.shape)
act = self.backbone.FProp(theta.backbone, dense_features)
tf.logging.vlog(1, 'act.shape = %s', act.shape)
# Convert the output of the backbone into class logits and regression
# residuals using two different layers.
class_detection = self.class_detector.FProp(theta.class_detector, act)
reg_detection = self.regression_detector.FProp(theta.regression_detector,
act)
bs, nx, ny, _ = py_utils.GetShape(class_detection, 4)
predicted_classification_logits = tf.reshape(
class_detection,
[bs, nx, ny, p.grid_size_z, p.num_anchors, p.num_classes])
predicted_residuals = tf.reshape(
reg_detection, [bs, nx, ny, p.grid_size_z, p.num_anchors, 7])
if p.squash_rotation_predictions:
predicted_rotations = predicted_residuals[..., 6:]
predicted_rotations = np.pi * tf.tanh(predicted_rotations)
predicted_residuals = tf.concat(
[predicted_residuals[..., :6], predicted_rotations], axis=-1)
if p.oracle_location or p.oracle_dimension or p.oracle_rotation:
gt_residuals = py_utils.HasShape(
input_batch.anchor_localization_residuals,
[bs, nx, ny, p.grid_size_z, p.num_anchors, 7])
# Replace the predicted components with the ground truth if needed.
if p.oracle_location:
location = gt_residuals[..., 0:3]
else:
location = predicted_residuals[..., 0:3]
if p.oracle_dimension:
dimension = gt_residuals[..., 3:6]
else:
dimension = predicted_residuals[..., 3:6]
if p.oracle_rotation:
rotation = gt_residuals[..., 6:]
else:
rotation = predicted_residuals[..., 6:]
predicted_residuals = tf.concat([location, dimension, rotation], axis=-1)
ret = py_utils.NestedMap({
'residuals': predicted_residuals,
'classification_logits': predicted_classification_logits,
})
if p.direction_classifier_weight > 0.0:
predicted_dir = self.direction_classifier.FProp(
theta.direction_classifier, act)
predicted_dir = tf.reshape(predicted_dir,
[bs, nx, ny, p.grid_size_z, p.num_anchors, 2])
ret.predicted_dir = predicted_dir
return ret
def ComputeLoss(self, theta, predictions, input_batch):
"""Computes loss and other metrics for the given predictions.
Args:
theta: A `.NestedMap` object containing variable values of this task.
predictions: The output of `ComputePredictions`, contains: logits - [b,
nx, ny, nz, na, 7 + num_classes]. na is the number of anchor
boxes per cell. [..., :7] are (dx, dy, dz, dw, dl, dh, dt).
input_batch: The input batch from which we accesses the groundtruth.
Returns:
Two dicts defined as BaseTask.ComputeLoss.
"""
p = self.params
predicted_residuals = py_utils.HasShape(predictions.residuals,
[-1, -1, -1, -1, p.num_anchors, 7])
predicted_class_logits = py_utils.HasShape(
predictions.classification_logits,
[-1, -1, -1, -1, p.num_anchors, p.num_classes])
bs, nx, ny, nz, na, _ = py_utils.GetShape(predicted_class_logits, 6)
# Compute class and regression weights.
class_weights = input_batch.assigned_cls_mask
class_weights = py_utils.HasShape(class_weights, [bs, nx, ny, nz, na])
reg_weights = input_batch.assigned_reg_mask
reg_weights = py_utils.HasShape(reg_weights, [bs, nx, ny, nz, na])
reg_weights = tf.expand_dims(reg_weights, -1)
if p.loss_norm_type == LossNormType.NORM_BY_NUM_POSITIVES:
# Compute number of positive anchors per example.
foreground_mask = py_utils.HasShape(input_batch.assigned_reg_mask,
[bs, nx, ny, nz, na])
# Sum to get the number of foreground anchors for each example.
loss_normalization = tf.reduce_sum(foreground_mask, axis=[1, 2, 3, 4])
loss_normalization = tf.maximum(loss_normalization,
tf.ones_like(loss_normalization))
# Reshape for broadcasting.
loss_normalization = tf.reshape(loss_normalization, [bs, 1, 1, 1, 1, 1])
class_weights /= loss_normalization
reg_weights /= loss_normalization
# Classification loss.
assigned_gt_labels = py_utils.HasShape(input_batch.assigned_gt_labels,
[bs, nx, ny, nz, na])
class_loss = py_utils.SigmoidCrossEntropyFocalLoss(
logits=predicted_class_logits,
labels=tf.one_hot(assigned_gt_labels, p.num_classes),
alpha=p.focal_loss_alpha,
gamma=p.focal_loss_gamma)
class_loss *= class_weights[..., tf.newaxis]
class_loss_sum = tf.reduce_sum(class_loss)
# Regression loss.
anchor_localization_residuals = py_utils.HasShape(
input_batch.anchor_localization_residuals, [bs, nx, ny, nz, na, 7])
# Location and dimensions loss.
reg_loc_and_dims_loss = self._utils.ScaledHuberLoss(
predictions=py_utils.HasShape(predicted_residuals[..., :6],
[bs, nx, ny, nz, na, 6]),
labels=anchor_localization_residuals[..., :6],
delta=1 / (3.**2))
# Rotation loss with SmoothL1(sin(delta)).
rot_delta = (
predicted_residuals[..., 6:] -
input_batch.anchor_localization_residuals[..., 6:])
reg_rot_loss = self._utils.ScaledHuberLoss(
predictions=tf.sin(rot_delta),
labels=tf.zeros_like(rot_delta),
delta=1 / (3.**2))
# Direction loss
if p.direction_classifier_weight > 0.0:
# The target rotations are in the assigned_gt_bbox tensor,
# which already has assigned a gt bounding box to every anchor.
rot_target = input_batch.assigned_gt_bbox[..., 6]
# If rotation is > 0, the class is 1, else it is 0.
rot_dir = tf.to_int32(rot_target > 0.)
# Compute one-hot labels as a target.
rot_dir_onehot = tf.one_hot(rot_dir, 2)
# Manually handle loss reduction.
dir_loss = tf.losses.softmax_cross_entropy(
onehot_labels=rot_dir_onehot,
logits=predictions.predicted_dir,
weights=tf.squeeze(reg_weights, axis=-1),
reduction=tf.losses.Reduction.NONE)
# Reduce across all dimensions (we'll divide by the batch size below).
dir_loss_sum = tf.reduce_sum(dir_loss)
else:
dir_loss_sum = 0.0
# Compute loss contribution from location and dimension separately.
reg_loc_loss = reg_loc_and_dims_loss[..., :3] * reg_weights
reg_loc_loss_sum = tf.reduce_sum(reg_loc_loss)
reg_dim_loss = reg_loc_and_dims_loss[..., 3:6] * reg_weights
reg_dim_loss_sum = tf.reduce_sum(reg_dim_loss)
# Compute rotation loss contribution.
reg_rot_loss *= reg_weights
reg_rot_loss_sum = tf.reduce_sum(reg_rot_loss)
# Num. predictions.
# TODO(zhifengc): Consider other normalization factors. E.g., # of bboxes.
preds = tf.cast(bs, class_loss_sum.dtype)
# Normalize all of the components by batch size.
reg_loc_loss = reg_loc_loss_sum / preds
reg_dim_loss = reg_dim_loss_sum / preds
reg_rot_loss = reg_rot_loss_sum / preds
class_loss = class_loss_sum / preds
dir_loss = dir_loss_sum / preds
# Compute total localization regression loss.
reg_loss = (
p.location_loss_weight * reg_loc_loss +
p.dimension_loss_weight * reg_dim_loss +
p.rotation_loss_weight * reg_rot_loss)
# Apply weights to normalized class losses.
loss = (
class_loss * p.classification_loss_weight +
reg_loss * p.localization_loss_weight +
dir_loss * p.direction_classifier_weight)
metrics_dict = {
'loss': (loss, preds),
'loss/class': (class_loss, preds),
'loss/reg': (reg_loss, preds),
'loss/reg/rot': (reg_rot_loss, preds),
'loss/reg/loc': (reg_loc_loss, preds),
'loss/reg/dim': (reg_dim_loss, preds),
'loss/dir': (dir_loss, preds),
}
per_example_dict = {
'residuals': predicted_residuals,
'classification_logits': predicted_class_logits,
}
return metrics_dict, per_example_dict
def _BBoxesAndLogits(self, input_batch):
"""Decode an input batch, computing predicted bboxes from residuals."""
_, per_example_dict = self.FPropTower(self.theta, input_batch)
# Decode residuals.
predicted_bboxes = self._utils.ResidualsToBBoxes(
input_batch.anchor_bboxes, per_example_dict['residuals'])
# predicted_bboxes is a [batch, nx, ny, nz, na, 7] Tensor.
batch_size, nx, ny, nz, na, _ = py_utils.GetShape(predicted_bboxes, 6)
num_boxes = nx * ny * nz * na
# Reshape to [batch_size, num_boxes, 7]
predicted_bboxes = tf.reshape(predicted_bboxes, [batch_size, num_boxes, 7])
classification_logits = tf.reshape(
per_example_dict['classification_logits'], [batch_size, num_boxes, -1])
return py_utils.NestedMap({
'predicted_bboxes': predicted_bboxes,
'classification_logits': classification_logits
})
| 38.681445 | 80 | 0.658445 |
4a1e0067946c2877de6afa411f62ce622e8ce0d2 | 438 | py | Python | setup.py | frascoweb/frasco-images | b54e0c06e14fd816e7fd74dd23918acff3bcc828 | [
"MIT"
] | null | null | null | setup.py | frascoweb/frasco-images | b54e0c06e14fd816e7fd74dd23918acff3bcc828 | [
"MIT"
] | null | null | null | setup.py | frascoweb/frasco-images | b54e0c06e14fd816e7fd74dd23918acff3bcc828 | [
"MIT"
] | null | null | null | from setuptools import setup
setup(
name='frasco-images',
version='0.1',
url='http://github.com/frascoweb/frasco-images',
license='MIT',
author='Maxime Bouroumeau-Fuseau',
author_email='[email protected]',
description="Image manipulation for Frasco",
py_modules=['frasco_images'],
zip_safe=False,
platforms='any',
install_requires=[
'frasco',
'Pillow>=2.5.1'
]
) | 23.052632 | 52 | 0.643836 |
4a1e010f1030ccf8bf2898b527ebff6bf1e9ee9b | 375 | py | Python | tests/system/errlogbeat.py | inxonic/errlogbeat | 068748f2518c7ee220a80fce166783ebd093fbfb | [
"Apache-2.0"
] | null | null | null | tests/system/errlogbeat.py | inxonic/errlogbeat | 068748f2518c7ee220a80fce166783ebd093fbfb | [
"Apache-2.0"
] | null | null | null | tests/system/errlogbeat.py | inxonic/errlogbeat | 068748f2518c7ee220a80fce166783ebd093fbfb | [
"Apache-2.0"
] | null | null | null | import os
import sys
sys.path.append('../../vendor/github.com/elastic/beats/libbeat/tests/system')
from beat.beat import TestCase
class BaseTest(TestCase):
@classmethod
def setUpClass(self):
self.beat_name = "errlogbeat"
self.beat_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../"))
super(BaseTest, self).setUpClass()
| 26.785714 | 91 | 0.685333 |
4a1e027e36f1119094479ab4e90f5ae12f47d96d | 2,706 | py | Python | glyce/glyce/layers/combo_position_embed.py | jsonW0/StrokeOrderEmbeddings | aa73b216a118de2efba1d299b96990ba9244fa3f | [
"Apache-2.0"
] | null | null | null | glyce/glyce/layers/combo_position_embed.py | jsonW0/StrokeOrderEmbeddings | aa73b216a118de2efba1d299b96990ba9244fa3f | [
"Apache-2.0"
] | null | null | null | glyce/glyce/layers/combo_position_embed.py | jsonW0/StrokeOrderEmbeddings | aa73b216a118de2efba1d299b96990ba9244fa3f | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: Jason Wang
# Based on code by: Xiaoya Li(Glyce)
import os
import sys
root_path = "/".join(os.path.realpath(__file__).split("/")[:-3])
if root_path not in sys.path:
sys.path.insert(0, root_path)
import json
import math
import shutil
import tarfile
import logging
import tempfile
import torch
import torch.nn as nn
from torch.nn import CrossEntropyLoss
from glyce.layers.bert_basic_model import *
from glyce.utils.tokenization import BertTokenizer
from glyce.layers.char_glyph_embedding import CharGlyphEmbedding
from glyce.layers.char_graph_embedding import CharGraphEmbedding
class ComboPositionEmbedder(nn.Module):
def __init__(self, configGlyph, configGraph, parentconfig):
super(ComboPositionEmbedder, self).__init__()
self.position_embeddings = nn.Embedding(configGraph.max_position_embeddings, configGlyph.output_size + configGraph.output_size)
token_tool = BertTokenizer.from_pretrained(parentconfig.bert_model, do_lower_case=False)
idx2tokens = token_tool.ids_to_tokens
self.glyph_encoder = CharGlyphEmbedding(configGlyph, idx2tokens)
self.graph_encoder = CharGraphEmbedding(configGraph, idx2tokens)
self.layer_norm = BertLayerNorm(configGlyph.output_size + configGraph.output_size, eps=1e-12)
self.dropout = nn.Dropout(configGraph.hidden_dropout_prob)
def forward(self, input_ids, token_type_ids=None):
seq_length = input_ids.size(1)
position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)
position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
position_embeddings = self.position_embeddings(position_ids)
glyph_embeddings, glyph_cls_loss = self.glyph_encoder(input_ids)
graph_embeddings = self.graph_encoder(input_ids)
embeddings = position_embeddings + torch.cat((glyph_embeddings, graph_embeddings),dim=2)
embeddings = self.layer_norm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings, glyph_cls_loss
def position(self,input_ids,token_type_ids=None):
seq_length = input_ids.size(1)
position_ids = torch.arange(seq_length, dtype=torch.long, device=input_ids.device)
position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
return self.position_embeddings(position_ids)
def comboForward(self,input_ids,token_type_ids=None):
glyph_embeddings, glyph_cls_loss = self.glyph_encoder(input_ids)
graph_embeddings = self.graph_encoder(input_ids)
return torch.cat((glyph_embeddings, graph_embeddings),dim=2)
if __name__ == "__main__":
pass
| 38.657143 | 135 | 0.752402 |
4a1e0292c6a5298296c489bee16d0afcf400d946 | 1,713 | py | Python | POA_02_create_node_link_incidence.py | salomonw/Price_of_Anarchy_for_Transportation_Networks | 1af117feecf36bc06eabe1d3b3749147734851e0 | [
"MIT"
] | 2 | 2018-07-16T02:55:15.000Z | 2021-11-21T16:13:33.000Z | POA_02_create_node_link_incidence.py | salomonw/Price_of_Anarchy_for_Transportation_Networks | 1af117feecf36bc06eabe1d3b3749147734851e0 | [
"MIT"
] | null | null | null | POA_02_create_node_link_incidence.py | salomonw/Price_of_Anarchy_for_Transportation_Networks | 1af117feecf36bc06eabe1d3b3749147734851e0 | [
"MIT"
] | 1 | 2019-02-09T15:37:10.000Z | 2019-02-09T15:37:10.000Z | from utils_julia import *
def create_node_link_incidence(instance):
    import json
    link_label_dict = zload(out_dir + 'link_label_dict.pkz')
with open(out_dir + "/data_traffic_assignment_uni-class/" + files_ID + '_net_' + month_w + '_full_' + instance + '.txt') as MA_journal_flow:
MA_journal_flow_lines = MA_journal_flow.readlines()
MA_journal_links = []
i = -9
for line in MA_journal_flow_lines:
i += 1
if i > 0:
MA_journal_links.append(line.split(' ')[1:3])
numLinks = i
link_list_js = [str(int(MA_journal_links[i][0])) + ',' + str(int(MA_journal_links[i][1])) for \
i in range(len(MA_journal_links))]
link_list_pk = [str(int(MA_journal_links[i][0])) + '->' + str(int(MA_journal_links[i][1])) for \
i in range(len(MA_journal_links))]
numNodes = max([int(MA_journal_links[i][1]) for i in range(numLinks)])
N = np.zeros((numNodes, numLinks))
N_dict = {}
for j in range(np.shape(N)[1]):
for i in range(np.shape(N)[0]):
if (str(i+1) == link_label_dict[str(j)].split('->')[0]):
N[i, j] = 1
elif (str(i+1) == link_label_dict[str(j)].split('->')[1]):
N[i, j] = -1
key = str(i) + '-' + str(j)
N_dict[key] = N[i, j]
with open(out_dir + 'node_link_incidence.json', 'w') as json_file:
json.dump(N_dict, json_file)
zdump(N, out_dir + 'node_link_incidence.pkz')
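# --- Illustrative note (editor's sketch; toy data, independent of the MA files) ---
# N[node, link] is +1 at a link's tail node and -1 at its head node, so a
# 2-node network with the single link 1->2 yields the 2x1 incidence matrix:
N_toy_example = np.array([[1.0], [-1.0]])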
execfile('parameters_julia.py')
for instance in time_instances['id']:
create_node_link_incidence(instance)
| 36.446809 | 144 | 0.581436 |
4a1e02bfee2e634ff016a8dd4c0e6ce013c74e2b | 2,697 | py | Python | loginpass/_django.py | authlib/socialism | 635823a78a2a92cf8630f9935aebb9afcccb8656 | [
"BSD-3-Clause"
] | 223 | 2018-04-23T14:48:17.000Z | 2022-03-19T09:27:36.000Z | loginpass/_django.py | authlib/socialism | 635823a78a2a92cf8630f9935aebb9afcccb8656 | [
"BSD-3-Clause"
] | 68 | 2018-04-22T13:55:25.000Z | 2022-03-15T15:28:42.000Z | loginpass/_django.py | authlib/socialism | 635823a78a2a92cf8630f9935aebb9afcccb8656 | [
"BSD-3-Clause"
] | 86 | 2018-04-24T21:09:26.000Z | 2022-03-17T08:55:34.000Z |
def create_django_urlpatterns(backend, oauth, handle_authorize):
from authlib.integrations.django_client import DjangoRemoteApp
from django.urls import path
class RemoteApp(backend, DjangoRemoteApp):
OAUTH_APP_CONFIG = backend.OAUTH_CONFIG
token_name = '_loginpass_{}_token'.format(backend.NAME)
auth_route_name = 'loginpass_{}_auth'.format(backend.NAME)
login_route_name = 'loginpass_{}_login'.format(backend.NAME)
remote = oauth.register(
backend.NAME,
overwrite=True,
fetch_token=lambda request: getattr(request, token_name, None),
client_cls=RemoteApp,
)
auth = create_auth_endpoint(remote, handle_authorize)
login = create_login_endpoint(remote, backend, auth_route_name)
return [
path('auth/', auth, name=auth_route_name),
path('login/', login, name=login_route_name),
]
def create_auth_endpoint(remote, handle_authorize):
def auth(request):
from django.http import HttpResponse
if request.method not in ('GET', 'POST'):
return HttpResponse(status=405)
method = getattr(request, request.method)
id_token = method.get('id_token')
if method.get('code'):
token = remote.authorize_access_token(request)
if id_token:
token['id_token'] = id_token
elif id_token:
token = {'id_token': id_token}
elif method.get('oauth_verifier'):
# OAuth 1
token = remote.authorize_access_token(request)
else:
# handle failed
            return handle_authorize(request, remote, None, None)
if 'id_token' in token:
user_info = remote.parse_id_token(request, token)
else:
token_name = '_loginpass_{}_token'.format(remote.name)
setattr(request, token_name, token)
user_info = remote.userinfo(request=request, token=token)
return handle_authorize(request, remote, token, user_info)
return auth
def create_login_endpoint(remote, backend, auth_route_name):
from django.conf import settings
from django.urls import reverse
config = getattr(settings, 'AUTHLIB_OAUTH_CLIENTS', None)
authorize_params = None
if config:
backend_config = config.get(backend.NAME)
if backend_config:
authorize_params = backend_config.get('authorize_params')
def login(request):
redirect_uri = request.build_absolute_uri(reverse(auth_route_name))
params = {}
if authorize_params:
            params.update(authorize_params)
return remote.authorize_redirect(request, redirect_uri, **params)
return login
| 33.7125 | 75 | 0.664442 |
4a1e02c3ca9a166d5bfa4e122d945e0e6b109d97 | 1,276 | py | Python | day-07/sol-07.py | guptaanmol184/advent-or-code | 6622e3242239d271bdaf89cf3d9aa16f98bbf5f5 | [
"Unlicense"
] | null | null | null | day-07/sol-07.py | guptaanmol184/advent-or-code | 6622e3242239d271bdaf89cf3d9aa16f98bbf5f5 | [
"Unlicense"
] | null | null | null | day-07/sol-07.py | guptaanmol184/advent-or-code | 6622e3242239d271bdaf89cf3d9aa16f98bbf5f5 | [
"Unlicense"
] | null | null | null | # Advent of code Year 2021 Day 07 solution
# Author = Anmol Gupta
# Date = December 2021
from collections import Counter
import sys
input = list()
with open("input.txt", "r") as input_file:
input = input_file.readlines()
horizontal_positions = [int(position) for position in input[0].strip().split(",")]
# 1
# Find the cheapest position for moving all the crabs
c = Counter(horizontal_positions)
mini = sys.maxsize
for move_position in c.keys():
fuel_cost = 0
for postion, count in c.items():
fuel_cost += abs(postion - move_position) * count
mini = fuel_cost if fuel_cost < mini else mini
print("Part One : " + str(mini))
# 2
# Find the cheapest position for moving all the crabs, with the updated fuel consumption function
c = Counter(horizontal_positions)
mini = sys.maxsize
crab_positions = c.keys()
start_position = min(crab_positions)
end_position = max(crab_positions)
for move_position in range(start_position, end_position + 1):
total_fuel_cost = 0
for postion, count in c.items():
move_dist = abs(postion - move_position)
fuel_cost = (move_dist * (move_dist + 1)) // 2
total_fuel_cost += fuel_cost * count
mini = total_fuel_cost if total_fuel_cost < mini else mini
print("Part Two : " + str(mini))
| 29.674419 | 97 | 0.710031 |
4a1e03166696f8e50afcd8aad6293fd2a27fe8e5 | 5,471 | py | Python | actioncable/connection.py | dazzl-tv/python-actioncable-zwei | 123bc4a992d25f5218abba1b280af29de7cd3a7b | [
"MIT"
] | null | null | null | actioncable/connection.py | dazzl-tv/python-actioncable-zwei | 123bc4a992d25f5218abba1b280af29de7cd3a7b | [
"MIT"
] | null | null | null | actioncable/connection.py | dazzl-tv/python-actioncable-zwei | 123bc4a992d25f5218abba1b280af29de7cd3a7b | [
"MIT"
] | null | null | null | """
ActionCable connection.
"""
import threading
import uuid
import json
import logging
import time
import websocket
class Connection:
"""
The connection to a websocket server
"""
def __init__(self, url, origin=None, log_ping=False, cookie=None, header=None):
"""
:param url: The url of the cable server.
:param origin: (Optional) The origin.
:param log_ping: (Default: False) If true every
ping gets logged.
:param cookie: (Optional) A cookie to send (used for
authentication for instance).
:param header: (Optional) custom header for websocket handshake.
"""
self.url = url
self.origin = origin
self.log_ping = log_ping
self.cookie = cookie
self.header = header
self.logger = logging.getLogger('ActionCable Connection')
self.subscriptions = {}
self.websocket = None
self.ws_thread = None
self.auto_reconnect = False
def connect(self, origin=None):
"""
Connects to the server.
:param origin: (Optional) The origin.
"""
self.logger.debug('Establish connection...')
if self.connected:
self.logger.warning('Connection already established. Return...')
return
if origin is not None:
self.origin = origin
self.auto_reconnect = True
self.ws_thread = threading.Thread(
name="APIConnectionThread_{}".format(uuid.uuid1()),
target=self._run_forever)
self.ws_thread.daemon = True
self.ws_thread.start()
def disconnect(self):
"""
Closes the connection.
"""
self.logger.debug('Close connection...')
self.auto_reconnect = False
if self.websocket is not None:
self.websocket.close()
def _run_forever(self):
while self.auto_reconnect:
try:
self.logger.debug('Run connection loop.')
self.websocket = websocket.WebSocketApp(
self.url,
cookie=self.cookie,
header=self.header,
on_message=lambda socket, message: self._on_message(socket, message),
on_close=lambda socket, code, msg: self._on_close(socket, code, msg)
)
self.websocket.on_open = lambda socket: self._on_open(socket)
self.websocket.run_forever(ping_interval=5, ping_timeout=3, origin=self.origin)
time.sleep(2)
except Exception as exc:
self.logger.error('Connection loop raised exception. Exception: %s', exc)
def send(self, data):
"""
Sends data to the server.
"""
self.logger.debug('Send data: {}'.format(data))
if not self.connected:
self.logger.warning('Connection not established. Return...')
return
self.websocket.send(json.dumps(data))
def _on_open(self, socket):
"""
Called when the connection is open.
"""
self.logger.debug('Connection established.')
def _on_message(self, socket, message):
"""
Called aways when a message arrives.
"""
data = json.loads(message)
message_type = None
identifier = None
subscription = None
if 'type' in data:
message_type = data['type']
if 'identifier' in data:
identifier = json.loads(data['identifier'])
if identifier is not None:
subscription = self.find_subscription(identifier)
if subscription is not None:
subscription.received(data)
elif message_type == 'welcome':
self.logger.debug('Welcome message received.')
for subscription in self.subscriptions.values():
if subscription.state == 'connection_pending':
subscription.create()
elif message_type == 'ping':
if self.log_ping:
self.logger.debug('Ping received.')
else:
self.logger.warning('Message not supported. (Message: {})'.format(message))
def _on_close(self, socket, close_status_code, close_msg):
"""
Called when the connection was closed.
"""
self.logger.debug('Connection closed with status {} and message {}.'.format(close_status_code, close_msg))
for subscription in self.subscriptions.values():
if subscription.state == 'subscribed':
subscription.state = 'connection_pending'
@property
def socket_present(self):
"""
If socket is present.
"""
return self.websocket is not None and self.websocket.sock is not None
@property
def connected(self):
"""
If connected to server.
"""
return self.websocket is not None and \
self.websocket.sock is not None and \
self.websocket.sock.connected
def find_subscription(self, identifier):
"""
Finds a subscription
        by its identifier.
"""
for subscription in self.subscriptions.values():
if subscription.identifier == identifier:
return subscription
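# Example usage (a minimal sketch; the URL and origin are hypothetical, and a
# real setup would also register Subscription objects in
# `connection.subscriptions` before connecting):
if __name__ == '__main__':
    logging.basicConfig(level=logging.DEBUG)
    connection = Connection('wss://example.com/cable', origin='https://example.com')
    connection.connect()
    time.sleep(10)  # keep the daemon websocket thread alive briefly
    connection.disconnect()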
| 29.733696 | 114 | 0.571011 |
4a1e037a15ce86ab909686b8933941cda5c6ada6 | 1,954 | py | Python | preprocess.py | magungh1/Tacotron-2 | 711369e5e37764ca7a5cca9b04cdec0065394efb | [
"MIT"
] | null | null | null | preprocess.py | magungh1/Tacotron-2 | 711369e5e37764ca7a5cca9b04cdec0065394efb | [
"MIT"
] | null | null | null | preprocess.py | magungh1/Tacotron-2 | 711369e5e37764ca7a5cca9b04cdec0065394efb | [
"MIT"
] | null | null | null | import argparse
import os
from multiprocessing import cpu_count
from datasets import preprocessor
from hparams import hparams
from tqdm import tqdm
def preprocess(args, out_dir, hparams):
mel_dir = os.path.join(out_dir, 'mels')
wav_dir = os.path.join(out_dir, 'audio')
linear_dir = os.path.join(out_dir, 'linear')
os.makedirs(mel_dir, exist_ok=True)
os.makedirs(wav_dir, exist_ok=True)
os.makedirs(linear_dir, exist_ok=True)
metadata = preprocessor.build_from_path(hparams, mel_dir, linear_dir, wav_dir, args.n_jobs, tqdm=tqdm)
write_metadata(metadata, out_dir)
def write_metadata(metadata, out_dir):
with open(os.path.join(out_dir, 'train.txt'), 'w', encoding='utf-8') as f:
for m in metadata:
f.write('|'.join([str(x) for x in m]) + '\n')
mel_frames = sum([int(m[4]) for m in metadata])
timesteps = sum([int(m[3]) for m in metadata])
sr = hparams.sample_rate
hours = timesteps / sr / 3600
print('Write {} utterances, {} mel frames, {} audio timesteps, ({:.2f} hours)'.format(
len(metadata), mel_frames, timesteps, hours))
print('Max input length (text chars): {}'.format(max(len(m[5]) for m in metadata)))
print('Max mel frames length: {}'.format(max(int(m[4]) for m in metadata)))
print('Max audio timesteps length: {}'.format(max(m[3] for m in metadata)))
def run_preprocess(args, hparams):
output_folder = os.path.join(args.base_dir, args.output)
preprocess(args, output_folder, hparams)
def main():
print('initializing preprocessing..')
parser = argparse.ArgumentParser()
parser.add_argument('--base_dir', default='')
parser.add_argument('--hparams', default='',
help='Hyperparameter overrides as a comma-separated list of name=value pairs')
parser.add_argument('--output', default='training_data')
parser.add_argument('--n_jobs', type=int, default=cpu_count())
args = parser.parse_args()
modified_hp = hparams.parse(args.hparams)
run_preprocess(args, modified_hp)
if __name__ == '__main__':
main()
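# Example invocation (hypothetical paths and values):
#   python preprocess.py --base_dir /data/tacotron --output training_data --n_jobs 8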
| 34.892857 | 103 | 0.726714 |
4a1e0483760c70f2df0af84e32a096e139e805ea | 837 | py | Python | subseries/subseries.py | mamaz/learn-stuffs-with-python | 2677203717bb5ddc30b0a907b8e38a9c9c7f8015 | [
"MIT"
] | null | null | null | subseries/subseries.py | mamaz/learn-stuffs-with-python | 2677203717bb5ddc30b0a907b8e38a9c9c7f8015 | [
"MIT"
] | null | null | null | subseries/subseries.py | mamaz/learn-stuffs-with-python | 2677203717bb5ddc30b0a907b8e38a9c9c7f8015 | [
"MIT"
] | null | null | null | from typing import List
def is_subseries(first: List[int], second: List[int]) -> bool:
"""
returns True if second is subseries of first
Args:
first (List[int]): source of subseries
second (List[int]): series that will be checked against first series
Returns:
        bool: True if second is a subsequence of first, False otherwise
"""
counter = 0
pivot = 0
for s in range(0, len(second)):
for i in range(pivot, len(first)):
if first[i] == second[s]:
counter += 1
                pivot = i + 1  # resume after the matched element so it cannot be reused
break
return counter == len(second)
if __name__ == "__main__":
first = [1, 3, 5, 8, 9]
checkers = [[3, 8, 9], [1, 5, 9], [1, 3, 5], [1], [1, 3, 18], [8, 5], [9, 8, 5]]
for checker in checkers:
print(is_subseries(first, checker))
| 24.617647 | 84 | 0.542413 |
4a1e04fc87962918286f9c8bc1c17c918f309504 | 8,577 | py | Python | crafting/gridworld/algorithms/composite_dataset.py | cdevin/cpv | 49e20a79f555f695aafb7ed2e1580b65f8b7bef8 | [
"MIT"
] | 8 | 2019-11-11T02:04:42.000Z | 2021-09-26T18:43:10.000Z | crafting/gridworld/algorithms/composite_dataset.py | cdevin/cpv | 49e20a79f555f695aafb7ed2e1580b65f8b7bef8 | [
"MIT"
] | 11 | 2020-03-24T17:45:34.000Z | 2022-03-12T00:03:33.000Z | crafting/gridworld/algorithms/composite_dataset.py | cdevin/cpv | 49e20a79f555f695aafb7ed2e1580b65f8b7bef8 | [
"MIT"
] | null | null | null | import torch
import glob
#from skimage import io, transform
import cv2
from torchvision import transforms, utils
import numpy as np
import pickle
from natsort import natsorted
MAX_ACTIONS = 7
import time
class ActionToTensor(object):
"""Convert ndarrays in sample to Tensors."""
def convert_image(self, image):
if isinstance(image, torch.Tensor):
image = image.permute(2,0,1)
return image.type(torch.FloatTensor)/255.
else:
image[-1,-1,-1] = 0
image = image.transpose((2, 0, 1))
return torch.from_numpy(image).type(torch.FloatTensor)/255.
def __call__(self, sample):
image = sample['image']#, sample['last_image']
action = image[-1,-1,-1]
for k,v in sample.items():
if 'image' in k:
sample[k] = self.convert_image(v)
if 'ref_middle' in sample:
sample['ref_middle'] = torch.stack([self.convert_image(i) for i in sample['ref_middle']])
if 'exp_middle' in sample:
sample['exp_middle'] = torch.stack([self.convert_image(i) for i in sample['exp_middle']])
#import pdb; pdb.set_trace()
#limage = limage.transpose((2, 0, 1))
#print({n: v.shape for n,v in sample.items()})
sample['action'] = int(action)
return sample
class CompositeDataset(torch.utils.data.Dataset):
def __init__(self, directory='/persistent/affordance_world/data/paired_compositions2/',
train=True, size=None, include_ref=True, pickup_balance=0, is_labeled=False,
num_middle_states=0,
):
self.directory = directory
self.transform = ActionToTensor()
self.include_ref = include_ref
self.pickup_balance = pickup_balance
self.is_labeled = is_labeled
self.train = train
        self.num_middle_states = num_middle_states
if train:
traj_files = natsorted(glob.glob(directory+'/episode*[1-9]_*.npy'))
print(len(traj_files))
else:
traj_files = natsorted(glob.glob(directory+'/episode*0_*.npy'))
print(len(traj_files))
print("gathering paths", len(traj_files))
if include_ref:
coef = 2
else:
coef = 1
print("coef is ", coef)
if size is None:
size = int(len(traj_files)/coef)
if size * coef != len(traj_files):
print("unenven number of traj_files")
if size*coef > len(traj_files):
size = int(len(traj_files)/coef)
self.reference_files = []#0 for i in range(size)]
self.expert_files = []#0 for i in range(size)]
print("size", size)
for i in range(size*coef):
file = traj_files[i]
dirs = file.split('/')
words = dirs[-1].split('_')
episode = int(words[0][7:])
# if episode != int(i/2)+1:# and episode != int(i/2) :
# print("episode", episode, "file", file, "len", len(self.reference_files))
# import pdb; pdb.set_trace()
# if episode >= len(self.reference_files):
# print("episode", episode, "file", file, "len", len(self.reference_files))
# import pdb; pdb.set_trace()
if 'ref' in file:
self.reference_files.append((file, episode))
elif 'exp' in file:
self.expert_files.append((file, episode))
if 0 in self.reference_files or 0 in self.expert_files:
print("0 in ref or exp files")
import pdb; pdb.set_trace()
if self.include_ref:
for i in range(len(self.expert_files)):
            # print(i, self.expert_files[i], self.reference_files[i])
assert(self.expert_files[i][-1] == self.reference_files[i][-1])
print("done gathering paths")
# self.time_avg = 0
# self.num_steps = 0
def __len__(self):
return len(self.expert_files)
def _get_label(self, path):
return int(path.split('_')[-2])
def _get_middles(self, trajectory):
trajectory = list(trajectory)
x = int(len(trajectory)/self.num_middle_states)
if x == 0:
middle = trajectory
else:
middle = trajectory[::x][1:]
while len(middle) < self.num_middle_states:
middle = [trajectory[0]]+middle
while len(middle) > self.num_middle_states:
middle = middle[:-1]
#print(len(middle))
#print([t.shape for t in middle])
return middle
def __getitem__(self, idx):
#t1 = time.clock()
ref_is_exp = np.random.randint(0,2)
if ref_is_exp:
ref_files = self.expert_files
exp_files = self.reference_files
else:
exp_files = self.expert_files
ref_files = self.reference_files
exp = np.load(exp_files[idx][0])
#if len(exp) < 3:
# print("idx", idx, "exp length is", exp.shape, "file", self.expert_files[idx][0])
if len(exp) < 4:
exp = [t for t in exp]
while len(exp) < 4:
exp = [exp[0]] + exp
index = np.random.randint(0, len(exp)-2)
if self.include_ref:
ref = np.load(ref_files[idx][0])
ref_init = ref[0]
ref_final = ref[-1]
sample = {'image': exp[index],
'init_image': exp[0],
'final_image': exp[-1],
'post_image': ref_final ,
'pre_image': ref_init,
}
if self.num_middle_states >0:
sample['ref_middle'] = self._get_middles(ref)
sample['exp_middle'] = self._get_middles(exp)
else:
sample = {'image': exp[index],
'init_image': exp[0],
'final_image': exp[-1],
}
if self.is_labeled:
sample['task'] = self._get_label(exp_files[idx][0])
sample = self.transform(sample)
if sample['action'] == 6:
print("idx", idx, "action", sample['action'], "exp length is", len(exp), "file", exp_files[idx][0])
# Hack to avoid crashing, need to remove these tiny samples
sample['action'] = 5
return sample
class StateActionToTensor(object):
"""Convert ndarrays in sample to Tensors."""
def convert_image(self, image):
if image.shape[0] == 33:
image = image[:32]
if isinstance(image, torch.Tensor):
#image = image.permute(2,0,1)
return image.type(torch.FloatTensor)/10 -0.5
else:
return torch.from_numpy(image).type(torch.FloatTensor)/10 -0.5
def __call__(self, sample):
image = sample['image']#, sample['last_image']
action = image[-1]
for k,v in sample.items():
if 'image' in k:
sample[k] = self.convert_image(v)
#print(sample[k])
# if 'ref_middle' in sample:
# sample['ref_middle'] = torch.stack([self.convert_image(i) for i in sample['ref_middle']])
# if 'exp_middle' in sample:
# sample['exp_middle'] = torch.stack([self.convert_image(i) for i in sample['exp_middle']])
#import pdb; pdb.set_trace()
#limage = limage.transpose((2, 0, 1))
#print({n: v.shape for n,v in sample.items()})
sample['action'] = int(action)
return sample
class StateCompositeDataset(CompositeDataset):
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.transform = StateActionToTensor()
class IRLDataset(CompositeDataset):
def __getitem__(self, idx):
idx = idx % len(self.expert_files)
exp = np.load(self.expert_files[idx][0])
if len(exp) < 3:
print("idx", idx, "exp length is", exp.shape, "file", self.expert_files[idx][0])
if len(exp) < 4:
exp = [t for t in exp]
while len(exp) < 3:
exp = [exp[0]] + exp
index = np.random.randint(0, len(exp)-2)
sample ={'image': exp[index],
'next_image': exp[index+1]}
sample = self.transform(sample)
sample['action'] = np.eye(7)[sample['action']].astype(np.float32)
return sample
def __len__(self):
return len(self.expert_files)*15
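# Example usage (a minimal sketch; the directory is hypothetical and must hold
# the episode*.npy reference/expert trajectory files this dataset expects):
if __name__ == '__main__':
    dataset = CompositeDataset(directory='/tmp/paired_compositions', train=True, size=10)
    loader = torch.utils.data.DataLoader(dataset, batch_size=4, shuffle=True)
    batch = next(iter(loader))
    print(batch['image'].shape, batch['action'])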
| 37.784141 | 111 | 0.546578 |
4a1e0551bceb008dc507a500864bfdac33a68374 | 1,877 | py | Python | qbwc/base.py | tyler-n-lovely-test/qbwc | 9cc8e733135d3c177d8073d2cfd56684e42ed735 | [
"Apache-2.0"
] | null | null | null | qbwc/base.py | tyler-n-lovely-test/qbwc | 9cc8e733135d3c177d8073d2cfd56684e42ed735 | [
"Apache-2.0"
] | 1 | 2022-02-16T15:43:32.000Z | 2022-02-16T15:43:32.000Z | qbwc/base.py | tyler-n-lovely-test/qbwc | 9cc8e733135d3c177d8073d2cfd56684e42ed735 | [
"Apache-2.0"
] | 1 | 2020-12-02T14:13:40.000Z | 2020-12-02T14:13:40.000Z | import xmltodict
import requests
import json
from typing import Union, List
from qbwc.generated import types
from qbwc.generated.types import QBXMLMsgsRq, QBXMLMsgsRs
from qbwc.config import uri, request_headers, debug
from qbwc.helpers import (
dict_to_out,
dict_to_in,
split_pascal_case
)
_base_xml = '''
<?xml version="1.0" encoding="utf-8"?>
<?qbxml version="13.0"?>
<QBXML>
<QBXMLMsgsRq onError="stopOnError">
</QBXMLMsgsRq>
</QBXML>
'''.strip()
class QBWCError(Exception):
pass
def _dicttoxml(d: dict, qbxmlversion: str='13.0'):
xml = xmltodict.unparse(d)
if qbxmlversion:
header, xmldata = xml.split('\n', 1)
xml = f'{header}\n<?qbxml version="{qbxmlversion}"?>\n{xmldata}'
return xml
def _except_response_has_error(rs: dict) -> None:
for k, v in rs.items():
if isinstance(v, dict):
v = [v]
for i in v:
if i['@statusSeverity'] == 'Error':
e = QBWCError(json.dumps({
k: v for k, v in i.items() if k.startswith('@')
}))
raise e
return
def _remove_status_from_rs(rs: dict) -> None:
for k, v in rs.items():
if isinstance(v, dict):
v = [v]
for i in v:
for key in tuple(i.keys()):
if key.startswith('@'):
del i[key]
return
def request(rq: QBXMLMsgsRq) -> QBXMLMsgsRs:
rq = dict_to_out(rq)
base = xmltodict.parse(_base_xml)
base['QBXML']['QBXMLMsgsRq'].update(rq)
xml = _dicttoxml(base)
if debug: print(xml)
r = requests.post(uri, data=xml, headers=request_headers)
r.raise_for_status()
c = r.content
rs = xmltodict.parse(c)['QBXML']['QBXMLMsgsRs']
_except_response_has_error(rs)
_remove_status_from_rs(rs)
rs = dict_to_in(rs)
return rs
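# Example usage (a sketch; the snake_case request body below is a guess at a
# minimal qbXML customer query and may need adjusting for a real QuickBooks setup):
#
#   rs = request({'customer_query_rq': {'max_returned': 10}})
#   print(json.dumps(rs, indent=2))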
| 24.697368 | 73 | 0.5935 |
4a1e05cbad884816a99b9053029798a9c2d554c0 | 1,656 | py | Python | app_js/code/app/__init__.py | mellemahp/python_app_template | cee503d32054629d9b67ce6dea67e7ea0928aafa | [
"MIT"
] | 1 | 2020-11-03T17:35:07.000Z | 2020-11-03T17:35:07.000Z | app_js/code/app/__init__.py | mellemahp/python_app_template | cee503d32054629d9b67ce6dea67e7ea0928aafa | [
"MIT"
] | 2 | 2020-02-05T05:05:42.000Z | 2021-05-11T03:31:52.000Z | app_js/code/app/__init__.py | mellemahp/python_app_template | cee503d32054629d9b67ce6dea67e7ea0928aafa | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""/app/__init__.py
Description: Application factory for Template app
Project: Python App Template
Author: Hunter Mellema
Date: 1/20/2020
"""
# === Start imports ===#
# third party
import bottle
from bottle import Bottle
import toml
# local imports
from config import config_dict
from . import views
from .auth import auth_wrapper
# === End Imports ===#
def build_app(mode):
""" Builds a new app instance with specified configuration
Args:
mode (str): Mode to use (choose one of ['dev', 'test', 'prod'])
"""
app = Bottle()
# add the configuration parameters to app instance
app.config = config_dict[mode]
app.debug = app.config["DEBUG"]
bottle.TEMPLATE_PATH.insert(0, app.config["TEMPLATE_PATH"])
# load toml route configuration file
with open(app.config["ROUTES_FILE"]) as f:
cfg = toml.load(f)
# add all pages and routes
add_routes(app, cfg)
# add other setup here like databases and such
return app
def add_routes(app, cfg):
"""Adds all routes from the routes file to the application route table
Args:
app (bottle.Bottle()): pre-configured application instance
cfg (dict): dictionary containing data from routes.toml file
Notes:
Edits application in place
"""
for route in cfg["routes"][0]:
c = cfg["routes"][0][route][0]
module, fxn = c["handler"].split(".")
app.route(
path=c["route"],
name=route,
method=c["methods"],
callback=auth_wrapper(getattr(globals()[module], fxn), c["auth"]),
)
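# Example usage (a minimal sketch; assumes a 'dev' entry in config_dict and the
# routes file the config points to; call from a top-level entry point, since
# this package module uses relative imports):
#
#   app = build_app('dev')
#   app.run(host='localhost', port=8080)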
| 23.323944 | 78 | 0.63285 |
4a1e06063edc54d649ace7547ed49647a118cbc1 | 684 | py | Python | grr/lib/rdfvalues/hunts.py | mikecb/grr | 52fdd977729af2a09a147301c55b8b7f1eccfa67 | [
"Apache-2.0"
] | 5 | 2017-03-17T08:25:09.000Z | 2022-02-22T05:28:14.000Z | grr/lib/rdfvalues/hunts.py | mikecb/grr | 52fdd977729af2a09a147301c55b8b7f1eccfa67 | [
"Apache-2.0"
] | null | null | null | grr/lib/rdfvalues/hunts.py | mikecb/grr | 52fdd977729af2a09a147301c55b8b7f1eccfa67 | [
"Apache-2.0"
] | 3 | 2018-12-07T07:04:37.000Z | 2022-02-22T05:28:16.000Z | #!/usr/bin/env python
"""RDFValue implementations for hunts."""
from grr.lib.rdfvalues import structs as rdf_structs
from grr.proto import flows_pb2
from grr.proto import jobs_pb2
class HuntNotification(rdf_structs.RDFProtoStruct):
protobuf = jobs_pb2.HuntNotification
class HuntContext(rdf_structs.RDFProtoStruct):
protobuf = flows_pb2.HuntContext
class HuntRunnerArgs(rdf_structs.RDFProtoStruct):
protobuf = flows_pb2.HuntRunnerArgs
def Validate(self):
if self.HasField("client_rule_set"):
self.client_rule_set.Validate()
class HuntError(rdf_structs.RDFProtoStruct):
"""An RDFValue class representing a hunt error."""
protobuf = jobs_pb2.HuntError
| 22.8 | 52 | 0.78655 |
4a1e0779ab9593cf081c6fd79afd30271b7ab07a | 395 | py | Python | Neighbour/wsgi.py | FabianMatata/Neighbours | fba748fd8172072a851f5c6aa9c62f50fc193e4e | [
"MIT"
] | null | null | null | Neighbour/wsgi.py | FabianMatata/Neighbours | fba748fd8172072a851f5c6aa9c62f50fc193e4e | [
"MIT"
] | null | null | null | Neighbour/wsgi.py | FabianMatata/Neighbours | fba748fd8172072a851f5c6aa9c62f50fc193e4e | [
"MIT"
] | null | null | null | """
WSGI config for Neighbour project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Neighbour.settings')
application = get_wsgi_application()
| 23.235294 | 78 | 0.787342 |
4a1e0782cd7e825ff1b2586c03fbf7a3ce8103a9 | 513 | py | Python | palpatine/line.py | uranusjr/palpatine | 401b8c02a27f754c54a5a4212d892950abe169a5 | [
"BSD-3-Clause"
] | 1 | 2015-03-12T20:55:05.000Z | 2015-03-12T20:55:05.000Z | palpatine/line.py | uranusjr/palpatine | 401b8c02a27f754c54a5a4212d892950abe169a5 | [
"BSD-3-Clause"
] | null | null | null | palpatine/line.py | uranusjr/palpatine | 401b8c02a27f754c54a5a4212d892950abe169a5 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from . import const
from .utils import out
def clear(clear_type=const.ALL):
"""Clear part of, or all characters in the current line.
The cursor is *not* moved after the line is cleared. You will need to call
cursor-moving functions (see :py:mod:`.cursor`) to move it manually.
:param clear_type: What part of the current line should be cleared. See
:py:mod:`.const` for a list of possible choices.
"""
out(clear_type, letter='K')
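# Example usage (a minimal sketch; because this module uses relative imports,
# run it via `python -m` or call `clear` from package code):
#
#   import sys
#   sys.stdout.write('temporary status...')
#   sys.stdout.flush()
#   clear()  # clears the whole current line (const.ALL by default)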
| 28.5 | 78 | 0.674464 |
4a1e07fada71574bce478dda19bbdc2891d1c691 | 75 | py | Python | Configuration/Eras/python/Modifier_run2_ECAL_2016_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | Configuration/Eras/python/Modifier_run2_ECAL_2016_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | Configuration/Eras/python/Modifier_run2_ECAL_2016_cff.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
run2_ECAL_2016 = cms.Modifier()
| 18.75 | 40 | 0.8 |
4a1e080cfa6908106a566087df18195e3e31cbeb | 5,405 | py | Python | docs/source/conf.py | supsi-dacd-isaac/krangpower | eba38942af552992ad466f1b432121a99870c9b2 | [
"MIT"
] | 12 | 2018-07-19T10:03:11.000Z | 2021-01-12T22:30:58.000Z | docs/source/conf.py | supsi-dacd-isaac/krangpower | eba38942af552992ad466f1b432121a99870c9b2 | [
"MIT"
] | null | null | null | docs/source/conf.py | supsi-dacd-isaac/krangpower | eba38942af552992ad466f1b432121a99870c9b2 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# krangpower documentation build configuration file, created by
# sphinx-quickstart on Fri May 18 12:01:42 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath(r'../..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.ifconfig',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'krangpower'
copyright = '2018, Federico Rosato'
author = 'Federico Rosato'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2'
# The full version, including alpha/beta/rc tags.
release = '0.2.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_theme_options = {}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# This is required for the alabaster theme
# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars
html_sidebars = {
'**': ['home.html',
'navigation.html',
'relations.html', # needs 'show_related': True theme option to display
'searchbox.html'
]
}
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'krangpowerdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'krangpower.tex', 'krangpower Documentation',
'Federico Rosato', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'krangpower', 'krangpower Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'krangpower', 'krangpower Documentation',
author, 'krangpower', 'One line description of project.',
'Miscellaneous'),
]
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'https://docs.python.org/': None}
| 30.536723 | 82 | 0.684181 |
4a1e08871dedca029bbfe21b78447986aeac1f73 | 840 | py | Python | dags/simple_dag.py | denironyx/docker-airflow-tutorial | dde72fe933fd1a241636c7bf27d2a6409263ffb2 | [
"Apache-2.0"
] | null | null | null | dags/simple_dag.py | denironyx/docker-airflow-tutorial | dde72fe933fd1a241636c7bf27d2a6409263ffb2 | [
"Apache-2.0"
] | null | null | null | dags/simple_dag.py | denironyx/docker-airflow-tutorial | dde72fe933fd1a241636c7bf27d2a6409263ffb2 | [
"Apache-2.0"
] | null | null | null | from datetime import datetime, timedelta
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python_operator import PythonOperator
def print_hello():
return "Hello world!"
default_args = {
"owner": "airflow",
"depends_on_past": False,
"start_date": datetime(2019, 4, 30),
"email": ["[email protected]"],
"email_on_failure": False,
"email_on_retry": False,
"retries": 1,
"retry_delay": timedelta(minutes=2),
}
dag = DAG(
"hello_world",
description="Simple tutorial DAG",
schedule_interval="0 12 * * *",
default_args=default_args,
catchup=False,
)
t1 = DummyOperator(task_id="dummy_task", retries=3, dag=dag)
t2 = PythonOperator(task_id="hello_task", python_callable=print_hello, dag=dag)
# sets t2 downstream of t1
t1 >> t2 | 24 | 79 | 0.708333 |
4a1e096dcae0185b37c9aca953b78e2579e66c65 | 4,221 | py | Python | scripts/build_tokens.py | amis-erc20/NetherDelta | cbc39f20c56e3dd669017cea271d9e860c8dcd4a | [
"MIT"
] | null | null | null | scripts/build_tokens.py | amis-erc20/NetherDelta | cbc39f20c56e3dd669017cea271d9e860c8dcd4a | [
"MIT"
] | null | null | null | scripts/build_tokens.py | amis-erc20/NetherDelta | cbc39f20c56e3dd669017cea271d9e860c8dcd4a | [
"MIT"
] | 1 | 2018-08-19T03:48:44.000Z | 2018-08-19T03:48:44.000Z | import json
from os import listdir, path
import sys
import yaml
TOKEN_KEYS_MAPPING = {
"addr": "addr",
"symbol": "name",
"name": "fullName",
"decimals": "decimals"
}
def make_listing_entry(defn):
token = {
dst_key: defn[src_key]
for (src_key, dst_key) in TOKEN_KEYS_MAPPING.items()
}
if "__FORKDELTA_CUSTOM_SYMBOL" in defn:
token["name"] = defn["__FORKDELTA_CUSTOM_SYMBOL"]
return token
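# For example (an illustrative definition, not a real token):
#   make_listing_entry({"addr": "0x00..01", "symbol": "ABC", "name": "Alpha", "decimals": 18})
#   returns {"addr": "0x00..01", "name": "ABC", "fullName": "Alpha", "decimals": 18}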
NOTICE_HTML_TEMPLATE = """<p class="alert alert-warning">
{notice}
</p>
"""
GUIDE_HTML_TEMPLATE = """{notice_html}<blockquote>
<p>{description_html}</p>
<footer>{website_href}</footer>
</blockquote>\n"""
DESCRIPTION_HTML_JOINER = "</p>\n <p>" # With spaces to keep indentation consistent
WEBSITE_HREF_TEMPLATE = '<a href="{url}" target="_blank">{url}</a>'
def make_description_html(defn):
description = defn.get("description", "")
description_html = "</p>\n <p>".join(description.split("\n"))
website = dict([(key, d[key]) for d in defn["links"] for key in d]).get(
"Website", "")
if website:
website_href = WEBSITE_HREF_TEMPLATE.format(url=website)
else:
website_href = ""
if not description_html and not website_href:
return "" # No guide to write
if "notice" in defn:
notice_html = NOTICE_HTML_TEMPLATE.format(notice=defn["notice"])
else:
notice_html = ""
return GUIDE_HTML_TEMPLATE.format(
description_html=description_html,
website_href=website_href,
notice_html=notice_html)
def inject_tokens(config_filename, tokens):
with open(config_filename) as f:
config = f.readlines()
config_iterator = iter(config)
prefix = []
for line in config_iterator:
if line == ' "tokens": [\n':
prefix.append(line)
break
prefix.append(line)
suffix = []
suffix_started = False
for line in config_iterator:
if line == ' ],\n':
suffix_started = True
if suffix_started:
suffix.append(line)
json_tokens = [ # Keep the silly format, you filthy animals
json.dumps(token_entry).replace('{', '{ ').replace('}', ' }')
for token_entry in tokens
]
formatted_tokens = [
" {},\n".format(json_token) for json_token in json_tokens
]
formatted_tokens[-1] = formatted_tokens[-1].rstrip("\n,") + "\n"
return prefix + formatted_tokens + suffix
CONFIG_FILE = "config/main.json"
ETH_TOKEN = {
"addr": "0x0000000000000000000000000000000000000000",
"name": "ETH",
"decimals": 18
}
def main(tokenbase_path):
tokens_dir = path.join(tokenbase_path, "tokens")
token_file_filter = lambda fname: fname.startswith("0x") and fname.endswith(".yaml")
symbols = set("eth")
tokens = []
for defn_fname in sorted(
map(lambda s: s.lower(),
filter(token_file_filter, listdir(tokens_dir)))):
with open(path.join(tokens_dir, defn_fname), encoding="utf8") as f:
print(defn_fname)
defn = yaml.safe_load(f)
listing_entry = make_listing_entry(defn)
if listing_entry["name"] in symbols:
find_symbol = lambda t: t["name"] == listing_entry["name"].lower()
previous_assignment = next(filter(find_symbol, tokens), None)
print("ERROR: Duplicate token symbol", listing_entry["name"],
"({})".format(listing_entry["addr"]),
"previously assigned to", previous_assignment["addr"])
exit(2)
symbols.add(listing_entry["name"].lower())
tokens.append(listing_entry)
guide = make_description_html(defn)
if guide:
with open(
"tokenGuides/{}.ejs".format(listing_entry["name"]),
"w",
encoding="utf8") as f:
f.write(guide)
new_config = inject_tokens("config/main.json", tokens)
with open(CONFIG_FILE, "w", encoding="utf8") as f:
f.writelines(new_config)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage: build_tokens.py <tokenbase working copy path>")
exit()
main(sys.argv[1])
| 29.110345 | 88 | 0.610045 |
4a1e09780033da5f54f90f19743b5c7096fb93a5 | 1,244 | py | Python | tools/ams_checker.py | charlieDurnsfordHollands/utilities | 5306d6f96502f7c4c808d2fd9fe10572b39dfb90 | [
"MIT"
] | null | null | null | tools/ams_checker.py | charlieDurnsfordHollands/utilities | 5306d6f96502f7c4c808d2fd9fe10572b39dfb90 | [
"MIT"
] | 1 | 2021-05-06T16:26:32.000Z | 2021-06-28T17:26:09.000Z | tools/ams_checker.py | charlieDurnsfordHollands/utilities | 5306d6f96502f7c4c808d2fd9fe10572b39dfb90 | [
"MIT"
] | 3 | 2021-06-16T13:55:09.000Z | 2021-09-10T13:11:58.000Z | """Script to query AMS subscriptions and show the backlog of messages."""
# References used in the creation of this script.
# https://realpython.com/python-string-formatting/
# https://stackoverflow.com/a/12965254/1442342
from __future__ import print_function
import json
import urllib
# An admin token for the AMS project needs to be provided.
TOKEN = ''
URL_TEMPLATE = ('https://msg.argo.grnet.gr/v1/projects/accounting/'
'subscriptions/{sub}:offsets?key={token}')
TYPES = ('grid', 'cloud', 'storage')
print("Subscription \tBacklog")
print("-"*31)
for type in TYPES:
for service in ('repository', 'portal'):
sub = service + '-' + type
url = URL_TEMPLATE.format(sub=sub, token=TOKEN)
# print(url)
response = urllib.urlopen(url)
data = json.loads(response.read())
# print(data)
# print(data['current'], data['max'], data['max'] - data['current'])
print(sub, data['max'] - data['current'], sep=' \t')
for type in TYPES[0:2]:
sub = 'IRIS-' + type + '-APEL'
url = URL_TEMPLATE.format(sub=sub, token=TOKEN)
response = urllib.urlopen(url)
data = json.loads(response.read())
print(sub, data['max'] - data['current'], sep=' \t')
| 33.621622 | 76 | 0.635048 |
4a1e09a95e8a4b7f7755fb1439f578706b88e68e | 3,919 | py | Python | cloudshell/snmp/snmp_configurator.py | QualiSystems/cloudshell-snmp | 9f8c4a927997d69cf85cac2a9bb1c36952d62d99 | [
"Apache-2.0"
] | null | null | null | cloudshell/snmp/snmp_configurator.py | QualiSystems/cloudshell-snmp | 9f8c4a927997d69cf85cac2a9bb1c36952d62d99 | [
"Apache-2.0"
] | 36 | 2016-05-13T08:42:13.000Z | 2021-07-07T13:53:23.000Z | cloudshell/snmp/snmp_configurator.py | QualiSystems/cloudshell-snmp | 9f8c4a927997d69cf85cac2a9bb1c36952d62d99 | [
"Apache-2.0"
] | 5 | 2016-08-05T17:49:21.000Z | 2019-05-28T03:27:22.000Z | from abc import ABCMeta, abstractmethod
from cloudshell.snmp.cloudshell_snmp import Snmp
from cloudshell.snmp.snmp_parameters import SnmpParametersHelper
ABC = ABCMeta("ABC", (object,), {"__slots__": ()})
class SnmpConfigurator(object):
"""Create snmp service, according to resource config values."""
def __init__(self, resource_config, logger, snmp=None):
"""Create snmp service, according to resource config values.
:param cloudshell.shell.standards.resource_config_generic_models.GenericSnmpConfig resource_config: # noqa: E501
:param logging.Logger logger:
:param snmp:
"""
self.resource_config = resource_config
self._logger = logger
# use like a container
self._snmp = snmp or Snmp()
self._snmp_parameters_helper = SnmpParametersHelper(resource_config)
@property
def _snmp_parameters(self):
return self._snmp_parameters_helper.get_snmp_parameters()
def get_service(self):
"""Enable/Disable snmp.
:rtype: SnmpContextManager
"""
return self._snmp.get_snmp_service(self._snmp_parameters, self._logger)
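# Example usage (a minimal sketch; the resource config and logger come from the
# surrounding CloudShell Shell framework and are hypothetical here):
#
#   configurator = SnmpConfigurator(resource_config, logger)
#   with configurator.get_service() as snmp_service:
#       ...  # issue SNMP requests through the service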
class EnableDisableSnmpFlowInterface(ABC):
@abstractmethod
def enable_snmp(self, snmp_parameters):
"""Enable SNMP.
:param cloudshell.snmp.snmp_parameters.SnmpParameters snmp_parameters:
"""
pass
@abstractmethod
def disable_snmp(self, snmp_parameters):
"""Disable SNMP.
:param cloudshell.snmp.snmp_parameters.SnmpParameters snmp_parameters:
"""
pass
class EnableDisableSnmpManager(object):
"""Context manager to enable/disable snmp."""
def __init__(
self,
enable_disable_flow,
snmp_parameters,
snmp,
logger,
enable=True,
disable=True,
):
"""Context manager to enable/disable snmp.
:param EnableDisableSnmpFlowInterface enable_disable_flow:
:param cloudshell.snmp.snmp_parameters.SnmpParameters snmp_parameters:
:param cloudshell.snmp.cloudshell_snmp.Snmp snmp:
:param logging.Logger logger:
:param bool enable:
        :param bool disable:
"""
self._enable_disable_flow = enable_disable_flow
self._snmp_parameters = snmp_parameters
self._logger = logger
self._snmp_manager = snmp.get_snmp_service(self._snmp_parameters, self._logger)
self._enable = enable
self._disable = disable
def __enter__(self):
if self._enable:
self._logger.debug("Calling enable snmp flow")
self._enable_disable_flow.enable_snmp(self._snmp_parameters)
return self._snmp_manager.__enter__()
def __exit__(self, exc_type, exc_val, exc_tb):
"""Disable snmp service."""
if self._disable:
self._logger.debug("Calling disable snmp flow")
self._enable_disable_flow.disable_snmp(self._snmp_parameters)
self._snmp_manager.__exit__(exc_type, exc_val, exc_tb)
class EnableDisableSnmpConfigurator(SnmpConfigurator, ABC):
def __init__(self, enable_disable_snmp_flow, resource_config, logger):
"""Enable Disable SNMP Configurator.
:param EnableDisableSnmpFlowInterface enable_disable_snmp_flow:
:param resource_config:
:param logger:
"""
super(EnableDisableSnmpConfigurator, self).__init__(resource_config, logger)
self._enable_disable_snmp_flow = enable_disable_snmp_flow
def get_service(self):
enable = self.resource_config.enable_snmp.lower() == str(True).lower()
disable = self.resource_config.disable_snmp.lower() == str(True).lower()
return EnableDisableSnmpManager(
self._enable_disable_snmp_flow,
self._snmp_parameters,
self._snmp,
self._logger,
enable,
disable,
)
| 32.932773 | 121 | 0.676193 |
4a1e0d4358ddc73e3e5279434c3b5eda9a7d48fe | 20,475 | py | Python | tf2onnxnightly/rewriter/rnn_utils.py | tensorleap/tensorflow-onnx | 56f6070828928bbb0f30890b2229eec8b663213d | [
"Apache-2.0"
] | null | null | null | tf2onnxnightly/rewriter/rnn_utils.py | tensorleap/tensorflow-onnx | 56f6070828928bbb0f30890b2229eec8b663213d | [
"Apache-2.0"
] | null | null | null | tf2onnxnightly/rewriter/rnn_utils.py | tensorleap/tensorflow-onnx | 56f6070828928bbb0f30890b2229eec8b663213d | [
"Apache-2.0"
] | null | null | null | # SPDX-License-Identifier: Apache-2.0
"""
tf2onnx.rewriter.rnn_utils - rnn support
"""
from __future__ import unicode_literals
from collections import defaultdict
from enum import Enum
import logging
import numpy as np
from tf2onnxnightly import utils
from tf2onnxnightly.graph_builder import GraphBuilder
from tf2onnxnightly.graph_matcher import OpTypePattern # pylint: disable=unused-import
# pylint: disable=invalid-name,unused-argument,missing-docstring
logger = logging.getLogger(__name__)
class REWRITER_RESULT(Enum):
SKIP = 1
OK = 2
FAIL = 3
# TensorFlow LSTMCell/BasicLSTMCell computation graph matching
xc_pattern = \
OpTypePattern('Split', inputs=[
OpTypePattern("Const"), # axis for split
OpTypePattern("BiasAdd", name="bias_add", inputs=[
OpTypePattern("MatMul", inputs=[
OpTypePattern("ConcatV2|Concat", name="xh"),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="cell_kernel"),
]),
]),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="cell_bias"),
]),
]),
])
lstmcell_pattern = \
OpTypePattern('Mul', name='ht', inputs=[
OpTypePattern("Sigmoid", name="ot", inputs=[xc_pattern]),
OpTypePattern('Tanh', inputs=[
OpTypePattern("Add|AddV2", name="ct", inputs=[
OpTypePattern("Mul", name="ct_identity_consumer", inputs=[
OpTypePattern("Sigmoid", name="ft", inputs=[
OpTypePattern("Add|AddV2", inputs=[
xc_pattern,
OpTypePattern("*", name="ft_bias"),
]),
]),
OpTypePattern("*"),
]),
OpTypePattern("Mul", inputs=[
OpTypePattern("Sigmoid", name="it", inputs=[xc_pattern]),
OpTypePattern("Tanh", name="gt", inputs=[xc_pattern]),
]),
]),
]),
])
xc_pattern_optimized = \
OpTypePattern('Split', inputs=[
OpTypePattern("Const"),
OpTypePattern("Identity", inputs=[
OpTypePattern("MatMul", inputs=[
OpTypePattern("ConcatV2|Concat", name="xh"),
OpTypePattern("Const", name="cell_kernel"),
]),
]),
])
lstmcell_pattern_optimized = \
OpTypePattern('Mul', name='ht', inputs=[
OpTypePattern("Sigmoid", name="ot", inputs=[xc_pattern_optimized]),
OpTypePattern('Tanh', inputs=[
OpTypePattern("Add|AddV2", name="ct", inputs=[
OpTypePattern("Mul", name="ct_identity_consumer", inputs=[
OpTypePattern("Sigmoid", name="ft", inputs=[
OpTypePattern("Add|AddV2", inputs=[
xc_pattern_optimized,
OpTypePattern("*", name="ft_bias"),
]),
]),
OpTypePattern("*"),
]),
OpTypePattern("Mul", inputs=[
OpTypePattern("Sigmoid", name="it", inputs=[xc_pattern_optimized]),
OpTypePattern("Tanh", name="gt", inputs=[xc_pattern_optimized]),
]),
]),
]),
])
# input sequence: top to down, left to right
# split into update gate and reset gate
gru_split_pattern = \
OpTypePattern("Split", inputs=[
OpTypePattern("Const"), # split dim, a constant
OpTypePattern("Sigmoid", inputs=[
OpTypePattern("BiasAdd", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="gate_bias")
]),
OpTypePattern("MatMul", name="update_reset_gate", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="gate_kernel")
]),
OpTypePattern("ConcatV2|Concat", name="cell_inputs")
])
])
])
])
grucell_pattern = \
OpTypePattern("Add", name="cell_output", inputs=[
OpTypePattern("Mul", inputs=[
gru_split_pattern,
OpTypePattern("Identity")
]),
OpTypePattern("Mul", inputs=[
OpTypePattern("Sub", inputs=[
OpTypePattern("Const"), # 1-u
gru_split_pattern
]),
OpTypePattern("*", name="optional_activation", inputs=[
OpTypePattern("BiasAdd", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_bias")
]),
OpTypePattern("MatMul", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_kernel")
]),
OpTypePattern("ConcatV2|Concat")
])
])
])
])
])
cudnn_compatible_grucell_pattern = \
OpTypePattern("Add", name="cell_output", inputs=[
OpTypePattern("Mul", inputs=[
OpTypePattern("Sub", inputs=[
OpTypePattern("Const"), # 1-u
gru_split_pattern
]),
OpTypePattern("*", name="optional_activation", inputs=[
OpTypePattern("Add", inputs=[
OpTypePattern("Mul", inputs=[
gru_split_pattern,
OpTypePattern("BiasAdd", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_state_bias")
]),
OpTypePattern("MatMul", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_state_kernel"),
]),
OpTypePattern("Identity")
])
])
]),
OpTypePattern("BiasAdd", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_input_bias")
]),
OpTypePattern("MatMul", inputs=[
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_input_kernel"),
]),
OpTypePattern("*")
])
])
])
])
]),
OpTypePattern("Mul", inputs=[
gru_split_pattern,
OpTypePattern("Identity")
])
])
grublockcell_pattern0 = OpTypePattern("GRUBlockCell", name="gru_block_cell", inputs=[
OpTypePattern("*"),
OpTypePattern("*"),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="gate_kernel")
]),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_kernel")
]),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="gate_bias")
]),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="hidden_bias")
])
])
grublockcell_pattern1 = OpTypePattern("GRUBlockCell", name="gru_block_cell", inputs=[
OpTypePattern("*"),
OpTypePattern("*"),
OpTypePattern("Const", name="gate_kernel"),
OpTypePattern("Const", name="hidden_kernel"),
OpTypePattern("Const", name="gate_bias"),
OpTypePattern("Const", name="hidden_bias")
])
lstmblockcell_pattern = \
OpTypePattern("LSTMBlockCell", name="lstm_block_cell", inputs=[
OpTypePattern("*"),
OpTypePattern("*"),
OpTypePattern("*"),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="cell_kernel")
]),
OpTypePattern("*", name="Pi"),
OpTypePattern("*", name="Pf"),
OpTypePattern("*", name="Po"),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="cell_bias")
])
])
seq_len_pattern0 = OpTypePattern("Select|SelectV2", inputs=[
OpTypePattern("GreaterEqual", inputs=[
OpTypePattern("*"),
OpTypePattern("Enter", inputs=[
OpTypePattern("*", name="seq_len_node")
])
]),
OpTypePattern("*"),
OpTypePattern("*")
])
seq_len_pattern1 = OpTypePattern("Select|SelectV2", inputs=[
OpTypePattern("GreaterEqual", inputs=[
OpTypePattern("*"),
OpTypePattern("Const", name="seq_len_node")
]),
OpTypePattern("*"),
OpTypePattern("*")
])
class RNNUnitType(Enum):
LSTMCell = 0 # TF LSTMCell and BasicLSTMCell share the same pattern
LSTMBlockCell = 1
GRUCell = 2
GRUBlockCell = 3
CudnnCompatibleGRUCell = 4
rnn_cell_patterns = {
RNNUnitType.LSTMCell: [lstmcell_pattern, lstmcell_pattern_optimized],
RNNUnitType.LSTMBlockCell: [lstmblockcell_pattern],
RNNUnitType.GRUCell: [grucell_pattern],
RNNUnitType.GRUBlockCell: [grublockcell_pattern0, grublockcell_pattern1],
RNNUnitType.CudnnCompatibleGRUCell: [cudnn_compatible_grucell_pattern]
}
def get_pattern(cell_type_name):
return rnn_cell_patterns[cell_type_name]
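# Example (a sketch; GraphMatcher lives in the graph_matcher module imported
# above, though exact matcher options may differ across tf2onnx versions):
#   from tf2onnxnightly.graph_matcher import GraphMatcher
#   for pattern in get_pattern(RNNUnitType.LSTMCell):
#       matcher = GraphMatcher(pattern, allow_reorder=True)
#       match_results = list(matcher.match_ops(graph.get_nodes()))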
def get_rnn_scope_name(while_scope_name):
parts = while_scope_name.split('/')
rnn_scope = '/'.join(parts[0:-2]) + "/"
return rnn_scope
def parse_rnn_loop(graph, loop_properties, rnn_scope, while_context_scope):
"""check if the while loop is generated by dynamic_rnn or bidirectional_rnn
Args:
loop_properties: LoopProperties
rnn_scope: rnn scope name
while_context_scope: while loop scope name
    A while loop is recognized as generated by dynamic_rnn or bidirectional_rnn by:
1. some patterns in _time_step in dynamic_rnn: tensor array read, tensor array write
2. some patterns in control_flow_ops.while_loop in dynamic_rnn:
cond: time < loop_bound
loop_vars: (time, output_ta, state)
time has name called "time"
iteration_cnt is added by control flow.
    Note:
1. iteration counter does not exist in tf1.4 or earlier versions
2. if dynamic_rnn's first input is not consumed, output ta does not exist.
"""
time_name = rnn_scope + "time"
ta_array_name_prefix = rnn_scope + "dynamic_rnn/output_"
iteration_counter_name = while_context_scope + "iteration_counter"
found_time = False
is_rnn_out_ta = None
time_var = None
iteration_var = None
for val in loop_properties.all_variables.values():
enter_input_node = graph.get_node_by_output(val.enter_input_id)
if val.is_tensor_array:
ta_name = enter_input_node.get_attr("tensor_array_name").s.decode("utf-8")
if not ta_name.startswith(ta_array_name_prefix):
is_rnn_out_ta = False
elif enter_input_node.name == time_name:
found_time = True
time_var = val
elif enter_input_node.name == iteration_counter_name:
iteration_var = val
if not found_time or is_rnn_out_ta is False:
logger.debug("this should not be a dynamic_rnn loop, found_time: %s, is_rnn_out_ta: %s",
found_time, is_rnn_out_ta)
return None
if not loop_properties.tensor_array_inputs:
logger.debug("this should not be a dynamic_rnn loop, no ta input is found")
return None
return time_var, iteration_var
def get_weights_from_const_node(g, node):
temp = node
val = None
# this would help ignore Identity in non-const_folded graph.
while temp.type == 'Identity':
temp = temp.inputs[0]
if temp and temp.type == 'Const':
val = temp.get_tensor_value(as_list=False)
dtype = utils.map_onnx_to_numpy_type(g.get_dtype(temp.output[0]))
val = val.astype(dtype)
logger.debug("found weights %s", temp.name)
else:
logger.debug("weight node seems not to be Const, skip, node name is %s", temp.name)
return None
return val
######################################################
#### Utilities for bidirectional rnn #######
######################################################
class ONNX_RNN_TYPE(Enum):
GRU = 0
LSTM = 1
onnx_rnn_type_mapping = {
ONNX_RNN_TYPE.GRU: "GRU",
ONNX_RNN_TYPE.LSTM: "LSTM"
}
onnx_rnn_attr_mapping = {
ONNX_RNN_TYPE.LSTM: [
"clip",
"hidden_size",
"input_forget"
],
ONNX_RNN_TYPE.GRU: {
"clip",
"hidden_size",
"linear_before_reset"
}
}
onnx_rnn_seq_len_index_mapping = {
ONNX_RNN_TYPE.LSTM: 4,
ONNX_RNN_TYPE.GRU: 4
}
def find_bidirectional_rnns(g, ops, rnn_type):
"""
Find possible bidirectional rnns, return: list of tuple,
Format of tuple is (fw onnx rnn node, bw onnx rnn node).
"""
fw_rnns = defaultdict(list)
bw_rnns = defaultdict(list)
for n in g.get_nodes():
if n.type != onnx_rnn_type_mapping[rnn_type]:
continue
input_id = n.input[0]
temp = n.inputs[0]
is_bw = False
if temp.type == "Transpose":
input_id = temp.input[0]
temp = temp.inputs[0]
if utils.is_tf_reverse_op(temp):
input_id = temp.input[0]
is_bw = True
if is_bw:
# if output 0 is consumed and there is no reverse after the 1st output.
# it's not backward rnn.
if g.find_output_consumers(n.output[0]) and not get_reverse_nodes_after_y_output(g, n):
logger.warning("rnn %s following Reverse op isn't the part of bi-rnn.", n.name)
continue
logger.debug("find bw rnn %s", input_id)
bw_rnns[input_id].append(n)
else:
logger.debug("find fw rnn %s", input_id)
fw_rnns[input_id].append(n)
# fw_rnn and bw_rnn must share the same input
birnn_input = list(set(fw_rnns.keys()).intersection(bw_rnns.keys()))
bi_rnns = []
matched_rnn = []
for inp in birnn_input:
fw_rnn = fw_rnns[inp]
bw_rnn = bw_rnns[inp]
# it's possible several bi-rnns share the same input
for fw_n in fw_rnn:
for bw_n in bw_rnn:
if belong_to_birnn(g, fw_n, bw_n, rnn_type) and \
fw_n not in matched_rnn and bw_n not in matched_rnn:
logger.debug("found birnn comprising %s and %s", fw_n.name, bw_n.name)
bi_rnns.append((fw_n, bw_n))
matched_rnn.extend([fw_n, bw_n])
return bi_rnns
def belong_to_birnn(g, fw_rnn, bw_rnn, rnn_type):
"""
Check whether fw_rnn and bw_rnn are part of the same birnn.
If fw_rnn and bw_rnn have the same attributes except those related to activation
and share the same seq_len, they are able to be merged into a bi-rnn.
"""
logger.debug("check whether %s and %s are part of birnn", fw_rnn.name, bw_rnn.name)
for name in onnx_rnn_attr_mapping[rnn_type]:
fw_attr_value = fw_rnn.get_attr_value(name)
bw_attr_value = bw_rnn.get_attr_value(name)
if fw_attr_value != bw_attr_value:
logger.debug(
"fw_rnn and bw_rnn mismatch at attr %s: %s, %s",
name, fw_attr_value, bw_attr_value
)
return False
seq_len_index = onnx_rnn_seq_len_index_mapping[rnn_type]
fw_seq_len = fw_rnn.input[seq_len_index]
bw_seq_len = bw_rnn.input[seq_len_index]
if not utils.have_same_inference_value(g, fw_seq_len, bw_seq_len):
logger.debug(
"fw_rnn and bw_rnn have different seq_len input: %s, %s",
fw_seq_len, bw_seq_len
)
return False
return True
def get_reverse_nodes_after_y_output(g, rnn_bw):
bw_consumers = g.find_output_consumers(rnn_bw.output[0])
# todo: figure out a better way to remove reverse op
squeeze_nodes = [c for c in bw_consumers if c.type == "Squeeze"]
s_cnt = len(squeeze_nodes)
if s_cnt == 1:
s = squeeze_nodes[0]
trans_nodes = g.find_output_consumers(s.output[0])
if len(trans_nodes) == 1:
if trans_nodes[0].type == "Transpose":
reverse_nodes = g.find_output_consumers(trans_nodes[0].output[0])
elif utils.is_tf_reverse_op(trans_nodes[0]):
reverse_nodes = trans_nodes
else:
logger.debug("not found reverse op, unexpected")
return []
are_all_reverse = all([utils.is_tf_reverse_op(r_op) for r_op in reverse_nodes])
if are_all_reverse:
return reverse_nodes
logger.debug("bw y output is used followed by reverse node")
return []
logger.debug("unexpected number of transpose after RNN 1st output:%s", s_cnt)
return []
logger.debug("unexpected number of squeeze following RNN 1st output:%s", s_cnt)
return []
def get_np_val_for_const(g, node, input_index):
return node.inputs[input_index].get_tensor_value(as_list=False)
def check_const(g, input_id):
node = g.get_node_by_output(input_id)
if node and node.is_const():
return (True, node.get_tensor_value(as_list=False))
return (None, None)
def process_single_init_node(g, fw_init_input_id, bw_init_input_id, to_append):
fw_init_is_const, init_fw_val = check_const(g, fw_init_input_id)
bw_init_is_const, init_bw_val = check_const(g, bw_init_input_id)
if fw_init_is_const and bw_init_is_const:
initial_val = np.concatenate((init_fw_val, init_bw_val), axis=0)
init_name = utils.make_name("initial")
init_node = g.make_const(init_name, initial_val, skip_conversion=True)
else:
init_node = g.make_node("Concat", [fw_init_input_id, bw_init_input_id], attr={"axis": 0})
to_append.append(init_node)
return init_node
def slice_birnn_for_original_rnn_consumers(g, rnn_fw, rnn_bw, bi_rnn, rnn_output_index, all_nodes, to_remove):
fw_consumers = g.find_output_consumers(rnn_fw.output[rnn_output_index])
bw_consumers = g.find_output_consumers(rnn_bw.output[rnn_output_index])
if not fw_consumers and not bw_consumers:
return
if rnn_output_index == 0:
axis = 1
# remove reverse op for rnn_bw
reverse_nodes = get_reverse_nodes_after_y_output(g, rnn_bw)
for r_op in reverse_nodes:
logger.debug("remove reverse op %s", r_op.name)
g.replace_all_inputs(r_op.output[0], r_op.input[0], ops=all_nodes)
to_remove.append(r_op.name)
elif rnn_output_index in [1, 2]:
axis = 0
else:
raise ValueError("rnn only should has 3 outputs.")
if fw_consumers:
attr = {"axes": [axis], "starts": [0], "ends": [1]}
inputs_map = {"data": bi_rnn.output[rnn_output_index], **attr}
slice_node_fw = GraphBuilder(g).make_slice(inputs_map)
all_nodes.append(g.get_node_by_output(slice_node_fw))
g.replace_all_inputs(rnn_fw.output[rnn_output_index], slice_node_fw, ops=fw_consumers)
if bw_consumers:
attr = {"axes": [axis], "starts": [1], "ends": [2]}
inputs_map = {"data": bi_rnn.output[rnn_output_index], **attr}
slice_node_bw = GraphBuilder(g).make_slice(inputs_map)
all_nodes.append(g.get_node_by_output(slice_node_bw))
g.replace_all_inputs(rnn_bw.output[rnn_output_index], slice_node_bw, ops=bw_consumers)
def remove_reverse_in_bw_input(g, bw_rnn_input_x, rnn_type):
old_x_consumers = g.find_output_consumers(bw_rnn_input_x)
# the transpose/reverse here must be followed by RNN if it is still useful.
# this is guaranteed by dynamic_rnn logic.
old_x_has_rnn_as_consumer = [n for n in old_x_consumers if n.type == onnx_rnn_type_mapping[rnn_type]]
if not old_x_has_rnn_as_consumer:
logger.debug("plan to remove useless reverse op in bw")
reverse_node = g.get_node_by_output(bw_rnn_input_x)
if reverse_node.type == "Transpose":
reverse_node = reverse_node.inputs[0]
g.replace_all_inputs(reverse_node.output[0], reverse_node.input[0]) # ops=g.get_nodes()
g.remove_node(reverse_node.name)
else:
raise ValueError("Reverse is still used by RNN as input, cannot remove")
| 34.940273 | 110 | 0.591013 |
4a1e0e7f8e629d87f457af6fe5d0225893b6c81b | 6,367 | py | Python | fhir/resources/DSTU2/referralrequest.py | cstoltze/fhir.resources | 52f99738935b7313089d89daf94d73ce7d167c9d | [
"BSD-3-Clause"
] | 144 | 2019-05-08T14:24:43.000Z | 2022-03-30T02:37:11.000Z | fhir/resources/DSTU2/referralrequest.py | cstoltze/fhir.resources | 52f99738935b7313089d89daf94d73ce7d167c9d | [
"BSD-3-Clause"
] | 82 | 2019-05-13T17:43:13.000Z | 2022-03-30T16:45:17.000Z | fhir/resources/DSTU2/referralrequest.py | cstoltze/fhir.resources | 52f99738935b7313089d89daf94d73ce7d167c9d | [
"BSD-3-Clause"
] | 48 | 2019-04-04T14:14:53.000Z | 2022-03-30T06:07:31.000Z | # -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/DSTU2/referralrequest.html
Release: DSTU2
Version: 1.0.2
Revision: 7202
"""
from typing import List as ListType
from pydantic import Field
from . import domainresource, fhirtypes
class ReferralRequest(domainresource.DomainResource):
"""A request for referral or transfer of care
Used to record and send details about a request for referral service or
transfer of a patient to the care of another provider or provider organization.
"""
resource_type = Field("ReferralRequest", const=True)
status: fhirtypes.Code = Field(
None,
alias="status",
title="Type `Code` (represented as `dict` in JSON).",
description="draft | requested | active | cancelled | accepted | rejected | completed",
# note: Enum values can be used in validation,
        # but use them at your own responsibility; read the official FHIR documentation.
enum_values=[
"draft",
"requested",
"active",
"cancelled",
"accepted",
"rejected",
"completed",
],
element_property=True,
)
identifier: ListType[fhirtypes.IdentifierType] = Field(
None,
alias="identifier",
title="Business identifier",
description="The workflow status of the referral or transfer of care request.",
element_property=True,
)
date: fhirtypes.DateTime = Field(
None,
alias="date",
title="Date of creation/activation",
description=(
"Date/DateTime of creation for draft requests "
"and date of activation for active requests."
),
element_property=True,
)
type: fhirtypes.CodeableConceptType = Field(
None,
alias="type",
title="Type `CodeableConcept` (represented as `dict` in JSON).",
description="Referral/Transition of care request type",
element_property=True,
)
specialty: fhirtypes.CodeableConceptType = Field(
None,
alias="specialty",
title="Type `CodeableConcept` (represented as `dict` in JSON).",
description="The clinical specialty (discipline) that the referral is requested for",
element_property=True,
)
priority: fhirtypes.CodeableConceptType = Field(
None,
alias="priority",
title="Type `CodeableConcept` (represented as `dict` in JSON).",
description="Urgency of referral / transfer of care request",
element_property=True,
)
patient: fhirtypes.ReferenceType = Field(
None,
alias="patient",
title="Type 'Reference' referencing 'Patient' (represented as 'dict' in JSON).",
description="Patient referred to care or transfer",
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=["Patient"],
element_property=True,
)
requester: fhirtypes.ReferenceType = Field(
None,
alias="requester",
title=(
"Type 'Reference' referencing 'Practitioner', 'Organization' and "
"'Patient' (represented as 'dict' in JSON)."
),
description="Requester of referral / transfer of care",
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=["Practitioner", "Organization", "Patient"],
element_property=True,
)
recipient: ListType[fhirtypes.ReferenceType] = Field(
None,
alias="recipient",
title=(
"Type 'Reference' referencing 'Practitioner' and 'Organization'"
" (represented as 'dict' in JSON)."
),
description="Receiver of referral / transfer of care request",
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=["Practitioner", "Organization"],
element_property=True,
)
encounter: fhirtypes.ReferenceType = Field(
None,
alias="encounter",
title="Type 'Reference' referencing 'Encounter' (represented as 'dict' in JSON).",
description="Originating encounter",
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=["Encounter"],
element_property=True,
)
dateSent: fhirtypes.DateTime = Field(
None,
alias="dateSent",
title="Date referral/transfer of care request is sent",
description=(
"Date/DateTime the request for referral or transfer "
"of care is sent by the author."
),
element_property=True,
)
reason: fhirtypes.CodeableConceptType = Field(
None,
alias="reason",
title="Type `CodeableConcept` (represented as `dict` in JSON).",
description="Reason for referral / transfer of care request",
element_property=True,
)
description: fhirtypes.String = Field(
None,
alias="description",
title="Type `String` (represented as `dict` in JSON)",
description="A textual description of the referral",
element_property=True,
)
serviceRequested: ListType[fhirtypes.CodeableConceptType] = Field(
None,
alias="serviceRequested",
title="Type `CodeableConcept` (represented as `dict` in JSON).",
description="Actions requested as part of the referral",
element_property=True,
)
supportingInformation: ListType[fhirtypes.ReferenceType] = Field(
None,
alias="supportingInformation",
title="Type 'Reference' referencing 'Any' (represented as 'dict' in JSON).",
        description="Additional information to support referral or transfer of care request",
# note: Listed Resource Type(s) should be allowed as Reference.
enum_reference_types=["Any"],
element_property=True,
)
fulfillmentTime: fhirtypes.PeriodType = Field(
None,
alias="fulfillmentTime",
title="Requested service(s) fulfillment time",
description=(
"The period of time within which the services identified in the "
"referral/transfer of care is specified or required to occur."
),
# if property is element of this resource.
element_property=True,
)
| 33.867021 | 95 | 0.627925 |
4a1e0ea2f066d60846c01eaf3d5c581da08c2977 | 473 | py | Python | HackerRank/Python/Easy/E0087.py | Mohammed-Shoaib/HackerRank-Problems | ccfb9fc2f0d8dff454439d75ce519cf83bad7c3b | [
"MIT"
] | 54 | 2019-05-13T12:13:09.000Z | 2022-02-27T02:59:00.000Z | HackerRank/Python/Easy/E0087.py | Mohammed-Shoaib/HackerRank-Problems | ccfb9fc2f0d8dff454439d75ce519cf83bad7c3b | [
"MIT"
] | 2 | 2020-10-02T07:16:43.000Z | 2020-10-19T04:36:19.000Z | HackerRank/Python/Easy/E0087.py | Mohammed-Shoaib/HackerRank-Problems | ccfb9fc2f0d8dff454439d75ce519cf83bad7c3b | [
"MIT"
] | 20 | 2020-05-26T09:48:13.000Z | 2022-03-18T15:18:27.000Z | # Problem Statement: https://www.hackerrank.com/challenges/html-parser-part-2/problem
import sys
from html.parser import HTMLParser
class MyHTMLParser(HTMLParser):
def handle_data(self, data):
if data != '\n':
print(f'>>> Data\n{data}')
def handle_comment(self, data):
if '\n' in data:
print('>>> Multi-line Comment')
else:
print('>>> Single-line Comment')
print(data)
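# For example (a sketch; the first input line is the line count N), feeding:
#   2
#   <!-- a comment --><div>data</div>
# prints:
#   >>> Single-line Comment
#    a comment
#   >>> Data
#   data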
N = input()
parser = MyHTMLParser()
parser.feed(sys.stdin.read())
parser.close() | 21.5 | 85 | 0.689218 |
4a1e0ead252d7d88fe5a51276d58cb99807cfcb8 | 13,958 | py | Python | Addons/ImportFbxWithVertexColor.py | Stmdotcom/Spiraloid-Toolkit-for-Blender | 9a09d3ece77338a18776ff8719aff9bf15fa99ed | [
"MIT"
] | 65 | 2018-08-21T08:59:41.000Z | 2022-02-18T08:50:02.000Z | Addons/ImportFbxWithVertexColor.py | Stmdotcom/Spiraloid-Toolkit-for-Blender | 9a09d3ece77338a18776ff8719aff9bf15fa99ed | [
"MIT"
] | 4 | 2018-09-16T16:43:38.000Z | 2022-02-21T00:30:19.000Z | Addons/ImportFbxWithVertexColor.py | Stmdotcom/Spiraloid-Toolkit-for-Blender | 9a09d3ece77338a18776ff8719aff9bf15fa99ed | [
"MIT"
] | 14 | 2019-03-30T16:07:20.000Z | 2021-07-03T04:32:01.000Z | bl_info = {
'name': 'ImportFbxWithVertexColor',
'author': 'Bay Raitt',
'version': (0, 4),
'blender': (2, 80, 0),
"description": "Imports an .fbx file with vertex color and sets up the material and creates an optional decimation",
'category': 'Import-Export',
'location': 'File > Import/Export',
'wiki_url': ''}
import bpy
from bpy_extras.io_utils import ImportHelper
import os.path
import bpy, os
from bpy.props import *
import os
import warnings
import re
from itertools import count, repeat
from collections import namedtuple
from math import pi
import bpy
from bpy.types import Operator
from mathutils import Vector
from bpy.props import (
StringProperty,
BoolProperty,
EnumProperty,
FloatProperty,
CollectionProperty,
)
def highlightObjects(selection_list):
for i in selection_list:
bpy.data.objects[i.name].select_set(state=True)
class BR_OT_import_fbx_with_vertex_color(bpy.types.Operator, ImportHelper):
"""Import Multiple .FBX Files at Once"""
bl_idname = "import_scene.fbx_w_color"
bl_label = 'FBX (.fbx) w vertex color'
bl_options = {'REGISTER', 'UNDO'}
bl_description = "FBX Files w Vertex Color"
# ImportHelper mixin class uses this
    filename_ext = ".fbx"
    filter_glob : StringProperty(
        default="*.fbx",
options={'HIDDEN'},
)
# Selected files
files : CollectionProperty(type=bpy.types.PropertyGroup)
# List of operator properties, the attributes will be assigned
image_search_setting : BoolProperty(
name="Image Search",
description="Search subdirs for any associated images "
"(Warning, may be slow)",
default=True,
)
decimate_setting : BoolProperty(
name="Decimate Mesh",
description="Decimate each layer"
"(Warning, may be slow)",
default=True,
)
apply_setting : BoolProperty(
name="Apply Decimation",
description="Apply all decimation modifiers"
"(Warning, may be slow)",
default=True,
)
material_setting : BoolProperty(
name="Single Material",
description="Use single material for all objects",
default=True,
)
parent_setting : BoolProperty(
name="Group",
description="Parent all subobjects underneath an empty",
default=True,
)
decimate_ratio : FloatProperty(
name="Ratio",
description="Choose percentage to reduce mesh by",
min=0.0 , max=1.0,
default=0.25,
)
axis_forward_setting : EnumProperty(
name="Forward",
items=(('X', "X Forward", ""),
('Y', "Y Forward", ""),
('Z', "Z Forward", ""),
('-X', "-X Forward", ""),
('-Y', "-Y Forward", ""),
('-Z', "-Z Forward", ""),
),
default='-Y',
)
axis_up_setting : EnumProperty(
name="Up",
items=(('X', "X Up", ""),
('Y', "Y Up", ""),
('Z', "Z Up", ""),
('-X', "-X Up", ""),
('-Y', "-Y Up", ""),
('-Z', "-Z Up", ""),
),
default='Z',
)
def draw(self, context):
layout = self.layout
row = layout.row(align=True)
        box = layout.box()
        row = box.row()
row.prop(self, "decimate_setting")
row.prop(self, "decimate_ratio")
layout.prop(self, "apply_setting")
layout.prop(self, "material_setting")
layout.prop(self, "parent_setting")
layout.prop(self, "axis_forward_setting")
layout.prop(self, "axis_up_setting")
# layout.prop(self, "image_search_setting")
def execute(self, context):
# get the folder
folder = (os.path.dirname(self.filepath))
# iterate through the selected files
for i in self.files:
newObjects = ""
# generate full path to file
path_to_file = (os.path.join(folder, i.name))
# call obj operator and assign ui values
bpy.ops.import_scene.fbx(filepath = path_to_file,
use_manual_orientation=True,
axis_forward = self.axis_forward_setting,
axis_up = self.axis_up_setting
)
# use_image_search = self.image_search_setting
# use_manual_orientation=False,
# global_scale=1,
# bake_space_transform=False,
# use_custom_normals=True,
# use_image_search=True,
# use_alpha_decals=False,
# decal_offset=0,
# use_anim=True,
# anim_offset=1,
# use_custom_props=True,
# use_custom_props_enum_as_string=True,
# ignore_leaf_bones=False,
# force_connect_children=False,
# automatic_bone_orientation=False,
# primary_bone_axis='Y', secondary_bone_axis='X',
# use_prepost_rot=True
#
imported_objects = bpy.context.selected_objects
print('Imported name: ', imported_objects)
if self.material_setting:
assetName = i.name.replace(".fbx", '')
matName = (assetName + "Mat")
mat = bpy.data.materials.new(name=matName)
mat.use_nodes = True
bsdf = mat.node_tree.nodes["Principled BSDF"]
colAttr = mat.node_tree.nodes.new('ShaderNodeAttribute')
mat.node_tree.links.new(bsdf.inputs['Base Color'], colAttr.outputs['Color'])
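                # The Attribute node will read the mesh's vertex-color layer and
                # drive the Principled BSDF's Base Color, so vertex colors render
                # without any texture maps.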
for ob in imported_objects:
#Check if object is a Mesh
if ob.type == 'MESH':
if ob.data.vertex_colors:
if not self.material_setting:
matName = (ob.name + "Mat")
mat = bpy.data.materials.new(name=matName)
mat.use_nodes = True
bsdf = mat.node_tree.nodes["Principled BSDF"]
colAttr = mat.node_tree.nodes.new('ShaderNodeAttribute')
colAttr.attribute_name = ob.data.vertex_colors[0].name
mat.node_tree.links.new(bsdf.inputs['Base Color'], colAttr.outputs['Color'])
# Assign it to object
if ob.data.materials:
ob.data.materials[0] = mat
else:
ob.data.materials.append(mat)
else:
mat.use_nodes = True
colAttr.attribute_name = ob.data.vertex_colors[0].name
# Assign it to object
if ob.data.materials:
ob.data.materials[0] = mat
else:
ob.data.materials.append(mat)
if self.decimate_setting:
mod = bpy.data.objects[ob.name].modifiers.new(name='Decimate',type='DECIMATE')
mod.decimate_type = 'DISSOLVE'
mod.angle_limit = 0.0872665
mod2 = bpy.data.objects[ob.name].modifiers.new(name='Decimate',type='DECIMATE')
mod2.use_collapse_triangulate = True
mod2.ratio = self.decimate_ratio
if self.apply_setting:
bpy.ops.object.select_all(action='DESELECT')
ob.select_set(state=True)
bpy.context.view_layer.objects.active = ob
for mod in [m for m in ob.modifiers if m.type == 'DECIMATE']:
bpy.ops.object.modifier_apply(modifier=mod.name)
if self.parent_setting:
assetName = i.name.replace(".fbx", '')
bpy.ops.object.select_all(action='DESELECT')
bpy.ops.object.empty_add(type='CIRCLE', align='WORLD', radius=(1), location=(0, 0, 0), rotation=(1.5708, 0, 0))
obj = bpy.context.view_layer.objects.active
obj.name = assetName
layer = bpy.context.view_layer
layer.update()
for ob in imported_objects:
bpy.ops.object.select_all(action='DESELECT')
obj.select_set(state=True)
bpy.context.view_layer.objects.active = obj
ob.select_set(state=True)
bpy.ops.object.parent_set(type='OBJECT', keep_transform=True)
layer = bpy.context.view_layer
layer.update()
# utils for batch processing kitbash stamps.
# bpy.context.scene.objects.active = bpy.context.selected_objects[0]
# bpy.ops.object.modifier_add(type='SUBSURF')
# bpy.context.object.modifiers["Subsurf"].levels = 2
# bpy.ops.object.modifier_add(type='DECIMATE')
# bpy.context.object.modifiers["Decimate.001"].ratio = 0.5
# bpy.ops.object.modifier_add(type='DECIMATE')
# bpy.context.object.modifiers["Decimate.001"].decimate_type = 'DISSOLVE'
# bpy.context.object.modifiers["Decimate.001"].delimit = {'SEAM', 'SHARP'}
# bpy.context.object.modifiers["Decimate.001"].angle_limit = 0.0523599
# bpy.ops.object.modifier_add(type='TRIANGULATE')
# bpy.context.object.modifiers["Triangulate"].quad_method = 'BEAUTY'
# # bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Decimate")
# # bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Decimate.001")
# # bpy.ops.object.modifier_apply(apply_as='DATA', modifier="Triangulate")
# bpy.ops.object.mode_set(mode='EDIT')
# bpy.ops.mesh.select_all(action = 'DESELECT')
# bpy.ops.mesh.select_all(action='TOGGLE')
# bpy.ops.mesh.tris_convert_to_quads()
# bpy.ops.mesh.faces_shade_smooth()
# bpy.ops.mesh.mark_sharp(clear=True)
# bpy.ops.mesh.mark_sharp(clear=True, use_verts=True)
# #if not bpy.context.object.data.uv_layers:
# bpy.ops.uv.smart_project(island_margin=0.01 , user_area_weight=0.75)
# bpy.ops.object.mode_set(mode='OBJECT')
# bpy.context.object.data.use_auto_smooth = True
# bpy.context.object.data.auto_smooth_angle = 0.575959
bpy.ops.object.select_all(action='DESELECT')
return {'FINISHED'}
filepath : StringProperty(name="File path", description="File filepath of Fbx", maxlen=4096, default="")
filter_folder : BoolProperty(name="Filter folders", description="", default=True, options={'HIDDEN'})
filter_glob : StringProperty(default="*.fbx", options={'HIDDEN'})
files : CollectionProperty(name='File path', type=bpy.types.OperatorFileListElement)
filename_ext = '.fbx'
#@classmethod
#def poll(cls, context):
# return context.active_object is not None and context.active_object.type == 'MESH'
# ImportHelper mixin class uses this
filename_ext = ".fbx"
filter_glob : StringProperty(
default="*.fbx",
options={'HIDDEN'},
)
# Selected files
files : CollectionProperty(type=bpy.types.PropertyGroup)
def menu_import_draw(self, context):
self.layout.operator(BR_OT_import_fbx_with_vertex_color.bl_idname)
def menu_export_draw(self, context):
    # BR_OT_export_shapekey_as_obj is not defined in this file; drawing it
    # would raise a NameError, so the export entry is left as a no-op.
    pass
classes = (
BR_OT_import_fbx_with_vertex_color,
)
def register():
from bpy.utils import register_class
for cls in classes:
register_class(cls)
bpy.types.TOPBAR_MT_file_import.append(menu_import_draw)
bpy.types.TOPBAR_MT_file_export.append(menu_export_draw)
def unregister():
from bpy.utils import unregister_class
for cls in reversed(classes):
unregister_class(cls)
bpy.types.TOPBAR_MT_file_import.remove(menu_import_draw)
bpy.types.TOPBAR_MT_file_export.remove(menu_export_draw)
if __name__ != "__main__":
bpy.types.VIEW3D_MT_view.remove(menu_import_draw)
bpy.types.VIEW3D_MT_view.remove(menu_export_draw)
if __name__ == "__main__":
try:
# by running unregister here we can run this script
# in blenders text editor
# the first time we run this script inside blender
# we get an error that removing the changes fails
unregister()
except:
pass
register()
| 37.023873 | 127 | 0.523427 |
4a1e0f0ee0cc458aa82e8b05c426067fb736b5fe | 5,187 | py | Python | pruebas.py | JCGALVIS/Proyecto2Programacion20191 | 3054e61ddb9f89427dad8b96dcdd0f0b4d92eda2 | [
"MIT"
] | null | null | null | pruebas.py | JCGALVIS/Proyecto2Programacion20191 | 3054e61ddb9f89427dad8b96dcdd0f0b4d92eda2 | [
"MIT"
] | 1 | 2019-03-27T16:01:26.000Z | 2019-03-27T16:01:26.000Z | pruebas.py | JCGALVIS/Proyecto2Programacion20191 | 3054e61ddb9f89427dad8b96dcdd0f0b4d92eda2 | [
"MIT"
] | null | null | null | import sys
import unittest
import adn as adn
class prueba(unittest.TestCase):
def test_obtener_complemento(self):
self.assertEqual(adn.obtener_complemento('A'), 'T')
self.assertEqual(adn.obtener_complemento('T'), 'A')
self.assertEqual(adn.obtener_complemento('C'), 'G')
self.assertEqual(adn.obtener_complemento('G'), 'C')
with self.assertRaises(TypeError):
adn.obtener_complemento('f')
def test_generar_cadena_complementaria(self):
self.assertEqual(adn.generar_cadena_complementaria('GATA'), 'CTAT')
self.assertEqual(adn.generar_cadena_complementaria('CTGT'), 'GACA')
with self.assertRaises(TypeError):
adn.generar_cadena_complementaria('GCTH')
def test_calcular_correspondencia(self):
self.assertEqual(adn.calcular_correspondencia('GATA', 'CATA'), 0.75)
self.assertEqual(adn.calcular_correspondencia('TCT', 'GAT'), 0.3333333333333333)
with self.assertRaises(TypeError):
adn.calcular_correspondencia('ABCD', 'FGHI')
def test_corresponden(self):
        self.assertEqual(adn.corresponden('GATA', 'GATA'), True)
        self.assertEqual(adn.corresponden('TCT', 'GAT'), False)
        self.assertEqual(adn.corresponden('TCTG', 'GATC'), False)
def test_es_cadena_valida(self):
self.assertEquals(adn.es_cadena_valida('AGATA'),True)
self.assertEquals(adn.es_cadena_valida('GATA'),True)
self.assertEquals(adn.es_cadena_valida('CCTT'), True)
self.assertEquals(adn.es_cadena_valida('ALAT'), False)
self.assertEquals(adn.es_cadena_valida('ATCB'), False)
self.assertEquals(adn.es_cadena_valida('OGTAU'), False)
def test_es_base(self):
self.assertEquals(adn.es_base('A'), True)
self.assertEquals(adn.es_base('T'), True)
self.assertEquals(adn.es_base('C'), True)
self.assertEquals(adn.es_base('C'), True)
self.assertEquals(adn.es_base('T'), True)
self.assertEquals(adn.es_base('a'), True)
self.assertEquals(adn.es_base('B'), False)
self.assertEquals(adn.es_base('B'), False)
with self.assertRaises(ValueError):
adn.es_base('AT')
with self.assertRaises(ValueError):
adn.es_base('1')
with self.assertRaises(ValueError):
adn.es_base('')
def test_es_subcadena(self):
self.assertEquals(adn.es_subcadena('AGATA','AGA'), True)
self.assertEquals(adn.es_subcadena('GTAC','TA'), True)
self.assertEquals(adn.es_subcadena('GGCT', 'GCT'), True)
self.assertEquals(adn.es_subcadena('CCTT', 'CTC'), False)
with self.assertRaises(ValueError):
adn.es_subcadena('TC','TCP')
with self.assertRaises(ValueError):
adn.es_subcadena('LI', 'KO')
with self.assertRaises(ValueError):
adn.es_subcadena('AG','NM')
self.assertEquals(adn.es_subcadena('ATA', 'ATA'), 'Las cadenas son iguales')
with self.assertRaises(ValueError):
adn.es_subcadena('TC', 'TCP')
def test_reparar_dano(self):
self.assertEquals(adn.reparar_dano(['I','A','T','A'],'G'),['G', 'A', 'T', 'A'])
self.assertEquals(adn.reparar_dano(['C', 'A', 'B'], 'C'), ['C', 'A', 'C'])
self.assertEquals(adn.reparar_dano(['T', 'U', 'A'], 'A'), ['T', 'A', 'A'])
with self.assertRaises(ValueError):
adn.reparar_dano(['G', 'P', 'C', 'G'], 'H'),['C', 'A', 'C']
def test_obtener_secciones(self):
self.assertEqual(adn.obtener_secciones('AGATAGA', 3), 'AG ATAGA')
self.assertEqual(adn.obtener_secciones('GATATACA', 4), 'GA TATACA')
self.assertEqual(adn.obtener_secciones('tacaga', 2), 'TAC AGA')
        self.assertRaises(ZeroDivisionError, lambda: adn.obtener_secciones('ACCG', 0))
with self.assertRaises(TypeError):
adn.obtener_secciones('ACGC', 'A')
with self.assertRaises(TypeError):
adn.obtener_secciones('GCTI')
def test_obtener_complementos(self):
self.assertEqual(adn.obtener_complementos(['GATATACA', 'TATACACA', 'TCTATGTA', 'TAGAGATA', 'GATA']),
['CTATATGT', 'ATATGTGT', 'AGATACAT', 'ATCTCTAT', 'CTAT'])
self.assertEqual(adn.obtener_complementos(['tagata', 'cataga', 'gataca']), ['ATCTAT', 'GTATGT', 'CTATGT'])
with self.assertRaises(TypeError):
adn.obtener_complementos(['GCTA', 'UTASF'])
def test_unir_cadena(self):
self.assertEqual(adn.unir_cadena(['GACA', 'TAC', 'GATA']), 'GACATACGATA')
self.assertEqual(adn.unir_cadena(['TATA', 'GATAGA', 'TG']), 'TATAGATAGATG')
with self.assertRaises(TypeError):
adn.unir_cadena(['FAFSAS', 'UTASF'])
def test_complementar_cadenas(self):
self.assertEqual(adn.complementar_cadenas(['GATATA', 'TATACA', 'CAGATCA']), ['CTATAT', 'ATATGT', 'GTCTAGT'])
self.assertEqual(adn.complementar_cadenas(['TATACAGA', 'TATAGA', 'TCACAG']), ['ATATGTCT', 'ATATCT', 'AGTGTC'])
with self.assertRaises(TypeError):
adn.unir_cadena(['AFTAGA', 'UTASF'])
if __name__ == '__main__':
unittest.main()
| 46.72973 | 118 | 0.640833 |
4a1e0f3d009cd37d5796cfb48871d54f8a8da86c | 1,084 | py | Python | apero/tools/module/setup/drs_trigger.py | njcuk9999/apero-drs | 83b043e9f277a011b03e0227c77307961b200901 | [
"MIT"
] | 1 | 2021-03-09T17:49:31.000Z | 2021-03-09T17:49:31.000Z | apero/tools/module/setup/drs_trigger.py | njcuk9999/apero-drs | 83b043e9f277a011b03e0227c77307961b200901 | [
"MIT"
] | 43 | 2020-10-06T18:42:24.000Z | 2022-03-28T21:23:10.000Z | apero/tools/module/setup/drs_trigger.py | njcuk9999/apero-drs | 83b043e9f277a011b03e0227c77307961b200901 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
# CODE DESCRIPTION HERE
Created on 2019-11-02 10:09
@author: ncook
Version 0.0.1
"""
# =============================================================================
# Define variables
# =============================================================================
# -----------------------------------------------------------------------------
# =============================================================================
# Define functions
# =============================================================================
# =============================================================================
# Start of code
# =============================================================================
# Main code here
if __name__ == "__main__":
# ----------------------------------------------------------------------
# Main code here
pass
# =============================================================================
# End of code
# ============================================================================= | 30.111111 | 79 | 0.168819 |
4a1e101650b97bf512272a22f7a8e29f232aa680 | 2,828 | py | Python | test/functional/p2p_timeouts.py | Lexxos/dash | 6f2e0420c3a8ba1f9ffbc4601e31ff760b9446b9 | [
"MIT"
] | 1 | 2021-12-14T10:08:03.000Z | 2021-12-14T10:08:03.000Z | test/functional/p2p_timeouts.py | Lexxos/digital-cash | 6f2e0420c3a8ba1f9ffbc4601e31ff760b9446b9 | [
"MIT"
] | null | null | null | test/functional/p2p_timeouts.py | Lexxos/digital-cash | 6f2e0420c3a8ba1f9ffbc4601e31ff760b9446b9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test various net timeouts.
- Create three test nodes:
no_verack_node - we never send a verack in response to their version
no_version_node - we never send a version (only a ping)
no_send_node - we never send any P2P message.
- Start all three nodes
- Wait 1 second
- Assert that we're connected
- Send a ping to no_verack_node and no_version_node
- Wait 1 second
- Assert that we're still connected
- Send a ping to no_verack_node and no_version_node
- Wait 2 seconds
- Assert that we're no longer connected (timeout to receive version/verack is 3 seconds)
"""
from time import sleep
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class TestP2PConn(P2PInterface):
def on_version(self, message):
# Don't send a verack in response
pass
class TimeoutsTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
# set timeout to receive version/verack to 3 seconds
self.extra_args = [["-peertimeout=3"]]
def run_test(self):
# Setup the p2p connections and start up the network thread.
no_verack_node = self.nodes[0].add_p2p_connection(TestP2PConn())
no_version_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False)
no_send_node = self.nodes[0].add_p2p_connection(TestP2PConn(), send_version=False)
network_thread_start()
sleep(1)
assert no_verack_node.is_connected
assert no_version_node.is_connected
assert no_send_node.is_connected
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
sleep(1)
assert "version" in no_verack_node.last_message
assert no_verack_node.is_connected
assert no_version_node.is_connected
assert no_send_node.is_connected
no_verack_node.send_message(msg_ping())
no_version_node.send_message(msg_ping())
expected_timeout_logs = [
"version handshake timeout from 0",
"socket no message in first 3 seconds, 1 0 from 1",
"socket no message in first 3 seconds, 0 0 from 2",
]
with self.nodes[0].assert_debug_log(expected_msgs=expected_timeout_logs):
sleep(3 + 1) # Sleep one second more than peertimeout
assert not no_verack_node.is_connected
assert not no_version_node.is_connected
assert not no_send_node.is_connected
if __name__ == '__main__':
TimeoutsTest().main()
| 33.666667 | 93 | 0.709335 |
4a1e10949541756adce2b76f91f1de189bb2c7f4 | 5,539 | py | Python | nits/file.py | scott-howard-james/nits | 9ac76b6885e3306083cc4931a9428ae99a976a04 | [
"Apache-2.0"
] | null | null | null | nits/file.py | scott-howard-james/nits | 9ac76b6885e3306083cc4931a9428ae99a976a04 | [
"Apache-2.0"
] | null | null | null | nits/file.py | scott-howard-james/nits | 9ac76b6885e3306083cc4931a9428ae99a976a04 | [
"Apache-2.0"
] | null | null | null | # standard
from collections import defaultdict, OrderedDict
import csv
import sys
import tempfile
import unittest
class File:
'''
An abstract class simplifying file access through the use of only two functions:
- read (file)
- write (data, file):
'''
@classmethod
def read(cls, filename):
'''
return file elements in a generator
'''
assert False
@classmethod
def write(cls, data, filename):
'''
write data to filename
'''
assert False
@staticmethod
def decomment(file, comment):
for row in file:
if comment is None:
yield row
else:
raw = row.split(comment)[0].strip()
if raw:
yield raw
class Text(File):
'''
Instantiate the File class for a simple text file
'''
@classmethod
def read(cls, filename, comment=None, blanklines=False, strip=True):
'''
- comment: ignore comments
- blanklines: ignore blank lines
        - strip: strip whitespace
'''
def line(d):
if comment is None:
return d
elif comment not in d:
return d
else:
return d[:d.index(comment)].strip()
with open(filename, 'rt') as f:
for datum in f:
if strip:
d = datum.strip()
else:
d = datum.rstrip()
if blanklines:
yield line(d)
elif len(d) > 0:
remnant = line(d)
if len(remnant) > 0:
yield remnant
@classmethod
def write(cls,
data,
filename,
eol='\n' # explicitly change the End of Line marker
):
if filename is None:
f = sys.stdout
else:
f = open(filename, 'wt')
with f:
for datum in data:
f.write(datum + eol)
class CSV(File):
'''
Instantiate the File class for Comma Separated Values (CSV)
'''
@classmethod
def read(cls,
filename,
header=True,
comment=None,
fields=None):
'''
- header: is first line the header?
- fields: optional list of field values
'''
with open(filename, 'rt') as file:
csv_file = csv.reader(File.decomment(file, comment))
for i, record in enumerate(csv_file):
if len(record) == 0:
continue
record = [f.strip() for f in record]
if header:
if i == 0:
if fields is None:
fields = record
else:
yield OrderedDict(list(zip(fields, record)))
else:
yield record
@classmethod
def write(cls,
data,
filename=None,
fields=None,
header=True,
append=False,
delimiter=','):
'''
- fields: optional list of field values
- header: display header on first line?
- append: add to existing file?
- delimiter: what character to use for separating elements
'''
def formatter(datum, fields):
if not isinstance(datum, dict):
return dict(list(zip(fields, [str(d) for d in datum])))
else:
d = defaultdict()
for field in fields:
if field in datum:
d[field] = datum[field]
return d
if append:
mode = 'a'
else:
mode = 'w'
if filename is None:
f = sys.stdout
elif sys.version_info < (3, 0, 0):
mode += 'b'
f = open(filename, mode)
else:
f = open(filename, mode, newline='')
with f as csv_file:
first = True
for datum in data:
if first:
if fields is None:
if isinstance(datum, dict):
fields = list(datum.keys())
else:
fields = datum # first line is the list of fields
csv_writer = csv.DictWriter(csv_file, fields,
lineterminator='\n', delimiter=delimiter)
if header:
csv_writer.writerow(dict(list(zip(fields, fields))))
first = False
csv_writer.writerow(formatter(datum, fields))
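# A minimal usage sketch (not exercised by the tests below; 'people.csv' is a
# hypothetical path):
def _csv_demo():
    rows = [{'id': '1', 'name': 'ada'}, {'id': '2', 'name': 'bob'}]
    CSV.write(rows, 'people.csv')  # header row is taken from the dict keys
    for record in CSV.read('people.csv'):  # yields OrderedDicts keyed by header
        print(record['id'], record['name'])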
class Test_File(unittest.TestCase):
def setUp(self):
self.named = tempfile.NamedTemporaryFile(delete=True)
self.data = [[i+str(j) for j in range(4)] for i in ['x', 'a', 'b', 'c']]
self.filename = self.named.name
def tearDown(self):
self.named.close()
def test_text(self):
data = [' '.join(datum) for datum in self.data]
Text.write(data, self.filename)
for i, same in enumerate(Text.read(self.filename)):
assert data[i] == same
def test_csv(self):
CSV.write(self.data, self.filename, header=False)
for i, same in enumerate(CSV.read(self.filename, header=True)):
assert list(same.keys()) == self.data[0]
assert list(same.values()) == self.data[i+1]
if __name__ == '__main__':
unittest.main()
| 29 | 84 | 0.484022 |
4a1e11471b9c741e7d84dd582c0af639d1448f2d | 17,955 | py | Python | tests/test_cli.py | logworthy/cookiecutter | 8073c27271545e4ef6d3221ef3eeeaeaaa65e7b8 | [
"BSD-3-Clause"
] | null | null | null | tests/test_cli.py | logworthy/cookiecutter | 8073c27271545e4ef6d3221ef3eeeaeaaa65e7b8 | [
"BSD-3-Clause"
] | null | null | null | tests/test_cli.py | logworthy/cookiecutter | 8073c27271545e4ef6d3221ef3eeeaeaaa65e7b8 | [
"BSD-3-Clause"
] | null | null | null | """Collection of tests around cookiecutter's command-line interface."""
import json
import os
import pytest
from click.testing import CliRunner
from cookiecutter import utils
from cookiecutter.__main__ import main
from cookiecutter.main import cookiecutter
@pytest.fixture(scope='session')
def cli_runner():
"""Fixture that returns a helper function to run the cookiecutter cli."""
runner = CliRunner()
def cli_main(*cli_args, **cli_kwargs):
"""Run cookiecutter cli main with the given args."""
return runner.invoke(main, cli_args, **cli_kwargs)
return cli_main
@pytest.fixture
def remove_fake_project_dir(request):
"""Remove the fake project directory created during the tests."""
def fin_remove_fake_project_dir():
if os.path.isdir('fake-project'):
utils.rmtree('fake-project')
request.addfinalizer(fin_remove_fake_project_dir)
@pytest.fixture
def make_fake_project_dir(request):
"""Create a fake project to be overwritten in the according tests."""
os.makedirs('fake-project')
@pytest.fixture(params=['-V', '--version'])
def version_cli_flag(request):
"""Pytest fixture return both version invocation options."""
return request.param
def test_cli_version(cli_runner, version_cli_flag):
"""Verify correct version output by `cookiecutter` on cli invocation."""
result = cli_runner(version_cli_flag)
assert result.exit_code == 0
assert result.output.startswith('Cookiecutter')
@pytest.mark.usefixtures('make_fake_project_dir', 'remove_fake_project_dir')
def test_cli_error_on_existing_output_directory(cli_runner):
"""Test cli invocation without `overwrite-if-exists` fail if dir exist."""
result = cli_runner('tests/fake-repo-pre/', '--no-input')
assert result.exit_code != 0
expected_error_msg = 'Error: "fake-project" directory already exists\n'
assert result.output == expected_error_msg
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli(cli_runner):
"""Test cli invocation work without flags if directory not exist."""
result = cli_runner('tests/fake-repo-pre/', '--no-input')
assert result.exit_code == 0
assert os.path.isdir('fake-project')
with open(os.path.join('fake-project', 'README.rst')) as f:
assert 'Project name: **Fake Project**' in f.read()
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_verbose(cli_runner):
"""Test cli invocation display log if called with `verbose` flag."""
result = cli_runner('tests/fake-repo-pre/', '--no-input', '-v')
assert result.exit_code == 0
assert os.path.isdir('fake-project')
with open(os.path.join('fake-project', 'README.rst')) as f:
assert 'Project name: **Fake Project**' in f.read()
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_replay(mocker, cli_runner):
"""Test cli invocation display log with `verbose` and `replay` flags."""
mock_cookiecutter = mocker.patch('cookiecutter.cli.cookiecutter')
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, '--replay', '-v')
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=True,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
config_file=None,
default_config=False,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_replay_file(mocker, cli_runner):
"""Test cli invocation correctly pass --replay-file option."""
mock_cookiecutter = mocker.patch('cookiecutter.cli.cookiecutter')
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, '--replay-file', '~/custom-replay-file', '-v')
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay='~/custom-replay-file',
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
config_file=None,
default_config=False,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_exit_on_noinput_and_replay(mocker, cli_runner):
"""Test cli invocation fail if both `no-input` and `replay` flags passed."""
mock_cookiecutter = mocker.patch(
'cookiecutter.cli.cookiecutter', side_effect=cookiecutter
)
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, '--no-input', '--replay', '-v')
assert result.exit_code == 1
expected_error_msg = (
"You can not use both replay and no_input or extra_context at the same time."
)
assert expected_error_msg in result.output
mock_cookiecutter.assert_called_once_with(
template_path,
None,
True,
replay=True,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
config_file=None,
default_config=False,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
@pytest.fixture(params=['-f', '--overwrite-if-exists'])
def overwrite_cli_flag(request):
"""Pytest fixture return all `overwrite-if-exists` invocation options."""
return request.param
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_run_cookiecutter_on_overwrite_if_exists_and_replay(
mocker, cli_runner, overwrite_cli_flag
):
"""Test cli invocation with `overwrite-if-exists` and `replay` flags."""
mock_cookiecutter = mocker.patch(
'cookiecutter.cli.cookiecutter', side_effect=cookiecutter
)
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, '--replay', '-v', overwrite_cli_flag)
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=True,
overwrite_if_exists=True,
skip_if_file_exists=False,
output_dir='.',
config_file=None,
default_config=False,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_overwrite_if_exists_when_output_dir_does_not_exist(
cli_runner, overwrite_cli_flag
):
"""Test cli invocation with `overwrite-if-exists` and `no-input` flags.
Case when output dir not exist.
"""
result = cli_runner('tests/fake-repo-pre/', '--no-input', overwrite_cli_flag)
assert result.exit_code == 0
assert os.path.isdir('fake-project')
@pytest.mark.usefixtures('make_fake_project_dir', 'remove_fake_project_dir')
def test_cli_overwrite_if_exists_when_output_dir_exists(cli_runner, overwrite_cli_flag):
"""Test cli invocation with `overwrite-if-exists` and `no-input` flags.
Case when output dir already exist.
"""
result = cli_runner('tests/fake-repo-pre/', '--no-input', overwrite_cli_flag)
assert result.exit_code == 0
assert os.path.isdir('fake-project')
@pytest.fixture(params=['-o', '--output-dir'])
def output_dir_flag(request):
"""Pytest fixture return all output-dir invocation options."""
return request.param
@pytest.fixture
def output_dir(tmpdir):
"""Pytest fixture return `output_dir` argument as string."""
return str(tmpdir.mkdir('output'))
def test_cli_output_dir(mocker, cli_runner, output_dir_flag, output_dir):
"""Test cli invocation with `output-dir` flag changes output directory."""
mock_cookiecutter = mocker.patch('cookiecutter.cli.cookiecutter')
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, output_dir_flag, output_dir)
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=False,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir=output_dir,
config_file=None,
default_config=False,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
@pytest.fixture(params=['-h', '--help', 'help'])
def help_cli_flag(request):
"""Pytest fixture return all help invocation options."""
return request.param
def test_cli_help(cli_runner, help_cli_flag):
"""Test cli invocation display help message with `help` flag."""
result = cli_runner(help_cli_flag)
assert result.exit_code == 0
assert result.output.startswith('Usage')
@pytest.fixture
def user_config_path(tmpdir):
"""Pytest fixture return `user_config` argument as string."""
return str(tmpdir.join('tests/config.yaml'))
def test_user_config(mocker, cli_runner, user_config_path):
"""Test cli invocation works with `config-file` option."""
mock_cookiecutter = mocker.patch('cookiecutter.cli.cookiecutter')
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, '--config-file', user_config_path)
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=False,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
config_file=user_config_path,
default_config=False,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
def test_default_user_config_overwrite(mocker, cli_runner, user_config_path):
"""Test cli invocation ignores `config-file` if `default-config` passed."""
mock_cookiecutter = mocker.patch('cookiecutter.cli.cookiecutter')
template_path = 'tests/fake-repo-pre/'
result = cli_runner(
template_path, '--config-file', user_config_path, '--default-config',
)
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=False,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
config_file=user_config_path,
default_config=True,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
def test_default_user_config(mocker, cli_runner):
"""Test cli invocation accepts `default-config` flag correctly."""
mock_cookiecutter = mocker.patch('cookiecutter.cli.cookiecutter')
template_path = 'tests/fake-repo-pre/'
result = cli_runner(template_path, '--default-config')
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=False,
overwrite_if_exists=False,
skip_if_file_exists=False,
output_dir='.',
config_file=None,
default_config=True,
extra_context=None,
password=None,
directory=None,
accept_hooks=True,
)
def test_echo_undefined_variable_error(tmpdir, cli_runner):
"""Cli invocation return error if variable undefined in template."""
output_dir = str(tmpdir.mkdir('output'))
template_path = 'tests/undefined-variable/file-name/'
result = cli_runner(
'--no-input', '--default-config', '--output-dir', output_dir, template_path,
)
assert result.exit_code == 1
error = "Unable to create file '{{cookiecutter.foobar}}'"
assert error in result.output
message = (
"Error message: 'collections.OrderedDict object' has no attribute 'foobar'"
)
assert message in result.output
context = {
'cookiecutter': {
'github_username': 'hackebrot',
'project_slug': 'testproject',
'_template': template_path,
'_output_dir': output_dir,
}
}
context_str = json.dumps(context, indent=4, sort_keys=True)
assert context_str in result.output
def test_echo_unknown_extension_error(tmpdir, cli_runner):
"""Cli return error if extension incorrectly defined in template."""
output_dir = str(tmpdir.mkdir('output'))
template_path = 'tests/test-extensions/unknown/'
result = cli_runner(
'--no-input', '--default-config', '--output-dir', output_dir, template_path,
)
assert result.exit_code == 1
assert 'Unable to load extension: ' in result.output
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_extra_context(cli_runner):
"""Cli invocation replace content if called with replacement pairs."""
result = cli_runner(
'tests/fake-repo-pre/', '--no-input', '-v', 'project_name=Awesomez',
)
assert result.exit_code == 0
assert os.path.isdir('fake-project')
with open(os.path.join('fake-project', 'README.rst')) as f:
assert 'Project name: **Awesomez**' in f.read()
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_cli_extra_context_invalid_format(cli_runner):
"""Cli invocation raise error if called with unknown argument."""
result = cli_runner(
'tests/fake-repo-pre/', '--no-input', '-v', 'ExtraContextWithNoEqualsSoInvalid',
)
assert result.exit_code == 2
assert "Error: Invalid value for '[EXTRA_CONTEXT]...'" in result.output
assert 'should contain items of the form key=value' in result.output
@pytest.fixture
def debug_file(tmpdir):
"""Pytest fixture return `debug_file` argument as path object."""
return tmpdir.join('fake-repo.log')
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_debug_file_non_verbose(cli_runner, debug_file):
"""Test cli invocation writes log to `debug-file` if flag enabled.
Case for normal log output.
"""
assert not debug_file.exists()
result = cli_runner(
'--no-input', '--debug-file', str(debug_file), 'tests/fake-repo-pre/',
)
assert result.exit_code == 0
assert debug_file.exists()
context_log = (
"DEBUG cookiecutter.main: context_file is "
"tests/fake-repo-pre/cookiecutter.json"
)
assert context_log in debug_file.readlines(cr=False)
assert context_log not in result.output
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_debug_file_verbose(cli_runner, debug_file):
"""Test cli invocation writes log to `debug-file` if flag enabled.
Case for verbose log output.
"""
assert not debug_file.exists()
result = cli_runner(
'--verbose',
'--no-input',
'--debug-file',
str(debug_file),
'tests/fake-repo-pre/',
)
assert result.exit_code == 0
assert debug_file.exists()
context_log = (
"DEBUG cookiecutter.main: context_file is "
"tests/fake-repo-pre/cookiecutter.json"
)
assert context_log in debug_file.readlines(cr=False)
assert context_log in result.output
@pytest.mark.usefixtures('make_fake_project_dir', 'remove_fake_project_dir')
def test_debug_list_installed_templates(cli_runner, debug_file, user_config_path):
"""Verify --list-installed command correct invocation."""
fake_template_dir = os.path.dirname(os.path.abspath('fake-project'))
os.makedirs(os.path.dirname(user_config_path))
with open(user_config_path, 'w') as config_file:
config_file.write('cookiecutters_dir: "%s"' % fake_template_dir)
open(os.path.join('fake-project', 'cookiecutter.json'), 'w').write('{}')
result = cli_runner(
'--list-installed', '--config-file', user_config_path, str(debug_file),
)
assert "1 installed templates:" in result.output
assert result.exit_code == 0
def test_debug_list_installed_templates_failure(
cli_runner, debug_file, user_config_path
):
"""Verify --list-installed command error on invocation."""
os.makedirs(os.path.dirname(user_config_path))
with open(user_config_path, 'w') as config_file:
config_file.write('cookiecutters_dir: "/notarealplace/"')
result = cli_runner(
'--list-installed', '--config-file', user_config_path, str(debug_file)
)
assert "Error: Cannot list installed templates." in result.output
assert result.exit_code == -1
@pytest.mark.usefixtures('remove_fake_project_dir')
def test_directory_repo(cli_runner):
"""Test cli invocation works with `directory` option."""
result = cli_runner(
'tests/fake-repo-dir/', '--no-input', '-v', '--directory=my-dir',
)
assert result.exit_code == 0
assert os.path.isdir("fake-project")
with open(os.path.join("fake-project", "README.rst")) as f:
assert "Project name: **Fake Project**" in f.read()
cli_accept_hook_arg_testdata = [
("--accept-hooks=yes", None, True),
("--accept-hooks=no", None, False),
("--accept-hooks=ask", "yes", True),
("--accept-hooks=ask", "no", False),
]
@pytest.mark.parametrize(
"accept_hooks_arg,user_input,expected", cli_accept_hook_arg_testdata
)
def test_cli_accept_hooks(
mocker,
cli_runner,
output_dir_flag,
output_dir,
accept_hooks_arg,
user_input,
expected,
):
"""Test cli invocation works with `accept-hooks` option."""
mock_cookiecutter = mocker.patch("cookiecutter.cli.cookiecutter")
template_path = "tests/fake-repo-pre/"
result = cli_runner(
template_path, output_dir_flag, output_dir, accept_hooks_arg, input=user_input
)
assert result.exit_code == 0
mock_cookiecutter.assert_called_once_with(
template_path,
None,
False,
replay=False,
overwrite_if_exists=False,
output_dir=output_dir,
config_file=None,
default_config=False,
extra_context=None,
password=None,
directory=None,
skip_if_file_exists=False,
accept_hooks=expected,
)
| 30.956897 | 88 | 0.683654 |
4a1e119b327c0ff4fac105bd7a83fcb547eb3c2d | 11,171 | py | Python | examples/speech_synthesis/preprocessing/get_feature_manifest.py | Shiguang-Guo/fairseq | c9d3df5679d0829cda8fc3c818b6cab52b78dc37 | [
"MIT"
] | 8 | 2022-03-19T15:20:10.000Z | 2022-03-30T20:29:25.000Z | examples/speech_synthesis/preprocessing/get_feature_manifest.py | Shiguang-Guo/fairseq | c9d3df5679d0829cda8fc3c818b6cab52b78dc37 | [
"MIT"
] | 2 | 2022-02-22T08:28:06.000Z | 2022-02-22T09:26:26.000Z | examples/speech_synthesis/preprocessing/get_feature_manifest.py | Shiguang-Guo/fairseq | c9d3df5679d0829cda8fc3c818b6cab52b78dc37 | [
"MIT"
] | 2 | 2022-03-18T14:42:41.000Z | 2022-03-19T15:23:20.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import logging
from pathlib import Path
import shutil
from tempfile import NamedTemporaryFile
from collections import Counter, defaultdict
import pandas as pd
import torchaudio
from tqdm import tqdm
from fairseq.data.audio.audio_utils import convert_waveform
from examples.speech_to_text.data_utils import (
create_zip,
gen_config_yaml,
gen_vocab,
get_zip_manifest,
load_tsv_to_dicts,
save_df_to_tsv
)
from examples.speech_synthesis.data_utils import (
extract_logmel_spectrogram, extract_pitch, extract_energy, get_global_cmvn,
ipa_phonemize, get_mfa_alignment, get_unit_alignment,
get_feature_value_min_max
)
log = logging.getLogger(__name__)
def process(args):
assert "train" in args.splits
out_root = Path(args.output_root).absolute()
out_root.mkdir(exist_ok=True)
print("Fetching data...")
audio_manifest_root = Path(args.audio_manifest_root).absolute()
samples = []
for s in args.splits:
for e in load_tsv_to_dicts(audio_manifest_root / f"{s}.audio.tsv"):
e["split"] = s
samples.append(e)
sample_ids = [s["id"] for s in samples]
# Get alignment info
id_to_alignment = None
if args.textgrid_zip is not None:
assert args.id_to_units_tsv is None
id_to_alignment = get_mfa_alignment(
args.textgrid_zip, sample_ids, args.sample_rate, args.hop_length
)
elif args.id_to_units_tsv is not None:
# assume identical hop length on the unit sequence
id_to_alignment = get_unit_alignment(args.id_to_units_tsv, sample_ids)
# Extract features and pack features into ZIP
feature_name = "logmelspec80"
zip_path = out_root / f"{feature_name}.zip"
pitch_zip_path = out_root / "pitch.zip"
energy_zip_path = out_root / "energy.zip"
gcmvn_npz_path = out_root / "gcmvn_stats.npz"
if zip_path.exists() and gcmvn_npz_path.exists():
print(f"{zip_path} and {gcmvn_npz_path} exist.")
else:
feature_root = out_root / feature_name
feature_root.mkdir(exist_ok=True)
pitch_root = out_root / "pitch"
energy_root = out_root / "energy"
if args.add_fastspeech_targets:
pitch_root.mkdir(exist_ok=True)
energy_root.mkdir(exist_ok=True)
print("Extracting Mel spectrogram features...")
for sample in tqdm(samples):
waveform, sample_rate = torchaudio.load(sample["audio"])
waveform, sample_rate = convert_waveform(
waveform, sample_rate, normalize_volume=args.normalize_volume,
to_sample_rate=args.sample_rate
)
sample_id = sample["id"]
target_length = None
if id_to_alignment is not None:
a = id_to_alignment[sample_id]
target_length = sum(a.frame_durations)
if a.start_sec is not None and a.end_sec is not None:
start_frame = int(a.start_sec * sample_rate)
end_frame = int(a.end_sec * sample_rate)
waveform = waveform[:, start_frame: end_frame]
extract_logmel_spectrogram(
waveform, sample_rate, feature_root / f"{sample_id}.npy",
win_length=args.win_length, hop_length=args.hop_length,
n_fft=args.n_fft, n_mels=args.n_mels, f_min=args.f_min,
f_max=args.f_max, target_length=target_length
)
if args.add_fastspeech_targets:
assert id_to_alignment is not None
extract_pitch(
waveform, sample_rate, pitch_root / f"{sample_id}.npy",
hop_length=args.hop_length, log_scale=True,
phoneme_durations=id_to_alignment[sample_id].frame_durations
)
extract_energy(
waveform, energy_root / f"{sample_id}.npy",
hop_length=args.hop_length, n_fft=args.n_fft,
log_scale=True,
phoneme_durations=id_to_alignment[sample_id].frame_durations
)
print("ZIPing features...")
create_zip(feature_root, zip_path)
get_global_cmvn(feature_root, gcmvn_npz_path)
shutil.rmtree(feature_root)
if args.add_fastspeech_targets:
create_zip(pitch_root, pitch_zip_path)
shutil.rmtree(pitch_root)
create_zip(energy_root, energy_zip_path)
shutil.rmtree(energy_root)
print("Fetching ZIP manifest...")
audio_paths, audio_lengths = get_zip_manifest(zip_path)
pitch_paths, pitch_lengths, energy_paths, energy_lengths = [None] * 4
if args.add_fastspeech_targets:
pitch_paths, pitch_lengths = get_zip_manifest(pitch_zip_path)
energy_paths, energy_lengths = get_zip_manifest(energy_zip_path)
# Generate TSV manifest
print("Generating manifest...")
id_to_cer = None
if args.cer_threshold is not None:
assert Path(args.cer_tsv_path).is_file()
id_to_cer = {
x["id"]: x["uer"] for x in load_tsv_to_dicts(args.cer_tsv_path)
}
manifest_by_split = {split: defaultdict(list) for split in args.splits}
for sample in tqdm(samples):
sample_id, split = sample["id"], sample["split"]
if args.snr_threshold is not None and "snr" in sample \
and sample["snr"] < args.snr_threshold:
continue
if args.cer_threshold is not None \
                and id_to_cer[sample_id] > args.cer_threshold:
continue
normalized_utt = sample["tgt_text"]
if id_to_alignment is not None:
normalized_utt = " ".join(id_to_alignment[sample_id].tokens)
elif args.ipa_vocab:
normalized_utt = ipa_phonemize(
normalized_utt, lang=args.lang, use_g2p=args.use_g2p
)
manifest_by_split[split]["id"].append(sample_id)
manifest_by_split[split]["audio"].append(audio_paths[sample_id])
manifest_by_split[split]["n_frames"].append(audio_lengths[sample_id])
manifest_by_split[split]["tgt_text"].append(normalized_utt)
manifest_by_split[split]["speaker"].append(sample["speaker"])
manifest_by_split[split]["src_text"].append(sample["src_text"])
if args.add_fastspeech_targets:
assert id_to_alignment is not None
duration = " ".join(
str(d) for d in id_to_alignment[sample_id].frame_durations
)
manifest_by_split[split]["duration"].append(duration)
manifest_by_split[split]["pitch"].append(pitch_paths[sample_id])
manifest_by_split[split]["energy"].append(energy_paths[sample_id])
for split in args.splits:
save_df_to_tsv(
pd.DataFrame.from_dict(manifest_by_split[split]),
out_root / f"{split}.tsv"
)
# Generate vocab
vocab_name, spm_filename = None, None
if id_to_alignment is not None or args.ipa_vocab:
vocab = Counter()
for t in manifest_by_split["train"]["tgt_text"]:
vocab.update(t.split(" "))
vocab_name = "vocab.txt"
with open(out_root / vocab_name, "w") as f:
for s, c in vocab.most_common():
f.write(f"{s} {c}\n")
else:
spm_filename_prefix = "spm_char"
spm_filename = f"{spm_filename_prefix}.model"
with NamedTemporaryFile(mode="w") as f:
for t in manifest_by_split["train"]["tgt_text"]:
f.write(t + "\n")
f.flush() # needed to ensure gen_vocab sees dumped text
gen_vocab(Path(f.name), out_root / spm_filename_prefix, "char")
# Generate speaker list
speakers = sorted({sample["speaker"] for sample in samples})
speakers_path = out_root / "speakers.txt"
with open(speakers_path, "w") as f:
for speaker in speakers:
f.write(f"{speaker}\n")
# Generate config YAML
win_len_t = args.win_length / args.sample_rate
hop_len_t = args.hop_length / args.sample_rate
extra = {
"sample_rate": args.sample_rate,
"features": {
"type": "spectrogram+melscale+log",
"eps": 1e-5, "n_mels": args.n_mels, "n_fft": args.n_fft,
"window_fn": "hann", "win_length": args.win_length,
"hop_length": args.hop_length, "sample_rate": args.sample_rate,
"win_len_t": win_len_t, "hop_len_t": hop_len_t,
"f_min": args.f_min, "f_max": args.f_max,
"n_stft": args.n_fft // 2 + 1
}
}
if len(speakers) > 1:
extra["speaker_set_filename"] = "speakers.txt"
if args.add_fastspeech_targets:
pitch_min, pitch_max = get_feature_value_min_max(
[(out_root / n).as_posix() for n in pitch_paths.values()]
)
energy_min, energy_max = get_feature_value_min_max(
[(out_root / n).as_posix() for n in energy_paths.values()]
)
extra["features"]["pitch_min"] = pitch_min
extra["features"]["pitch_max"] = pitch_max
extra["features"]["energy_min"] = energy_min
extra["features"]["energy_max"] = energy_max
gen_config_yaml(
out_root, spm_filename=spm_filename, vocab_name=vocab_name,
audio_root=out_root.as_posix(), input_channels=None,
input_feat_per_channel=None, specaugment_policy=None,
cmvn_type="global", gcmvn_path=gcmvn_npz_path, extra=extra
)
def main():
parser = argparse.ArgumentParser()
parser.add_argument("--audio-manifest-root", "-m", required=True, type=str)
parser.add_argument("--output-root", "-o", required=True, type=str)
parser.add_argument("--splits", "-s", type=str, nargs="+",
default=["train", "dev", "test"])
parser.add_argument("--ipa-vocab", action="store_true")
parser.add_argument("--use-g2p", action="store_true")
parser.add_argument("--lang", type=str, default="en-us")
parser.add_argument("--win-length", type=int, default=1024)
parser.add_argument("--hop-length", type=int, default=256)
parser.add_argument("--n-fft", type=int, default=1024)
parser.add_argument("--n-mels", type=int, default=80)
parser.add_argument("--f-min", type=int, default=20)
parser.add_argument("--f-max", type=int, default=8000)
parser.add_argument("--sample-rate", type=int, default=22050)
parser.add_argument("--normalize-volume", "-n", action="store_true")
parser.add_argument("--textgrid-zip", type=str, default=None)
parser.add_argument("--id-to-units-tsv", type=str, default=None)
parser.add_argument("--add-fastspeech-targets", action="store_true")
parser.add_argument("--snr-threshold", type=float, default=None)
parser.add_argument("--cer-threshold", type=float, default=None)
parser.add_argument("--cer-tsv-path", type=str, default="")
args = parser.parse_args()
process(args)
if __name__ == "__main__":
main()
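# Example invocation (a sketch; paths and dataset layout are hypothetical):
#   python get_feature_manifest.py \
#       -m /data/ljspeech/manifests -o /data/ljspeech/features \
#       --splits train dev test --ipa-vocab --use-g2p \
#       --add-fastspeech-targets --textgrid-zip /data/ljspeech/textgrids.zip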
| 42.475285 | 80 | 0.644168 |
4a1e13fcac08706f1b193ead2b7c29eb454392a2 | 3,199 | py | Python | do_wide_and_deep_networks_learn_the_same_things/large_scale_training/single_task_evaluator.py | xxdreck/google-research | dac724bc2b9362d65c26747a8754504fe4c615f8 | [
"Apache-2.0"
] | 23,901 | 2018-10-04T19:48:53.000Z | 2022-03-31T21:27:42.000Z | do_wide_and_deep_networks_learn_the_same_things/large_scale_training/single_task_evaluator.py | xxdreck/google-research | dac724bc2b9362d65c26747a8754504fe4c615f8 | [
"Apache-2.0"
] | 891 | 2018-11-10T06:16:13.000Z | 2022-03-31T10:42:34.000Z | do_wide_and_deep_networks_learn_the_same_things/large_scale_training/single_task_evaluator.py | admariner/google-research | 7cee4b22b925581d912e8d993625c180da2a5a4f | [
"Apache-2.0"
] | 6,047 | 2018-10-12T06:31:02.000Z | 2022-03-31T13:59:28.000Z | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An evaluator object that can evaluate models with a single output."""
import orbit
import tensorflow as tf
class SingleTaskEvaluator(orbit.StandardEvaluator):
"""Evaluates a single-output model on a given dataset.
This evaluator will handle running a model with one output on a single
dataset, and will apply the output of that model to one or more
`tf.keras.metrics.Metric` objects.
"""
def __init__(self,
eval_dataset,
label_key,
model,
metrics,
evaluator_options=None):
"""Initializes a `SingleTaskEvaluator` instance.
If the `SingleTaskEvaluator` should run its model under a distribution
strategy, it should be created within that strategy's scope.
Arguments:
eval_dataset: A `tf.data.Dataset` or `DistributedDataset` that contains a
string-keyed dict of `Tensor`s.
label_key: The key corresponding to the label value in feature
dictionaries dequeued from `eval_dataset`. This key will be removed from
the dictionary before it is passed to the model.
model: A `tf.Module` or Keras `Model` object to evaluate.
metrics: A single `tf.keras.metrics.Metric` object, or a list of
`tf.keras.metrics.Metric` objects.
evaluator_options: An optional `orbit.StandardEvaluatorOptions` object.
"""
self.label_key = label_key
self.model = model
self.metrics = metrics if isinstance(metrics, list) else [metrics]
# Capture the strategy from the containing scope.
self.strategy = tf.distribute.get_strategy()
super(SingleTaskEvaluator, self).__init__(
eval_dataset=eval_dataset, options=evaluator_options)
def eval_begin(self):
"""Actions to take once before every eval loop."""
for metric in self.metrics:
metric.reset_states()
def eval_step(self, iterator):
"""One eval step. Called multiple times per eval loop by the superclass."""
def step_fn(inputs):
# Extract the target value and delete it from the input dict, so that
# the model never sees it.
target = inputs.pop(self.label_key)
output = self.model(inputs)
for metric in self.metrics:
metric.update_state(target, output)
# This is needed to handle distributed computation.
self.strategy.run(step_fn, args=(next(iterator),))
def eval_end(self):
"""Actions to take once after an eval loop."""
with self.strategy.scope():
# Export the metrics.
metrics = {metric.name: metric.result() for metric in self.metrics}
return metrics
| 36.352273 | 80 | 0.703345 |
4a1e14374087d1ae3c33e104551080f4d2d858c8 | 86 | py | Python | data/studio21_generated/interview/0122/starter_code.py | vijaykumawat256/Prompt-Summarization | 614f5911e2acd2933440d909de2b4f86653dc214 | [
"Apache-2.0"
] | null | null | null | data/studio21_generated/interview/0122/starter_code.py | vijaykumawat256/Prompt-Summarization | 614f5911e2acd2933440d909de2b4f86653dc214 | [
"Apache-2.0"
] | null | null | null | data/studio21_generated/interview/0122/starter_code.py | vijaykumawat256/Prompt-Summarization | 614f5911e2acd2933440d909de2b4f86653dc214 | [
"Apache-2.0"
] | null | null | null | from typing import List
class Solution:
    def maxScore(self, cardPoints: List[int], k: int) -> int:
        pass
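# A hedged reference sketch for the usual reading of this signature (the
# LeetCode-style "maximum points from k cards taken off either end" problem;
# the actual problem statement is not included in this stub). Taking k cards
# from the ends leaves a contiguous window of len(cardPoints) - k untouched
# cards, so maximizing the take equals the total minus the minimum-sum window.
class SolutionSketch(Solution):
    def maxScore(self, cardPoints: List[int], k: int) -> int:
        window = len(cardPoints) - k  # cards left behind, always contiguous
        total = sum(cardPoints)
        window_sum = sum(cardPoints[:window])
        best_leftover = window_sum
        for i in range(window, len(cardPoints)):
            # Slide the window one card to the right.
            window_sum += cardPoints[i] - cardPoints[i - window]
            best_leftover = min(best_leftover, window_sum)
        return total - best_leftover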
| 28.666667 | 61 | 0.604651 |
4a1e14f7f9da4ce3ed8b032af73ee39d5bfb95a6 | 485 | py | Python | webapp/track/models.py | drylikov/mappad.ru | 98672ec4c880848aa865a6dbcd88c3b44cbc64e2 | [
"MIT"
] | null | null | null | webapp/track/models.py | drylikov/mappad.ru | 98672ec4c880848aa865a6dbcd88c3b44cbc64e2 | [
"MIT"
] | null | null | null | webapp/track/models.py | drylikov/mappad.ru | 98672ec4c880848aa865a6dbcd88c3b44cbc64e2 | [
"MIT"
] | null | null | null | from datetime import datetime
from webapp import db
class Track(db.Model):
__tablename__ = 'tracks'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(140))
timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
description = db.Column(db.String(250))
raw_gpx = db.Column(db.Text())
def __repr__(self):
return '<Track {}>'.format(self.title)
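# A hedged usage sketch (assumes a Flask app context and an existing user
# with id 1; the values are illustrative):
#   t = Track(title='Morning run', user_id=1, raw_gpx=gpx_xml)
#   db.session.add(t)
#   db.session.commit()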
| 26.944444 | 75 | 0.680412 |
4a1e162aa2fd45a32f60c42a34e6854e2b722649 | 32,335 | py | Python | edk2basetools/BPDG/GenVpd.py | YuweiChen1110/edk2-basetools | cfd05c928492b7ffd1329634cfcb089db995eeca | [
"BSD-2-Clause-Patent"
] | 7 | 2020-09-08T01:16:14.000Z | 2021-12-25T06:32:42.000Z | edk2basetools/BPDG/GenVpd.py | YuweiChen1110/edk2-basetools | cfd05c928492b7ffd1329634cfcb089db995eeca | [
"BSD-2-Clause-Patent"
] | 25 | 2020-11-02T23:28:42.000Z | 2022-03-29T01:57:40.000Z | edk2basetools/BPDG/GenVpd.py | YuweiChen1110/edk2-basetools | cfd05c928492b7ffd1329634cfcb089db995eeca | [
"BSD-2-Clause-Patent"
] | 18 | 2020-09-10T02:54:03.000Z | 2021-11-29T06:41:52.000Z | ## @file
# This file includes the GenVPD class, which fixes VPD type PCD offsets, and the
# PcdEntry class, which describes and processes each VPD type PCD entry.
#
# Copyright (c) 2010 - 2018, Intel Corporation. All rights reserved.<BR>
#
# SPDX-License-Identifier: BSD-2-Clause-Patent
#
from __future__ import absolute_import
import edk2basetools.Common.LongFilePathOs as os
from io import BytesIO
from . import StringTable as st
import array
import re
from edk2basetools.Common.LongFilePathSupport import OpenLongFilePath as open
from struct import *
from edk2basetools.Common.DataType import MAX_SIZE_TYPE, MAX_VAL_TYPE, TAB_STAR
import edk2basetools.Common.EdkLogger as EdkLogger
import edk2basetools.Common.BuildToolError as BuildToolError
_FORMAT_CHAR = {1: 'B',
2: 'H',
4: 'I',
8: 'Q'
}
## The VPD PCD data structure for storing and processing each VPD PCD entry.
#
# This class contains methods to format and pack a PCD's value.
#
class PcdEntry:
def __init__(self, PcdCName, SkuId,PcdOffset, PcdSize, PcdValue, Lineno=None, FileName=None, PcdUnpackValue=None,
PcdBinOffset=None, PcdBinSize=None, Alignment=None):
self.PcdCName = PcdCName.strip()
self.SkuId = SkuId.strip()
self.PcdOffset = PcdOffset.strip()
self.PcdSize = PcdSize.strip()
self.PcdValue = PcdValue.strip()
self.Lineno = Lineno.strip()
self.FileName = FileName.strip()
self.PcdUnpackValue = PcdUnpackValue
self.PcdBinOffset = PcdBinOffset
self.PcdBinSize = PcdBinSize
self.Alignment = Alignment
if self.PcdValue == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s line: %s) , no Value specified!" % (self.PcdCName, self.FileName, self.Lineno))
if self.PcdOffset == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s Line: %s) , no Offset specified!" % (self.PcdCName, self.FileName, self.Lineno))
if self.PcdSize == '' :
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid PCD format(Name: %s File: %s Line: %s), no PcdSize specified!" % (self.PcdCName, self.FileName, self.Lineno))
self._GenOffsetValue ()
    ## Analyze the string value to judge whether the PCD's datum type is Boolean.
#
# @param ValueString PCD's value
# @param Size PCD's size
#
# @retval True PCD's datum type is Boolean
# @retval False PCD's datum type is not Boolean.
#
def _IsBoolean(self, ValueString, Size):
if (Size == "1"):
if ValueString.upper() in ["TRUE", "FALSE"]:
return True
elif ValueString in ["0", "1", "0x0", "0x1", "0x00", "0x01"]:
return True
return False
    ## Convert the PCD's offset from string to integer.
    #
    # This function will try to convert the offset value from string to integer
    # for both hexadecimal and decimal.
#
def _GenOffsetValue(self):
if self.PcdOffset != TAB_STAR:
try:
self.PcdBinOffset = int (self.PcdOffset)
except:
try:
self.PcdBinOffset = int(self.PcdOffset, 16)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid offset value %s for PCD %s (File: %s Line: %s)" % (self.PcdOffset, self.PcdCName, self.FileName, self.Lineno))
    ## Pack a Boolean type VPD PCD's value from string to binary form.
    #
    # @param ValueString     The boolean type string to pack.
#
#
def _PackBooleanValue(self, ValueString):
if ValueString.upper() == "TRUE" or ValueString in ["1", "0x1", "0x01"]:
try:
self.PcdValue = pack(_FORMAT_CHAR[1], 1)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
else:
try:
self.PcdValue = pack(_FORMAT_CHAR[1], 0)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
    ## Pack an integer type VPD PCD's value into binary form.
    #
    # @param IntValue        The integer value to pack.
    # @param Size            The size of the value in bytes.
#
#
def _PackIntValue(self, IntValue, Size):
if Size not in _FORMAT_CHAR:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size %d for PCD %s in integer datum size(File: %s Line: %s)." % (Size, self.PcdCName, self.FileName, self.Lineno))
for Type, MaxSize in MAX_SIZE_TYPE.items():
if Type == 'BOOLEAN':
continue
if Size == MaxSize:
if IntValue < 0:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"PCD can't be set to negative value %d for PCD %s in %s datum type(File: %s Line: %s)." % (
IntValue, self.PcdCName, Type, self.FileName, self.Lineno))
elif IntValue > MAX_VAL_TYPE[Type]:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Too large PCD value %d for datum type %s for PCD %s(File: %s Line: %s)." % (
IntValue, Type, self.PcdCName, self.FileName, self.Lineno))
try:
self.PcdValue = pack(_FORMAT_CHAR[Size], IntValue)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
    ## Pack a VOID* type VPD PCD's value from string to binary form.
    #
    # The VOID* type of string is divided into 3 sub-types:
    #   1:  L"String"/L'String', a Unicode string.
    #   2:  "String"/'String', an ASCII string.
    #   3:  {bytearray}, a byte array.
    #
    # @param ValueString     The VOID* type string to pack.
#
def _PackPtrValue(self, ValueString, Size):
if ValueString.startswith('L"') or ValueString.startswith("L'"):
self._PackUnicode(ValueString, Size)
elif ValueString.startswith('{') and ValueString.endswith('}'):
self._PackByteArray(ValueString, Size)
elif (ValueString.startswith('"') and ValueString.endswith('"')) or (ValueString.startswith("'") and ValueString.endswith("'")):
self._PackString(ValueString, Size)
else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid VOID* type PCD %s value %s (File: %s Line: %s)" % (self.PcdCName, ValueString, self.FileName, self.Lineno))
## Pack an Ascii PCD value.
#
    # An ASCII string for a PCD should be in the format ""/''.
#
def _PackString(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
QuotedFlag = True
if ValueString.startswith("'"):
QuotedFlag = False
ValueString = ValueString[1:-1]
# No null-terminator in 'string'
if (QuotedFlag and len(ValueString) + 1 > Size) or (not QuotedFlag and len(ValueString) > Size):
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"PCD value string %s is exceed to size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
try:
self.PcdValue = pack('%ds' % Size, ValueString.encode('utf-8'))
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid size or value for PCD %s to pack(File: %s Line: %s)." % (self.PcdCName, self.FileName, self.Lineno))
## Pack a byte-array PCD value.
#
    # A byte array for a PCD should be in the format {0x01, 0x02, ...}.
#
def _PackByteArray(self, ValueString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % (self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
if (ValueString == ""):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter ValueString %s of PCD %s!(File: %s Line: %s)" % (self.PcdUnpackValue, self.PcdCName, self.FileName, self.Lineno))
ValueString = ValueString.strip()
ValueString = ValueString.lstrip('{').strip('}')
ValueList = ValueString.split(',')
ValueList = [item.strip() for item in ValueList]
if len(ValueList) > Size:
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The byte array %s is too large for size %d(File: %s Line: %s)" % (ValueString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B')
for Index in range(len(ValueList)):
Value = None
if ValueList[Index].lower().startswith('0x'):
# translate hex value
try:
Value = int(ValueList[Index], 16)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid HEX value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno))
else:
# translate decimal value
try:
Value = int(ValueList[Index], 10)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s is an invalid DECIMAL value.(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno))
if Value > 255:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"The value item %s in byte array %s do not in range 0 ~ 0xFF(File: %s Line: %s)" % \
(ValueList[Index], ValueString, self.FileName, self.Lineno))
ReturnArray.append(Value)
for Index in range(len(ValueList), Size):
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
## Pack a unicode PCD value into byte array.
#
    # A unicode string for a PCD should be in the format L""/L''.
#
def _PackUnicode(self, UnicodeString, Size):
if (Size < 0):
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid parameter Size %s of PCD %s!(File: %s Line: %s)" % \
(self.PcdBinSize, self.PcdCName, self.FileName, self.Lineno))
QuotedFlag = True
if UnicodeString.startswith("L'"):
QuotedFlag = False
UnicodeString = UnicodeString[2:-1]
# No null-terminator in L'string'
if (QuotedFlag and (len(UnicodeString) + 1) * 2 > Size) or (not QuotedFlag and len(UnicodeString) * 2 > Size):
EdkLogger.error("BPDG", BuildToolError.RESOURCE_OVERFLOW,
"The size of unicode string %s is too larger for size %s(File: %s Line: %s)" % \
(UnicodeString, Size, self.FileName, self.Lineno))
ReturnArray = array.array('B')
for Value in UnicodeString:
try:
ReturnArray.append(ord(Value))
ReturnArray.append(0)
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID,
"Invalid unicode character %s in unicode string %s(File: %s Line: %s)" % \
(Value, UnicodeString, self.FileName, self.Lineno))
for Index in range(len(UnicodeString) * 2, Size):
ReturnArray.append(0)
self.PcdValue = ReturnArray.tolist()
## The class implementing the BPDG VPD PCD offset fix process
#
# The VPD PCD offset fix process includes:
# 1. Parse the input guided.txt file and store it in the data structure;
# 2. Format the input file data to remove unused lines;
#   3. Fix offsets where needed;
#   4. Generate the output files, including the guided.map and guided.bin files.
#
class GenVPD :
    ## Constructor of GenVPD
    #
    #  Initialize an object of GenVPD
    #   @param      InputFileName   The filename that includes the VPD type PCD information
    #   @param      MapFileName     The filename of the map file that stores VPD type PCD information.
    #                               This file will be generated by the BPDG tool after fixing the offsets
    #                               and adjusting them to make the PCD data aligned.
    #   @param      VpdFileName     The filename of the VPD file that holds the VPD PCD information.
#
def __init__(self, InputFileName, MapFileName, VpdFileName):
self.InputFileName = InputFileName
self.MapFileName = MapFileName
self.VpdFileName = VpdFileName
self.FileLinesList = []
self.PcdFixedOffsetSizeList = []
self.PcdUnknownOffsetList = []
try:
fInputfile = open(InputFileName, "r")
try:
self.FileLinesList = fInputfile.readlines()
except:
EdkLogger.error("BPDG", BuildToolError.FILE_READ_FAILURE, "File read failed for %s" % InputFileName, None)
finally:
fInputfile.close()
except:
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % InputFileName, None)
##
    # Parse the input file generated by the build tool. Convert each PCD's value
    # from string to its real format. Also remove the useless lines from the input file.
#
def ParserInputFile (self):
count = 0
for line in self.FileLinesList:
# Strip "\r\n" generated by readlines ().
line = line.strip()
line = line.rstrip(os.linesep)
# Skip the comment line
if (not line.startswith("#")) and len(line) > 1 :
#
                # Enhanced to support the "|" character inside the string.
#
ValueList = ['', '', '', '', '']
ValueRe = re.compile(r'\s*L?\".*\|.*\"\s*$')
PtrValue = ValueRe.findall(line)
ValueUpdateFlag = False
if len(PtrValue) >= 1:
line = re.sub(ValueRe, '', line)
ValueUpdateFlag = True
TokenList = line.split('|')
ValueList[0:len(TokenList)] = TokenList
if ValueUpdateFlag:
ValueList[4] = PtrValue[0]
self.FileLinesList[count] = ValueList
# Store the line number
self.FileLinesList[count].append(str(count + 1))
elif len(line) <= 1 :
# Set the blank line to "None"
self.FileLinesList[count] = None
else :
# Set the comment line to "None"
self.FileLinesList[count] = None
count += 1
        # Reset the counter before removing the useless (None) lines.
count = 0
# Delete useless lines
while (True) :
try :
if (self.FileLinesList[count] is None) :
del(self.FileLinesList[count])
else :
count += 1
except :
break
#
        # After removing the useless lines, if no data remains in the file line list,
        # report a warning message to the user.
#
if len(self.FileLinesList) == 0 :
EdkLogger.warn('BPDG', BuildToolError.RESOURCE_NOT_AVAILABLE,
"There are no VPD type pcds defined in DSC file, Please check it.")
        # Process the PCDs one by one based on each PCD's value and size
count = 0
for line in self.FileLinesList:
if line is not None :
PCD = PcdEntry(line[0], line[1], line[2], line[3], line[4], line[5], self.InputFileName)
# Strip the space char
PCD.PcdCName = PCD.PcdCName.strip(' ')
PCD.SkuId = PCD.SkuId.strip(' ')
PCD.PcdOffset = PCD.PcdOffset.strip(' ')
PCD.PcdSize = PCD.PcdSize.strip(' ')
PCD.PcdValue = PCD.PcdValue.strip(' ')
PCD.Lineno = PCD.Lineno.strip(' ')
#
# Store the original pcd value.
                # This information will be useful when generating the output map file.
#
PCD.PcdUnpackValue = str(PCD.PcdValue)
#
# Translate PCD size string to an integer value.
PackSize = None
try:
PackSize = int(PCD.PcdSize, 10)
PCD.PcdBinSize = PackSize
except:
try:
PackSize = int(PCD.PcdSize, 16)
PCD.PcdBinSize = PackSize
except:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, "Invalid PCD size value %s at file: %s line: %s" % (PCD.PcdSize, self.InputFileName, PCD.Lineno))
#
# If value is Unicode string (e.g. L""), then use 2-byte alignment
# If value is byte array (e.g. {}), then use 8-byte alignment
#
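                # For example, a 5-byte {...} byte-array value with no fixed
                # offset will occupy 8 bytes once rounded up to its 8-byte
                # alignment below.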
PCD.PcdOccupySize = PCD.PcdBinSize
if PCD.PcdUnpackValue.startswith("{"):
Alignment = 8
elif PCD.PcdUnpackValue.startswith("L"):
Alignment = 2
else:
Alignment = 1
PCD.Alignment = Alignment
if PCD.PcdOffset != TAB_STAR:
if PCD.PcdOccupySize % Alignment != 0:
if PCD.PcdUnpackValue.startswith("{"):
EdkLogger.warn("BPDG", "The offset value of PCD %s is not 8-byte aligned!" %(PCD.PcdCName), File=self.InputFileName)
else:
EdkLogger.error("BPDG", BuildToolError.FORMAT_INVALID, 'The offset value of PCD %s should be %s-byte aligned.' % (PCD.PcdCName, Alignment))
else:
if PCD.PcdOccupySize % Alignment != 0:
PCD.PcdOccupySize = (PCD.PcdOccupySize // Alignment + 1) * Alignment
PackSize = PCD.PcdOccupySize
if PCD._IsBoolean(PCD.PcdValue, PCD.PcdSize):
PCD._PackBooleanValue(PCD.PcdValue)
self.FileLinesList[count] = PCD
count += 1
continue
#
# Try to translate value to an integer firstly.
#
IsInteger = True
PackValue = None
try:
PackValue = int(PCD.PcdValue)
except:
try:
PackValue = int(PCD.PcdValue, 16)
except:
IsInteger = False
if IsInteger:
PCD._PackIntValue(PackValue, PackSize)
else:
PCD._PackPtrValue(PCD.PcdValue, PackSize)
self.FileLinesList[count] = PCD
count += 1
else :
continue
##
    # This function creates a clean list that contains only useful information,
    # reorganized to make it easy to sort.
#
def FormatFileLine (self) :
for eachPcd in self.FileLinesList :
if eachPcd.PcdOffset != TAB_STAR :
# Use pcd's Offset value as key, and pcd's Value as value
self.PcdFixedOffsetSizeList.append(eachPcd)
else :
# Use pcd's CName as key, and pcd's Size as value
self.PcdUnknownOffsetList.append(eachPcd)
##
    # This function fixes the offset values that are not specified in the map file.
    # Such entries use the star character (meaning any offset) in the offset field.
#
def FixVpdOffset (self):
        # At first, the offset should start at 0.
        # Sort the fixed offset list in order to find the free spaces into which
        # PCDs whose offset value is TAB_STAR can be inserted.
self.PcdFixedOffsetSizeList.sort(key=lambda x: x.PcdBinOffset)
#
# Sort the un-fixed pcd's offset by its size.
#
self.PcdUnknownOffsetList.sort(key=lambda x: x.PcdBinSize)
index =0
for pcd in self.PcdUnknownOffsetList:
index += 1
if pcd.PcdCName == ".".join(("gEfiMdeModulePkgTokenSpaceGuid", "PcdNvStoreDefaultValueBuffer")):
if index != len(self.PcdUnknownOffsetList):
for i in range(len(self.PcdUnknownOffsetList) - index):
self.PcdUnknownOffsetList[index+i -1 ], self.PcdUnknownOffsetList[index+i] = self.PcdUnknownOffsetList[index+i], self.PcdUnknownOffsetList[index+i -1]
#
# Process all Offset value are TAB_STAR
#
if (len(self.PcdFixedOffsetSizeList) == 0) and (len(self.PcdUnknownOffsetList) != 0) :
# The offset start from 0
NowOffset = 0
for Pcd in self.PcdUnknownOffsetList :
if NowOffset % Pcd.Alignment != 0:
NowOffset = (NowOffset// Pcd.Alignment + 1) * Pcd.Alignment
Pcd.PcdBinOffset = NowOffset
Pcd.PcdOffset = str(hex(Pcd.PcdBinOffset))
NowOffset += Pcd.PcdOccupySize
self.PcdFixedOffsetSizeList = self.PcdUnknownOffsetList
return
        # Check that the first VPD type PCD's offset starts at 0.
if self.PcdFixedOffsetSizeList[0].PcdBinOffset != 0 :
EdkLogger.warn("BPDG", "The offset of VPD type pcd should start with 0, please check it.",
None)
        # Check whether any offsets in the fixed PCD offset list overlap.
lenOfList = len(self.PcdFixedOffsetSizeList)
count = 0
while (count < lenOfList - 1) :
PcdNow = self.PcdFixedOffsetSizeList[count]
PcdNext = self.PcdFixedOffsetSizeList[count+1]
            # Two PCDs have the same offset
if PcdNow.PcdBinOffset == PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is same with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
# Overlapped
if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize > PcdNext.PcdBinOffset :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offset of %s at line: %s is overlapped with %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
# Has free space, raise a warning message
if PcdNow.PcdBinOffset + PcdNow.PcdOccupySize < PcdNext.PcdBinOffset :
EdkLogger.warn("BPDG", BuildToolError.ATTRIBUTE_GET_FAILURE,
"The offsets have free space of between %s at line: %s and %s at line: %s in file %s" % \
(PcdNow.PcdCName, PcdNow.Lineno, PcdNext.PcdCName, PcdNext.Lineno, PcdNext.FileName),
None)
count += 1
LastOffset = self.PcdFixedOffsetSizeList[0].PcdBinOffset
FixOffsetSizeListCount = 0
lenOfList = len(self.PcdFixedOffsetSizeList)
lenOfUnfixedList = len(self.PcdUnknownOffsetList)
##
    # Insert PCDs from the un-fixed offset list into the fixed offset list wherever
    # there is free space between the fixed PCDs.
#
while (FixOffsetSizeListCount < lenOfList) :
eachFixedPcd = self.PcdFixedOffsetSizeList[FixOffsetSizeListCount]
NowOffset = eachFixedPcd.PcdBinOffset
# Has free space
if LastOffset < NowOffset :
if lenOfUnfixedList != 0 :
countOfUnfixedList = 0
while(countOfUnfixedList < lenOfUnfixedList) :
eachUnfixedPcd = self.PcdUnknownOffsetList[countOfUnfixedList]
needFixPcdSize = eachUnfixedPcd.PcdOccupySize
                    # Not yet fixed
if eachUnfixedPcd.PcdOffset == TAB_STAR :
if LastOffset % eachUnfixedPcd.Alignment != 0:
LastOffset = (LastOffset // eachUnfixedPcd.Alignment + 1) * eachUnfixedPcd.Alignment
                        # The un-fixed PCD fits into this free space
if needFixPcdSize <= (NowOffset - LastOffset) :
# Change the offset value of un-fixed pcd
eachUnfixedPcd.PcdOffset = str(hex(LastOffset))
eachUnfixedPcd.PcdBinOffset = LastOffset
# Insert this pcd into fixed offset pcd list.
self.PcdFixedOffsetSizeList.insert(FixOffsetSizeListCount, eachUnfixedPcd)
                            # Remove the item that has been fixed and added to the fixed offset list
self.PcdUnknownOffsetList.pop(countOfUnfixedList)
                            # After the item is added, enlarge the recorded length of the fixed PCD offset list
lenOfList += 1
FixOffsetSizeListCount += 1
# Decrease the un-fixed pcd offset list's length
lenOfUnfixedList -= 1
# Modify the last offset value
LastOffset += needFixPcdSize
else :
                            # It cannot be inserted between these two PCDs; keep checking
                            # whether other free space can hold it.
LastOffset = NowOffset + self.PcdFixedOffsetSizeList[FixOffsetSizeListCount].PcdOccupySize
FixOffsetSizeListCount += 1
break
                # Set FixOffsetSizeListCount = lenOfList to quit the loop
else :
FixOffsetSizeListCount = lenOfList
            # No free space; this PCD directly follows the previous one.
elif LastOffset == NowOffset :
LastOffset = NowOffset + eachFixedPcd.PcdOccupySize
FixOffsetSizeListCount += 1
            # Normally this branch is never reached; if it is, the offsets overlap.
else :
EdkLogger.error("BPDG", BuildToolError.ATTRIBUTE_NOT_AVAILABLE,
"The offset value definition has overlapped at pcd: %s, its offset is: %s, in file: %s line: %s" % \
(eachFixedPcd.PcdCName, eachFixedPcd.PcdOffset, eachFixedPcd.InputFileName, eachFixedPcd.Lineno),
None)
FixOffsetSizeListCount += 1
        # Continue processing the un-fixed offset PCD list; this time, simply append
        # the remaining items after the fixed PCD offset list.
lenOfUnfixedList = len(self.PcdUnknownOffsetList)
lenOfList = len(self.PcdFixedOffsetSizeList)
while (lenOfUnfixedList > 0) :
            # There are still items to process
# The last pcd instance
LastPcd = self.PcdFixedOffsetSizeList[lenOfList-1]
NeedFixPcd = self.PcdUnknownOffsetList[0]
NeedFixPcd.PcdBinOffset = LastPcd.PcdBinOffset + LastPcd.PcdOccupySize
if NeedFixPcd.PcdBinOffset % NeedFixPcd.Alignment != 0:
NeedFixPcd.PcdBinOffset = (NeedFixPcd.PcdBinOffset // NeedFixPcd.Alignment + 1) * NeedFixPcd.Alignment
NeedFixPcd.PcdOffset = str(hex(NeedFixPcd.PcdBinOffset))
# Insert this pcd into fixed offset pcd list's tail.
self.PcdFixedOffsetSizeList.insert(lenOfList, NeedFixPcd)
            # Remove the item that has been fixed and added to the fixed offset list
self.PcdUnknownOffsetList.pop(0)
lenOfList += 1
lenOfUnfixedList -= 1
##
# Write the final data into output files.
#
def GenerateVpdFile (self, MapFileName, BinFileName):
        # Open a VPD file to process
try:
fVpdFile = open(BinFileName, "wb")
except:
# Open failed
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.VpdFileName, None)
try :
fMapFile = open(MapFileName, "w")
except:
# Open failed
EdkLogger.error("BPDG", BuildToolError.FILE_OPEN_FAILURE, "File open failed for %s" % self.MapFileName, None)
        # Use an instance of BytesIO to cache the data
fStringIO = BytesIO()
# Write the header of map file.
try :
fMapFile.write (st.MAP_FILE_COMMENT_TEMPLATE + "\n")
except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
for eachPcd in self.PcdFixedOffsetSizeList :
# write map file
try :
fMapFile.write("%s | %s | %s | %s | %s \n" % (eachPcd.PcdCName, eachPcd.SkuId, eachPcd.PcdOffset, eachPcd.PcdSize, eachPcd.PcdUnpackValue))
except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.MapFileName, None)
# Write Vpd binary file
fStringIO.seek (eachPcd.PcdBinOffset)
if isinstance(eachPcd.PcdValue, list):
for i in range(len(eachPcd.PcdValue)):
                    Value = eachPcd.PcdValue[i:i + 1]
                    # bytes([int]) is never a str on Python 3, so the old
                    # chr() fallback was dead code; write the byte directly.
                    fStringIO.write(bytes(Value))
else:
fStringIO.write (eachPcd.PcdValue)
try :
fVpdFile.write (fStringIO.getvalue())
except:
EdkLogger.error("BPDG", BuildToolError.FILE_WRITE_FAILURE, "Write data to file %s failed, please check whether the file been locked or using by other applications." % self.VpdFileName, None)
fStringIO.close ()
fVpdFile.close ()
fMapFile.close ()
| 46.862319 | 206 | 0.55658 |
4a1e174967505a67d5f72cc5b322936c0bf73d68 | 5,280 | py | Python | audiobook.py | wyb330/Tacotron-2 | 54824d091f50d730fbbd30d2b8f048e9874b6800 | [
"MIT"
] | null | null | null | audiobook.py | wyb330/Tacotron-2 | 54824d091f50d730fbbd30d2b8f048e9874b6800 | [
"MIT"
] | null | null | null | audiobook.py | wyb330/Tacotron-2 | 54824d091f50d730fbbd30d2b8f048e9874b6800 | [
"MIT"
] | null | null | null | import argparse
from hparams import hparams, hparams_debug_string
import tensorflow as tf
from infolog import log
from multi_speaker.synthesizer import Synthesizer
import os
import nltk
from konlpy.tag import Kkma
from tqdm import tqdm
import numpy as np
from datasets import audio
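# Thin wrapper around the synthesizer: returns the mel spectrogram for one
# piece of text and optionally plays the audio while generating it.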
def generate_fast(model, text, speaker_id, play=True):
mels = model.run(text, speaker_id, play)
return mels
def open_file(filename):
try:
f = open(filename, encoding='utf8')
except UnicodeDecodeError:
f = open(filename)
return f
def change_file_ext(file, new_ext):
parts = file.split('.')
parts[-1] = new_ext[1:]
return '.'.join(parts)
def read(args, hparams, checkpoint_path):
log(hparams_debug_string())
if not os.path.exists(args.book):
raise ValueError('{}: {}'.format('No such file or directory', args.book))
speaker_id = args.speaker_id
synth = Synthesizer()
synth.load(checkpoint_path, hparams)
with open_file(args.book) as f:
text = f.read()
if args.lang == 'kr':
kkma = Kkma()
sents = kkma.sentences(text)
else:
sents = nltk.sent_tokenize(text)
for i, line in enumerate(sents):
try:
text = line.strip()
if text:
log('{}/{} {}'.format(i + 1, len(sents), text))
generate_fast(synth, text, speaker_id)
except Exception as e:
log(e)
break
def publish(args, hparams, checkpoint_path):
log(hparams_debug_string())
if not os.path.exists(args.book):
raise ValueError('{}: {}'.format('No such file or directory', args.book))
speaker_id = args.speaker_id
synth = Synthesizer()
synth.load(checkpoint_path, hparams)
with open_file(args.book) as f:
text = f.read()
if args.lang == 'kr':
kkma = Kkma()
sents = kkma.sentences(text)
else:
sents = nltk.sent_tokenize(text)
full_mels = None
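    # Roughly 100 mel frames at the floor level act as inter-sentence
    # silence so the published audio does not run sentences together.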
silence = np.full((100, hparams.num_mels), hparams.min_level_db, np.float32)
for i, line in enumerate(tqdm(sents)):
text = line.strip()
if text:
mels = generate_fast(synth, text, speaker_id, play=False)
if i > 0:
full_mels = np.concatenate((full_mels, silence), axis=0) # padding silence between sents
full_mels = np.concatenate((full_mels, mels), axis=0)
else:
full_mels = mels
save_path = change_file_ext(args.book, '.wav')
log('saving to wav file...')
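    # Invert the concatenated mel spectrogram back to a waveform (Griffin-Lim
    # in the stock Tacotron-2 codebase) and save it next to the input text.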
wav = audio.inv_mel_spectrogram(full_mels.T, hparams)
audio.save_wav(wav, save_path, sr=hparams.sample_rate)
def prepare_run(args):
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
run_name = args.model
taco_checkpoint = os.path.join('logs-' + run_name, 'taco_' + args.checkpoint)
return taco_checkpoint
def main():
accepted_modes = ['read', 'publish']
parser = argparse.ArgumentParser()
parser.add_argument('--base_dir', default='D:/voice/MultiSpeaker')
parser.add_argument('--checkpoint', default='pretrained/', help='Path to model checkpoint')
parser.add_argument('--model', default='MultiSpeaker')
parser.add_argument('--mode', default='publish', help='mode of run: can be one of {}'.format(accepted_modes))
    parser.add_argument('--book', required=True,
                        help='Text file containing the text to be synthesized')
parser.add_argument('--speaker_id', default=2, type=int)
parser.add_argument('--lang', default='kr')
args = parser.parse_args()
accepted_models = ['Tacotron', 'WaveNet', 'Both', 'Tacotron-2', 'MultiSpeaker']
if args.model not in accepted_models:
raise ValueError('please enter a valid model to synthesize with: {}'.format(accepted_models))
if args.mode not in accepted_modes:
raise ValueError('please enter a valid mode to synthesize with: {}'.format(accepted_modes))
checkpoint = prepare_run(args)
try:
checkpoint_path = tf.train.get_checkpoint_state(checkpoint).model_checkpoint_path
log('loaded model at {}'.format(checkpoint_path))
except AttributeError:
# Swap logs dir name in case user used Tacotron-2 for train and Both for test (and vice versa)
if 'Both' in checkpoint:
checkpoint = checkpoint.replace('Both', 'Tacotron-2')
elif 'Tacotron-2' in checkpoint:
checkpoint = checkpoint.replace('Tacotron-2', 'Both')
else:
raise AssertionError('Cannot restore checkpoint: {}, did you train a model?'.format(checkpoint))
try:
# Try loading again
checkpoint_path = tf.train.get_checkpoint_state(checkpoint).model_checkpoint_path
log('loaded model at {}'.format(checkpoint_path))
except:
raise RuntimeError('Failed to load checkpoint at {}'.format(checkpoint))
if args.mode == 'read':
read(args, hparams, checkpoint_path)
elif args.mode == 'publish':
publish(args, hparams, checkpoint_path)
if __name__ == '__main__':
main()
| 34.285714 | 113 | 0.628788 |
4a1e1b1b8004b4842af2509738644ed5a6e818cb | 246 | py | Python | Mundo 1/ex011.py | adonaifariasdev/cursoemvideo-python3 | 1fd35e45b24c52013fa3bc98e723971db8e6b7d1 | [
"MIT"
] | null | null | null | Mundo 1/ex011.py | adonaifariasdev/cursoemvideo-python3 | 1fd35e45b24c52013fa3bc98e723971db8e6b7d1 | [
"MIT"
] | null | null | null | Mundo 1/ex011.py | adonaifariasdev/cursoemvideo-python3 | 1fd35e45b24c52013fa3bc98e723971db8e6b7d1 | [
"MIT"
] | null | null | null | alt = float(input('Qual a altura da parede? '))
lar = float(input('Qual a largura daparede? '))
area = alt * lar
qtde = area / 2
print('A sua área é {:.2f}m2. \nA quantidade de tinta para pintar sua parede é {:.2f} unidades.'.format(area, qtde))
| 41 | 116 | 0.670732 |
4a1e1cd39412672f2d604c5d4a4eafe5effc09de | 11,271 | py | Python | src/lib/zothero/zotero.py | ai0/zothero | 5b057ef080ee730d82d5dd15e064d2a4730c2b11 | [
"MIT"
] | 372 | 2017-12-20T22:56:25.000Z | 2022-03-30T22:43:52.000Z | src/lib/zothero/zotero.py | dishantpandya777/zothero | 5b057ef080ee730d82d5dd15e064d2a4730c2b11 | [
"MIT"
] | 65 | 2017-12-20T23:03:42.000Z | 2022-03-30T13:07:18.000Z | src/lib/zothero/zotero.py | dishantpandya777/zothero | 5b057ef080ee730d82d5dd15e064d2a4730c2b11 | [
"MIT"
] | 26 | 2018-05-23T07:59:52.000Z | 2022-03-21T04:37:16.000Z | # encoding: utf-8
#
# Copyright (c) 2017 Dean Jackson <[email protected]>
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2017-12-15
#
"""Interface to the Zotero database.
All data classes are based on ``AttrDict``, which means their data
can be accesses either as ``dict`` values or as attributes, i.e.
``Entry.title`` and ``Entry['title']`` are equivalent.
The `Zotero` class is a fairly-thin wrapper around the SQLite database
stored by Zotero. It abstracts away the implementation details of the
Zotero datastore.
"""
from __future__ import print_function, absolute_import
import logging
import os
import sqlite3
from .models import (
Entry,
Attachment,
Collection,
Creator,
)
from .util import (
dt2sqlite,
parse_date,
shortpath,
strip_tags,
sqlite2dt,
time_since,
)
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
# Retrieve all items, excluding notes and attachments
ITEMS_SQL = u"""
SELECT items.itemID AS id,
items.dateModified AS modified,
items.key AS key,
items.libraryID AS library,
itemTypes.typeName AS type
FROM items
LEFT JOIN itemTypes
ON items.itemTypeID = itemTypes.itemTypeID
LEFT JOIN deletedItems
ON items.itemID = deletedItems.itemID
-- Ignore notes and attachments
WHERE items.itemTypeID not IN (1, 14)
AND deletedItems.dateDeleted IS NULL
"""
# Retrieve creators for a given item
CREATORS_SQL = u"""
SELECT creators.firstName AS given,
creators.lastName AS family,
itemCreators.orderIndex AS `index`,
creatorTypes.creatorType AS `type`
FROM creators
LEFT JOIN itemCreators
ON creators.creatorID = itemCreators.creatorID
LEFT JOIN creatorTypes
ON itemCreators.creatorTypeID = creatorTypes.creatorTypeID
WHERE itemCreators.itemID = ?
ORDER BY `index` ASC
"""
# Retrieve collections for a given item
COLLECTIONS_SQL = u"""
SELECT collections.collectionName AS name,
collections.key AS key
FROM collections
LEFT JOIN collectionItems
ON collections.collectionID = collectionItems.collectionID
WHERE collectionItems.itemID = ?
"""
# Retrieve attachments for a given item
ATTACHMENTS_SQL = u"""
SELECT
items.key AS key,
itemAttachments.path AS path,
(SELECT itemDataValues.value
FROM itemData
LEFT JOIN fields
ON itemData.fieldID = fields.fieldID
LEFT JOIN itemDataValues
ON itemData.valueID = itemDataValues.valueID
WHERE itemData.itemID = items.itemID AND fields.fieldName = 'title')
title,
(SELECT itemDataValues.value
FROM itemData
LEFT JOIN fields
ON itemData.fieldID = fields.fieldID
LEFT JOIN itemDataValues
ON itemData.valueID = itemDataValues.valueID
WHERE itemData.itemID = items.itemID AND fields.fieldName = 'url')
url
FROM itemAttachments
LEFT JOIN items
ON itemAttachments.itemID = items.itemID
WHERE itemAttachments.parentItemID = ?
"""
# Retrieve IDs of items whose attachments have been modified
MODIFIED_ATTACHMENTS_SQL = u"""
SELECT (SELECT items.key
FROM items
WHERE items.itemID = itemAttachments.parentItemID)
key
FROM itemAttachments
LEFT JOIN items
ON itemAttachments.itemID = items.itemID
WHERE itemAttachments.parentItemID IS NOT NULL
AND items.dateModified > ?
GROUP BY itemAttachments.parentItemID
"""
# Retrieve all data for given item
METADATA_SQL = u"""
SELECT fields.fieldName AS name,
itemDataValues.value AS value
FROM itemData
LEFT JOIN fields
ON itemData.fieldID = fields.fieldID
LEFT JOIN itemDataValues
ON itemData.valueID = itemDataValues.valueID
WHERE itemData.itemID = ?
"""
# Retrieve notes for given item
NOTES_SQL = u"""
SELECT itemNotes.note AS note
FROM itemNotes
LEFT JOIN items
ON itemNotes.itemID = items.itemID
WHERE itemNotes.parentItemID = ?
"""
# Retrieve tags for given item
TAGS_SQL = u"""
SELECT tags.name AS name
FROM tags
LEFT JOIN itemTags
ON tags.tagID = itemTags.tagID
WHERE itemTags.itemID = ?
"""
class Zotero(object):
"""Interface to the Zotero database."""
def __init__(self, datadir, dbpath=None, attachments_base_dir=None):
"""Load Zotero data from ``datadir``.
Args:
datadir (str): Path to Zotero's data directory.
dbpath (str, optional): Path to `zotero.sqlite` if not in
``datadir``.
"""
self.datadir = datadir
self._attachments_dir = attachments_base_dir
self.dbpath = dbpath or os.path.join(datadir, 'zotero.sqlite')
self._conn = None
self._bbt = None # BetterBibTex
@property
def conn(self):
"""Return connection to the database."""
if not self._conn:
self._conn = sqlite3.connect(self.dbpath)
self._conn.row_factory = sqlite3.Row
log.debug('[zotero] opened database %r', shortpath(self.dbpath))
return self._conn
@property
def bbt(self):
"""Return BetterBibTex."""
if not self._bbt:
from .betterbibtex import BetterBibTex
self._bbt = BetterBibTex(self.datadir)
if self._bbt.exists:
log.debug('[zotero] loaded BetterBibTex data')
return self._bbt
@property
def last_updated(self):
"""Return modified time of database file."""
t = os.path.getmtime(self.dbpath)
log.debug('[zotero] database last modified %s', time_since(t))
return t
@property
def storage_dir(self):
"""Path to Zotero's internal directory for attachments."""
return os.path.join(self.datadir, 'storage')
@property
def attachments_dir(self):
"""Path to Zotero's external attachment base directory."""
if not self._attachments_dir:
raise ValueError('attachments directory is unset')
if not os.path.exists(self._attachments_dir):
raise ValueError('attachments directory does not exist: %r' %
self._attachments_dir)
return self._attachments_dir
@property
def styles_dir(self):
"""Path to Zotero's directory for styles."""
path = os.path.join(self.datadir, 'styles')
if not os.path.exists(path):
raise ValueError('styles directory does not exist: %r' % path)
return path
def keys(self):
"""Iterate entry keys."""
for row in self.conn.execute(ITEMS_SQL):
yield row['key']
def ids(self):
"""Iterate entry IDs."""
for row in self.conn.execute(ITEMS_SQL):
yield row['id']
def entry(self, key):
"""Return Entry for key."""
sql = ITEMS_SQL + 'AND key = ?'
row = self.conn.execute(sql, (key,)).fetchone()
if not row:
return None
return self._load_entry(row)
def modified_since(self, dt):
"""Iterate Entries modified since datetime."""
sql = ITEMS_SQL + 'AND modified > ?'
ts = dt2sqlite(dt)
for row in self.conn.execute(sql, (ts,)):
yield self._load_entry(row)
# Items whose attachments have changed
sql = MODIFIED_ATTACHMENTS_SQL
for row in self.conn.execute(sql, (ts,)):
log.debug('[zotero] attachment(s) modified')
yield self.entry(row['key'])
def all_entries(self):
"""Return all database entries."""
for row in self.conn.execute(ITEMS_SQL):
yield self._load_entry(row)
def _load_entry(self, row):
"""Create an `Entry` from a SQLite database row."""
e = Entry(**row)
# Defaults & empty attributes
for k in ('collections', 'creators', 'attachments',
'notes', 'tags'):
e[k] = []
# Metadata
e.title = u''
e.date = None
e.year = 0
e.abstract = u''
e.zdata = {}
# Parseable attributes
e.modified = sqlite2dt(e.modified)
# --------------------------------------------------
# Other data
for row in self.conn.execute(METADATA_SQL, (e.id,)):
k, v = row['name'], row['value']
# everything goes in the `zdata` dict
e.zdata[k] = v
if k == 'title':
log.debug(u'[zotero] + "%s"', v)
e.title = v
# Legal cases
if k == 'caseName':
log.debug(u'[zotero] + "%s"', v)
e.title = v
elif k == 'date':
e.date = parse_date(v)
e.year = int(v[:4])
elif k == 'abstractNote':
e.abstract = v
# --------------------------------------------------
# Data from other tables
e.attachments = self._entry_attachments(e.id)
e.collections = self._entry_collections(e.id)
e.creators = self._entry_creators(e.id)
e.notes = self._entry_notes(e.id)
e.tags = self._entry_tags(e.id)
# Better Bibtex citekey
e.citekey = self.bbt.citekey('{}_{}'.format(e.library, e.key))
return e
def _entry_attachments(self, entry_id):
"""Fetch attachments for an entry."""
attachments = []
for row in self.conn.execute(ATTACHMENTS_SQL, (entry_id,)):
key, path, title, url = (row['key'], row['path'],
row['title'], row['url'])
# Attachment may be in Zotero's storage somewhere, so
# fix path to point to the right place.
if path and not os.path.exists(path):
if path.startswith('storage:'):
path = path[8:]
path = os.path.join(self.storage_dir, key, path)
elif path.startswith('attachments:'):
path = path[12:]
try:
path = os.path.join(self.attachments_dir, path)
except ValueError as err:
log.warning(u"[zotero] can't access attachment "
'"%s": %s', path, err)
continue
a = Attachment(key=key, name=title, path=path, url=url)
log.debug('[zotero] attachment=%r', a)
attachments.append(a)
return attachments
def _entry_collections(self, entry_id):
"""Fetch collections for an entry."""
rows = self.conn.execute(COLLECTIONS_SQL, (entry_id,))
return [Collection(**row) for row in rows]
def _entry_creators(self, entry_id):
"""Fetch creators for an entry."""
rows = self.conn.execute(CREATORS_SQL, (entry_id,))
return [Creator(**row) for row in rows]
def _entry_notes(self, entry_id):
"""Fetch notes for an entry."""
rows = self.conn.execute(NOTES_SQL, (entry_id,))
return [strip_tags(row['note']) for row in rows]
def _entry_tags(self, entry_id):
"""Fetch tags for an entry."""
rows = self.conn.execute(TAGS_SQL, (entry_id,))
return [row['name'] for row in rows]
| 30.298387 | 76 | 0.600657 |
4a1e1d36ac36a792f3a75b9afcb7823762559961 | 264 | py | Python | tests/clienttest.py | netrack/bayes | 15e0c54b795f4ce527cc5e2c46bbb7da434ac036 | [
"Apache-2.0"
] | 12 | 2019-07-15T11:15:23.000Z | 2019-12-05T12:19:48.000Z | tests/clienttest.py | netrack/bayes | 15e0c54b795f4ce527cc5e2c46bbb7da434ac036 | [
"Apache-2.0"
] | 10 | 2019-06-25T17:42:44.000Z | 2019-07-09T13:28:12.000Z | tests/clienttest.py | netrack/tensorcraft | 15e0c54b795f4ce527cc5e2c46bbb7da434ac036 | [
"Apache-2.0"
] | 1 | 2019-05-23T13:22:19.000Z | 2019-05-23T13:22:19.000Z | import unittest.mock
from tensorcraft.client import Model
from tests import asynctest
def unittest_mock_model_client(method: str):
return unittest.mock.patch.object(Model, method,
new_callable=asynctest.AsyncMagicMock)
| 26.4 | 76 | 0.708333 |
4a1e1e2f2efccb4e47c47b7d154486fedf75fef1 | 3,110 | py | Python | test/FileShare/tests/TestAll.py | mkozuharov/DataStage | a61e67c901c2b43afa0fec6a99ef72152391a55f | [
"MIT"
] | 8 | 2015-02-25T10:58:52.000Z | 2020-04-17T03:11:48.000Z | test/FileShare/tests/TestAll.py | mkozuharov/DataStage | a61e67c901c2b43afa0fec6a99ef72152391a55f | [
"MIT"
] | null | null | null | test/FileShare/tests/TestAll.py | mkozuharov/DataStage | a61e67c901c2b43afa0fec6a99ef72152391a55f | [
"MIT"
] | 7 | 2015-02-25T10:58:54.000Z | 2019-11-04T16:19:22.000Z | # ---------------------------------------------------------------------
#
# Copyright (c) 2012 University of Oxford
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, --INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# ---------------------------------------------------------------------
# $Id: TestAll.py 1047 2009-01-15 14:48:58Z graham $
#
# Unit testing for WebBrick library functions (Functions.py)
# See http://pyunit.sourceforge.net/pyunit.html
#
import sys, unittest, logging
# Add main library directory to python path
sys.path.append("../..")
import TestFilePrivateArea
import TestFileSharedArea
import TestFileCollabArea
import TestDeletedUserCheckFileAccess
import TestFileDefaultArea
import TestWebDAVAccess
import TestWebDAVbyHTTP
# Code to run unit tests from all library test modules
def getTestSuite(select="all"):
suite = unittest.TestSuite()
suite.addTest(TestFilePrivateArea.getTestSuite(select=select))
suite.addTest(TestFileSharedArea.getTestSuite(select=select))
suite.addTest(TestFileCollabArea.getTestSuite(select=select))
suite.addTest(TestDeletedUserCheckFileAccess.getTestSuite(select=select))
suite.addTest(TestFileDefaultArea.getTestSuite(select=select))
suite.addTest(TestWebDAVAccess.getTestSuite(select=select))
suite.addTest(TestWebDAVbyHTTP.getTestSuite(select=select))
return suite
from MiscLib import TestUtils
import junitxml
if __name__ == "__main__":
print "============================================================"
print "This test suite needs to run under a Linux operating system"
print "Edit TestConfig.py to specify hostname and other parameters"
print "Create test accounts on target system to match TestConfig.py"
print "============================================================"
if len(sys.argv) >= 2 and sys.argv[1] == "xml":
with open('xmlresults.xml', 'w') as report:
result = junitxml.JUnitXmlResult(report)
result.startTestRun()
getTestSuite().run(result)
result.stopTestRun()
else:
TestUtils.runTests("TestAll", getTestSuite, sys.argv)
# End.
| 40.38961 | 77 | 0.690997 |
4a1e1e7cc611f532060cf4df276e65181147ab42 | 134 | py | Python | configs/upernet/upernet_r101_512x1024_80k_cityscapes.py | Xlinford/mmsegmentation | 8b444de5e6db2af2538a73a93ac75204f5c3bb2f | [
"Apache-2.0"
] | null | null | null | configs/upernet/upernet_r101_512x1024_80k_cityscapes.py | Xlinford/mmsegmentation | 8b444de5e6db2af2538a73a93ac75204f5c3bb2f | [
"Apache-2.0"
] | null | null | null | configs/upernet/upernet_r101_512x1024_80k_cityscapes.py | Xlinford/mmsegmentation | 8b444de5e6db2af2538a73a93ac75204f5c3bb2f | [
"Apache-2.0"
] | null | null | null | _base_ = './upernet_r50_512x1024_80k_cityscapes.py'
model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
| 44.666667 | 80 | 0.776119 |
4a1e1f7b851f1c0f06d6f07977f1f8d9f9882075 | 1,804 | py | Python | test_station.py | AldricGoh/year1_floodwaringsystem | d56ede73f64a351ef349f8e9d53d9db6e35a6e6f | [
"MIT"
] | null | null | null | test_station.py | AldricGoh/year1_floodwaringsystem | d56ede73f64a351ef349f8e9d53d9db6e35a6e6f | [
"MIT"
] | null | null | null | test_station.py | AldricGoh/year1_floodwaringsystem | d56ede73f64a351ef349f8e9d53d9db6e35a6e6f | [
"MIT"
] | null | null | null | # Copyright (C) 2018 Garth N. Wells
#
# SPDX-License-Identifier: MIT
"""Unit test for the station module"""
from floodsystem.station import MonitoringStation
from floodsystem.station import inconsistent_typical_range_stations
def test_create_monitoring_station():
# Create a station
s_id = "test-s-id"
m_id = "test-m-id"
label = "some station"
coord = (-2.0, 4.0)
trange = (-2.3, 3.4445)
river = "River X"
town = "My Town"
s = MonitoringStation(s_id, m_id, label, coord, trange, river, town)
assert s.station_id == s_id
assert s.measure_id == m_id
assert s.name == label
assert s.coord == coord
assert s.typical_range == trange
assert s.river == river
assert s.town == town
def test_consistency():
# Create stations with incorrect & correct data
s_id = "test-s-id"
m_id = "test-m-id"
label_1 = "Upper < lower limit"
label_2 = "No range"
label_3 = "Correct!"
coord = (-2.0, 4.0)
trange_1 = (5, 4)
trange_2 = None
trange_3 = (2, 3)
river = "River X"
town = "My Town"
s_false1 = MonitoringStation(s_id, m_id, label_1, coord, trange_1, river, town)
s_false2 = MonitoringStation(s_id, m_id, label_2, coord, trange_2, river, town)
s_true = MonitoringStation(s_id, m_id, label_3, coord, trange_3, river, town)
assert MonitoringStation.typical_range_consistent(s_false1) == False
assert MonitoringStation.typical_range_consistent(s_false2) == False
assert MonitoringStation.typical_range_consistent(s_true) == True
z = [s_false1, s_false2, s_true]
assert type(inconsistent_typical_range_stations(z)) == list
assert len(inconsistent_typical_range_stations(z)) == 2
assert inconsistent_typical_range_stations(z) == ["No range", "Upper < lower limit"] | 33.407407 | 88 | 0.682373 |
4a1e20bb4a39bdc6ef98f2373e2351e1581a59c4 | 24,201 | py | Python | test/test_numpy.py | necaris/orjson | 02d6805af278493f657c17305a2f0cc054f04078 | [
"Apache-2.0",
"MIT"
] | 3,041 | 2018-11-26T08:15:46.000Z | 2022-03-31T22:14:51.000Z | test/test_numpy.py | brandery/orjson | 02d6805af278493f657c17305a2f0cc054f04078 | [
"Apache-2.0",
"MIT"
] | 240 | 2018-11-25T20:01:02.000Z | 2022-03-31T19:48:08.000Z | test/test_numpy.py | brandery/orjson | 02d6805af278493f657c17305a2f0cc054f04078 | [
"Apache-2.0",
"MIT"
] | 145 | 2018-12-20T08:54:32.000Z | 2022-03-30T06:17:47.000Z | # SPDX-License-Identifier: (Apache-2.0 OR MIT)
import unittest
import pytest
import orjson
try:
import numpy
except ImportError:
numpy = None
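# Fallback passed via ``default=``: lets orjson serialize arrays it rejects
# natively (non-contiguous memory or unsupported dtypes) as plain lists.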
def numpy_default(obj):
return obj.tolist()
@pytest.mark.skipif(numpy is None, reason="numpy is not installed")
class NumpyTests(unittest.TestCase):
def test_numpy_array_d1_uintp(self):
self.assertEqual(
orjson.dumps(
numpy.array([0, 18446744073709551615], numpy.uintp),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[0,18446744073709551615]",
)
def test_numpy_array_d1_intp(self):
self.assertEqual(
orjson.dumps(
numpy.array([-9223372036854775807, 9223372036854775807], numpy.intp),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[-9223372036854775807,9223372036854775807]",
)
def test_numpy_array_d1_i64(self):
self.assertEqual(
orjson.dumps(
numpy.array([-9223372036854775807, 9223372036854775807], numpy.int64),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[-9223372036854775807,9223372036854775807]",
)
def test_numpy_array_d1_u64(self):
self.assertEqual(
orjson.dumps(
numpy.array([0, 18446744073709551615], numpy.uint64),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[0,18446744073709551615]",
)
def test_numpy_array_d1_i8(self):
self.assertEqual(
orjson.dumps(
numpy.array([-128, 127], numpy.int8),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[-128,127]",
)
def test_numpy_array_d1_u8(self):
self.assertEqual(
orjson.dumps(
numpy.array([0, 255], numpy.uint8),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[0,255]",
)
def test_numpy_array_d1_i32(self):
self.assertEqual(
orjson.dumps(
numpy.array([-2147483647, 2147483647], numpy.int32),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[-2147483647,2147483647]",
)
def test_numpy_array_d1_u32(self):
self.assertEqual(
orjson.dumps(
numpy.array([0, 4294967295], numpy.uint32),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[0,4294967295]",
)
def test_numpy_array_d1_f32(self):
self.assertEqual(
orjson.dumps(
numpy.array([1.0, 3.4028235e38], numpy.float32),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[1.0,3.4028235e38]",
)
def test_numpy_array_d1_f64(self):
self.assertEqual(
orjson.dumps(
numpy.array([1.0, 1.7976931348623157e308], numpy.float64),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[1.0,1.7976931348623157e308]",
)
def test_numpy_array_d1_bool(self):
self.assertEqual(
orjson.dumps(
numpy.array([True, False, False, True]),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[true,false,false,true]",
)
def test_numpy_array_d1_datetime64_years(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("1"),
numpy.datetime64("970"),
numpy.datetime64("1920"),
numpy.datetime64("1971"),
numpy.datetime64("2021"),
numpy.datetime64("2022"),
numpy.datetime64("2023"),
numpy.datetime64("9999"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["0001-01-01T00:00:00","0970-01-01T00:00:00","1920-01-01T00:00:00","1971-01-01T00:00:00","2021-01-01T00:00:00","2022-01-01T00:00:00","2023-01-01T00:00:00","9999-01-01T00:00:00"]',
)
def test_numpy_array_d1_datetime64_months(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01"),
numpy.datetime64("2022-01"),
numpy.datetime64("2023-01"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2022-01-01T00:00:00","2023-01-01T00:00:00"]',
)
def test_numpy_array_d1_datetime64_days(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01"),
numpy.datetime64("2021-01-01"),
numpy.datetime64("2021-01-01"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T00:00:00","2021-01-01T00:00:00"]',
)
def test_numpy_array_d1_datetime64_hours(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00"),
numpy.datetime64("2021-01-01T01"),
numpy.datetime64("2021-01-01T02"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T01:00:00","2021-01-01T02:00:00"]',
)
def test_numpy_array_d1_datetime64_minutes(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00:00"),
numpy.datetime64("2021-01-01T00:01"),
numpy.datetime64("2021-01-01T00:02"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T00:01:00","2021-01-01T00:02:00"]',
)
def test_numpy_array_d1_datetime64_seconds(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00:00:00"),
numpy.datetime64("2021-01-01T00:00:01"),
numpy.datetime64("2021-01-01T00:00:02"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T00:00:01","2021-01-01T00:00:02"]',
)
def test_numpy_array_d1_datetime64_milliseconds(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00:00:00"),
numpy.datetime64("2021-01-01T00:00:00.172"),
numpy.datetime64("2021-01-01T00:00:00.567"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T00:00:00.172000","2021-01-01T00:00:00.567000"]',
)
def test_numpy_array_d1_datetime64_microseconds(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00:00:00"),
numpy.datetime64("2021-01-01T00:00:00.172"),
numpy.datetime64("2021-01-01T00:00:00.567891"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T00:00:00.172000","2021-01-01T00:00:00.567891"]',
)
def test_numpy_array_d1_datetime64_nanoseconds(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00:00:00"),
numpy.datetime64("2021-01-01T00:00:00.172"),
numpy.datetime64("2021-01-01T00:00:00.567891234"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'["2021-01-01T00:00:00","2021-01-01T00:00:00.172000","2021-01-01T00:00:00.567891"]',
)
def test_numpy_array_d1_datetime64_picoseconds(self):
try:
orjson.dumps(
numpy.array(
[
numpy.datetime64("2021-01-01T00:00:00"),
numpy.datetime64("2021-01-01T00:00:00.172"),
numpy.datetime64("2021-01-01T00:00:00.567891234567"),
]
),
option=orjson.OPT_SERIALIZE_NUMPY,
)
assert False
except TypeError as exc:
self.assertEqual(
str(exc),
"unsupported numpy.datetime64 unit: picoseconds",
)
def test_numpy_array_d2_i64(self):
self.assertEqual(
orjson.dumps(
numpy.array([[1, 2, 3], [4, 5, 6]], numpy.int64),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[1,2,3],[4,5,6]]",
)
def test_numpy_array_d2_f64(self):
self.assertEqual(
orjson.dumps(
numpy.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]], numpy.float64),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[1.0,2.0,3.0],[4.0,5.0,6.0]]",
)
def test_numpy_array_d3_i8(self):
self.assertEqual(
orjson.dumps(
numpy.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]], numpy.int8),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[[1,2],[3,4]],[[5,6],[7,8]]]",
)
def test_numpy_array_d3_u8(self):
self.assertEqual(
orjson.dumps(
numpy.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]], numpy.uint8),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[[1,2],[3,4]],[[5,6],[7,8]]]",
)
def test_numpy_array_d3_i32(self):
self.assertEqual(
orjson.dumps(
numpy.array([[[1, 2], [3, 4]], [[5, 6], [7, 8]]], numpy.int32),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[[1,2],[3,4]],[[5,6],[7,8]]]",
)
def test_numpy_array_d3_i64(self):
self.assertEqual(
orjson.dumps(
numpy.array([[[1, 2], [3, 4], [5, 6], [7, 8]]], numpy.int64),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[[1,2],[3,4],[5,6],[7,8]]]",
)
def test_numpy_array_d3_f64(self):
self.assertEqual(
orjson.dumps(
numpy.array(
[[[1.0, 2.0], [3.0, 4.0]], [[5.0, 6.0], [7.0, 8.0]]], numpy.float64
),
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[[1.0,2.0],[3.0,4.0]],[[5.0,6.0],[7.0,8.0]]]",
)
def test_numpy_array_fortran(self):
array = numpy.array([[1, 2], [3, 4]], order="F")
assert array.flags["F_CONTIGUOUS"] == True
with self.assertRaises(orjson.JSONEncodeError):
orjson.dumps(array, option=orjson.OPT_SERIALIZE_NUMPY)
self.assertEqual(
orjson.dumps(
array, default=numpy_default, option=orjson.OPT_SERIALIZE_NUMPY
),
orjson.dumps(array.tolist()),
)
def test_numpy_array_non_contiguous_message(self):
array = numpy.array([[1, 2], [3, 4]], order="F")
        assert array.flags["F_CONTIGUOUS"]
try:
orjson.dumps(array, option=orjson.OPT_SERIALIZE_NUMPY)
            self.fail("TypeError was not raised")
except TypeError as exc:
self.assertEqual(
str(exc),
"numpy array is not C contiguous; use ndarray.tolist() in default",
)
def test_numpy_array_unsupported_dtype(self):
array = numpy.array([[1, 2], [3, 4]], numpy.float16)
with self.assertRaises(orjson.JSONEncodeError) as cm:
orjson.dumps(array, option=orjson.OPT_SERIALIZE_NUMPY)
assert str(cm.exception) == "unsupported datatype in numpy array"
self.assertEqual(
orjson.dumps(
array, default=numpy_default, option=orjson.OPT_SERIALIZE_NUMPY
),
orjson.dumps(array.tolist()),
)
def test_numpy_array_d1(self):
array = numpy.array([1])
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_array_d2(self):
array = numpy.array([[1]])
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_array_d3(self):
array = numpy.array([[[1]]])
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_array_d4(self):
array = numpy.array([[[[1]]]])
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_array_4_stride(self):
array = numpy.random.rand(4, 4, 4, 4)
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_array_dimension_zero(self):
array = numpy.array(0)
assert array.ndim == 0
with self.assertRaises(orjson.JSONEncodeError):
orjson.dumps(array, option=orjson.OPT_SERIALIZE_NUMPY)
array = numpy.empty((0, 4, 2))
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
array = numpy.empty((4, 0, 2))
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
array = numpy.empty((2, 4, 0))
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_array_dimension_max(self):
array = numpy.random.rand(
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
1,
)
assert array.ndim == 32
self.assertEqual(
orjson.loads(
orjson.dumps(
array,
option=orjson.OPT_SERIALIZE_NUMPY,
)
),
array.tolist(),
)
def test_numpy_scalar_int8(self):
self.assertEqual(
orjson.dumps(numpy.int8(0), option=orjson.OPT_SERIALIZE_NUMPY), b"0"
)
self.assertEqual(
orjson.dumps(numpy.int8(127), option=orjson.OPT_SERIALIZE_NUMPY),
b"127",
)
self.assertEqual(
            orjson.dumps(numpy.int8(-128), option=orjson.OPT_SERIALIZE_NUMPY),
b"-128",
)
def test_numpy_scalar_int32(self):
self.assertEqual(
orjson.dumps(numpy.int32(1), option=orjson.OPT_SERIALIZE_NUMPY), b"1"
)
self.assertEqual(
orjson.dumps(numpy.int32(2147483647), option=orjson.OPT_SERIALIZE_NUMPY),
b"2147483647",
)
self.assertEqual(
orjson.dumps(numpy.int32(-2147483648), option=orjson.OPT_SERIALIZE_NUMPY),
b"-2147483648",
)
def test_numpy_scalar_int64(self):
self.assertEqual(
orjson.dumps(
numpy.int64(-9223372036854775808), option=orjson.OPT_SERIALIZE_NUMPY
),
b"-9223372036854775808",
)
self.assertEqual(
orjson.dumps(
numpy.int64(9223372036854775807), option=orjson.OPT_SERIALIZE_NUMPY
),
b"9223372036854775807",
)
def test_numpy_scalar_uint8(self):
self.assertEqual(
orjson.dumps(numpy.uint8(0), option=orjson.OPT_SERIALIZE_NUMPY), b"0"
)
self.assertEqual(
orjson.dumps(numpy.uint8(255), option=orjson.OPT_SERIALIZE_NUMPY),
b"255",
)
def test_numpy_scalar_uint32(self):
self.assertEqual(
orjson.dumps(numpy.uint32(0), option=orjson.OPT_SERIALIZE_NUMPY), b"0"
)
self.assertEqual(
orjson.dumps(numpy.uint32(4294967295), option=orjson.OPT_SERIALIZE_NUMPY),
b"4294967295",
)
def test_numpy_scalar_uint64(self):
self.assertEqual(
orjson.dumps(numpy.uint64(0), option=orjson.OPT_SERIALIZE_NUMPY), b"0"
)
self.assertEqual(
orjson.dumps(
numpy.uint64(18446744073709551615), option=orjson.OPT_SERIALIZE_NUMPY
),
b"18446744073709551615",
)
def test_numpy_scalar_float32(self):
self.assertEqual(
orjson.dumps(numpy.float32(1.0), option=orjson.OPT_SERIALIZE_NUMPY), b"1.0"
)
def test_numpy_scalar_float64(self):
self.assertEqual(
orjson.dumps(numpy.float64(123.123), option=orjson.OPT_SERIALIZE_NUMPY),
b"123.123",
)
def test_numpy_bool(self):
self.assertEqual(
orjson.dumps(
{"a": numpy.bool_(True), "b": numpy.bool_(False)},
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'{"a":true,"b":false}',
)
def test_numpy_datetime(self):
self.assertEqual(
orjson.dumps(
{
"year": numpy.datetime64("2021"),
"month": numpy.datetime64("2021-01"),
"day": numpy.datetime64("2021-01-01"),
"hour": numpy.datetime64("2021-01-01T00"),
"minute": numpy.datetime64("2021-01-01T00:00"),
"second": numpy.datetime64("2021-01-01T00:00:00"),
"milli": numpy.datetime64("2021-01-01T00:00:00.172"),
"micro": numpy.datetime64("2021-01-01T00:00:00.172576"),
"nano": numpy.datetime64("2021-01-01T00:00:00.172576789"),
},
option=orjson.OPT_SERIALIZE_NUMPY,
),
b'{"year":"2021-01-01T00:00:00","month":"2021-01-01T00:00:00","day":"2021-01-01T00:00:00","hour":"2021-01-01T00:00:00","minute":"2021-01-01T00:00:00","second":"2021-01-01T00:00:00","milli":"2021-01-01T00:00:00.172000","micro":"2021-01-01T00:00:00.172576","nano":"2021-01-01T00:00:00.172576"}',
)
def test_numpy_datetime_naive_utc(self):
self.assertEqual(
orjson.dumps(
{
"year": numpy.datetime64("2021"),
"month": numpy.datetime64("2021-01"),
"day": numpy.datetime64("2021-01-01"),
"hour": numpy.datetime64("2021-01-01T00"),
"minute": numpy.datetime64("2021-01-01T00:00"),
"second": numpy.datetime64("2021-01-01T00:00:00"),
"milli": numpy.datetime64("2021-01-01T00:00:00.172"),
"micro": numpy.datetime64("2021-01-01T00:00:00.172576"),
"nano": numpy.datetime64("2021-01-01T00:00:00.172576789"),
},
option=orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_NAIVE_UTC,
),
b'{"year":"2021-01-01T00:00:00+00:00","month":"2021-01-01T00:00:00+00:00","day":"2021-01-01T00:00:00+00:00","hour":"2021-01-01T00:00:00+00:00","minute":"2021-01-01T00:00:00+00:00","second":"2021-01-01T00:00:00+00:00","milli":"2021-01-01T00:00:00.172000+00:00","micro":"2021-01-01T00:00:00.172576+00:00","nano":"2021-01-01T00:00:00.172576+00:00"}',
)
def test_numpy_datetime_naive_utc_utc_z(self):
self.assertEqual(
orjson.dumps(
{
"year": numpy.datetime64("2021"),
"month": numpy.datetime64("2021-01"),
"day": numpy.datetime64("2021-01-01"),
"hour": numpy.datetime64("2021-01-01T00"),
"minute": numpy.datetime64("2021-01-01T00:00"),
"second": numpy.datetime64("2021-01-01T00:00:00"),
"milli": numpy.datetime64("2021-01-01T00:00:00.172"),
"micro": numpy.datetime64("2021-01-01T00:00:00.172576"),
"nano": numpy.datetime64("2021-01-01T00:00:00.172576789"),
},
option=orjson.OPT_SERIALIZE_NUMPY
| orjson.OPT_NAIVE_UTC
| orjson.OPT_UTC_Z,
),
b'{"year":"2021-01-01T00:00:00Z","month":"2021-01-01T00:00:00Z","day":"2021-01-01T00:00:00Z","hour":"2021-01-01T00:00:00Z","minute":"2021-01-01T00:00:00Z","second":"2021-01-01T00:00:00Z","milli":"2021-01-01T00:00:00.172000Z","micro":"2021-01-01T00:00:00.172576Z","nano":"2021-01-01T00:00:00.172576Z"}',
)
def test_numpy_datetime_omit_microseconds(self):
self.assertEqual(
orjson.dumps(
{
"year": numpy.datetime64("2021"),
"month": numpy.datetime64("2021-01"),
"day": numpy.datetime64("2021-01-01"),
"hour": numpy.datetime64("2021-01-01T00"),
"minute": numpy.datetime64("2021-01-01T00:00"),
"second": numpy.datetime64("2021-01-01T00:00:00"),
"milli": numpy.datetime64("2021-01-01T00:00:00.172"),
"micro": numpy.datetime64("2021-01-01T00:00:00.172576"),
"nano": numpy.datetime64("2021-01-01T00:00:00.172576789"),
},
option=orjson.OPT_SERIALIZE_NUMPY | orjson.OPT_OMIT_MICROSECONDS,
),
b'{"year":"2021-01-01T00:00:00","month":"2021-01-01T00:00:00","day":"2021-01-01T00:00:00","hour":"2021-01-01T00:00:00","minute":"2021-01-01T00:00:00","second":"2021-01-01T00:00:00","milli":"2021-01-01T00:00:00","micro":"2021-01-01T00:00:00","nano":"2021-01-01T00:00:00"}',
)
def test_numpy_repeated(self):
data = numpy.array([[[1, 2], [3, 4], [5, 6], [7, 8]]], numpy.int64)
for _ in range(0, 3):
self.assertEqual(
orjson.dumps(
data,
option=orjson.OPT_SERIALIZE_NUMPY,
),
b"[[[1,2],[3,4],[5,6],[7,8]]]",
)
| 34.474359 | 359 | 0.488699 |
4a1e21282625011cd694487871ef8b2c91f95370 | 6,250 | py | Python | examples/quant_engine_single_unit_feather_example_scr_expr_univ_ids.py | factset/analyticsapi-engines-python-sdk | fd066c537344667102c44a4a3d50bd7df7e545fe | ["Apache-2.0"] | 12 | 2019-12-01T10:34:50.000Z | 2022-03-14T00:11:21.000Z | examples/quant_engine_single_unit_feather_example_scr_expr_univ_ids.py | factset/analyticsapi-engines-python-sdk | fd066c537344667102c44a4a3d50bd7df7e545fe | ["Apache-2.0"] | 33 | 2019-12-01T12:14:01.000Z | 2022-03-22T04:50:30.000Z | examples/quant_engine_single_unit_feather_example_scr_expr_univ_ids.py | factset/analyticsapi-engines-python-sdk | fd066c537344667102c44a4a3d50bd7df7e545fe | ["Apache-2.0"] | 1 | 2019-12-01T10:34:52.000Z | 2019-12-01T10:34:52.000Z |
import time
from pathlib import Path
import pandas as pd
from fds.analyticsapi.engines import ApiException
from fds.analyticsapi.engines.api.quant_calculations_api import QuantCalculationsApi
from fds.analyticsapi.engines.api_client import ApiClient
from fds.analyticsapi.engines.configuration import Configuration
from fds.analyticsapi.engines.model.quant_calculation_parameters_root import QuantCalculationParametersRoot
from fds.analyticsapi.engines.model.quant_calculation_parameters import QuantCalculationParameters
from fds.analyticsapi.engines.model.quant_calculation_meta import QuantCalculationMeta
from fds.analyticsapi.engines.model.quant_identifier_universe import QuantIdentifierUniverse
from fds.analyticsapi.engines.model.quant_fds_date import QuantFdsDate
from fds.analyticsapi.engines.model.quant_screening_expression import QuantScreeningExpression
from urllib3 import Retry
host = "https://api.factset.com"
username = "<username-serial>"
password = "<apiKey>"
def main():
config = Configuration()
config.host = host
config.username = username
config.password = password
# add proxy and/or disable ssl verification according to your development environment
# config.proxy = "<proxyUrl>"
config.verify_ssl = False
# Setting configuration to retry api calls on http status codes of 429 and 503.
config.retries = Retry(total=3, status=3, status_forcelist=frozenset([429, 503]), backoff_factor=2,
raise_on_status=False)
api_client = ApiClient(config)
try:
identifierUniverse = QuantIdentifierUniverse(source="IdentifierUniverse",
universe_type="Equity",
identifiers=[
"IBM",
"MS",
"GE"
])
fdsDate = QuantFdsDate(source="FdsDate",
start_date="20050701", end_date="20051001", frequency="M", calendar="FIVEDAY")
screeningExpression = QuantScreeningExpression(source="ScreeningExpression",
expr="P_PRICE", name="Price")
screeningExpression1 = QuantScreeningExpression(source="ScreeningExpression",
expr="FF_EPS", name="Eps")
screeningExpression2 = QuantScreeningExpression(source="ScreeningExpression",
expr="FG_GICS_SECTOR", name="Sector")
quant_calculation_parameters = {"1": QuantCalculationParameters(
universe=identifierUniverse,
dates=fdsDate,
formulas=[screeningExpression, screeningExpression1, screeningExpression2])
}
        # Uncomment the code line below to set up cache control; the max-stale value is in seconds, and max-stale=0 forces a fresh ad hoc run.
        # Results are cached for 12 hours by default; setting max-stale=300 will fetch a cached result up to 5 minutes old.
# cache_control = "max-stale=0"
quant_calculations_meta = QuantCalculationMeta(format='Feather')
quant_calculation_parameter_root = QuantCalculationParametersRoot(
data=quant_calculation_parameters, meta=quant_calculations_meta)
quant_calculations_api = QuantCalculationsApi(api_client)
post_and_calculate_response = quant_calculations_api.post_and_calculate(
quant_calculation_parameters_root=quant_calculation_parameter_root)
# comment the above line and uncomment the below line to run the request with the cache_control header defined earlier
# post_and_calculate_response = quant_calculations_api.post_and_calculate(
# quant_calculation_parameters_root=quant_calculation_parameter_root, cache_control=cache_control)
if post_and_calculate_response[1] == 201:
output_calculation_result('data', post_and_calculate_response[0])
else:
calculation_id = post_and_calculate_response[0].data.calculationid
print("Calculation Id: " + calculation_id)
status_response = quant_calculations_api.get_calculation_status_by_id(id=calculation_id)
while status_response[1] == 202 and (status_response[0].data.status in ("Queued", "Executing")):
max_age = '5'
age_value = status_response[2].get("cache-control")
if age_value is not None:
max_age = age_value.replace("max-age=", "")
print('Sleeping: ' + max_age)
time.sleep(int(max_age))
status_response = quant_calculations_api.get_calculation_status_by_id(id=calculation_id)
for (calculation_unit_id, calculation_unit) in status_response[0].data.units.items():
if calculation_unit.status == "Success":
print("Calculation Unit Id: " +
calculation_unit_id + " Succeeded!!!")
result_response = quant_calculations_api.get_calculation_unit_result_by_id(id=calculation_id,
unit_id=calculation_unit_id)
print("Calculation Data")
output_calculation_result(
'data', result_response[0].read())
result_response = quant_calculations_api.get_calculation_unit_info_by_id(id=calculation_id,
unit_id=calculation_unit_id)
print("Calculation Info")
output_calculation_result(
'info', result_response[0].read())
else:
print("Calculation Unit Id:" +
calculation_unit_id + " Failed!!!")
print("Error message : " + str(calculation_unit.errors))
except ApiException as e:
print("Api exception Encountered")
print(e)
exit()
def output_calculation_result(output_prefix, result):
filename = Path(f'{output_prefix}-Output.ftr')
print(f'Writing output to {filename}')
filename.write_bytes(result)
df = pd.read_feather(filename)
print(df)
if __name__ == '__main__':
main()
| 48.076923 | 140 | 0.6592 |
4a1e2158151850be24e601655b11d229fa7e5dc4 | 3,241 | py | Python | webdriver/tests/forward/forward.py | spao234/wpt | 4b9447991bcb28f37b45532caf7f8e8747f9ad41 | ["BSD-3-Clause"] | null | null | null | webdriver/tests/forward/forward.py | spao234/wpt | 4b9447991bcb28f37b45532caf7f8e8747f9ad41 | ["BSD-3-Clause"] | 6 | 2021-03-31T20:00:14.000Z | 2022-03-12T00:50:17.000Z | webdriver/tests/forward/forward.py | spao234/wpt | 4b9447991bcb28f37b45532caf7f8e8747f9ad41 | ["BSD-3-Clause"] | 1 | 2020-05-10T17:24:35.000Z | 2020-05-10T17:24:35.000Z |
from tests.support.asserts import assert_error, assert_success
from tests.support.inline import inline
def forward(session):
return session.transport.send(
"POST", "session/{session_id}/forward".format(**vars(session)))
def test_null_response_value(session):
session.url = inline("<div>")
session.url = inline("<p>")
session.back()
response = forward(session)
value = assert_success(response)
assert value is None
def test_no_top_browsing_context(session, closed_window):
response = forward(session)
assert_error(response, "no such window")
def test_no_browsing_context(session, closed_frame):
response = forward(session)
assert_success(response)
def test_no_browsing_history(session):
url = inline("<div id=foo>")
session.url = url
element = session.find.css("#foo", all=False)
response = forward(session)
assert_success(response)
assert session.url == url
assert element.property("id") == "foo"
def test_data_urls(session):
test_pages = [
inline("<p id=1>"),
inline("<p id=2>"),
]
for page in test_pages:
session.url = page
session.back()
assert session.url == test_pages[0]
response = forward(session)
assert_success(response)
assert session.url == test_pages[1]
def test_dismissed_beforeunload(session):
url_beforeunload = inline("""
<input type="text">
<script>
window.addEventListener("beforeunload", function (event) {
event.preventDefault();
});
</script>
""")
session.url = url_beforeunload
session.url = inline("<div id=foo>")
session.back()
element = session.find.css("input", all=False)
element.send_keys("bar")
response = forward(session)
assert_success(response)
assert session.url != url_beforeunload
def test_fragments(session, url):
test_pages = [
url("/common/blank.html"),
url("/common/blank.html#1234"),
url("/common/blank.html#5678"),
]
for page in test_pages:
session.url = page
session.back()
assert session.url == test_pages[1]
session.back()
assert session.url == test_pages[0]
response = forward(session)
assert_success(response)
assert session.url == test_pages[1]
response = forward(session)
assert_success(response)
assert session.url == test_pages[2]
def test_history_pushstate(session, url):
pushstate_page = inline("""
<script>
function pushState() {
history.pushState({foo: "bar"}, "", "#pushstate");
}
</script>
<a onclick="javascript:pushState();">click</a>
""")
session.url = pushstate_page
session.find.css("a", all=False).click()
assert session.url == "{}#pushstate".format(pushstate_page)
assert session.execute_script("return history.state;") == {"foo": "bar"}
session.back()
assert session.url == pushstate_page
assert session.execute_script("return history.state;") is None
response = forward(session)
assert_success(response)
assert session.url == "{}#pushstate".format(pushstate_page)
assert session.execute_script("return history.state;") == {"foo": "bar"}
| 24.368421 | 76 | 0.653502 |
4a1e22125c8d496581247ff39bb592ea881647bb | 157 | py | Python | code/exercises/exercise_04.py | DahlitzFlorian/python-basic-training | baedaee1259ed91e7403ab121519b1257270c700 | ["Apache-2.0"] | 3 | 2019-08-02T14:05:17.000Z | 2022-01-09T02:00:12.000Z | code/exercises/exercise_04.py | DahlitzFlorian/python-basic-training | baedaee1259ed91e7403ab121519b1257270c700 | ["Apache-2.0"] | null | null | null | code/exercises/exercise_04.py | DahlitzFlorian/python-basic-training | baedaee1259ed91e7403ab121519b1257270c700 | ["Apache-2.0"] | 1 | 2019-10-14T14:20:30.000Z | 2019-10-14T14:20:30.000Z |
# Ask the user for a string and print out whether this string is a palindrome or not.
# A palindrome is a string that reads the same forwards and backwards.
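
# A minimal sketch of one possible solution (the prompt text and variable
# names are illustrative, not prescribed by the exercise); a stricter variant
# could also normalize case and strip spaces before comparing.
text = input("Please enter a string: ")
if text == text[::-1]:
    print(f"'{text}' is a palindrome.")
else:
    print(f"'{text}' is not a palindrome.")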
| 52.333333 | 85 | 0.77707 |
4a1e22311be58669ca6060cd3adc33942c403f61 | 185 | py | Python | mockidp/__init__.py | fefeme/mock-idp | a74c8c5b378ae9f547e33e0a646e76696b78ed7a | ["MIT"] | null | null | null | mockidp/__init__.py | fefeme/mock-idp | a74c8c5b378ae9f547e33e0a646e76696b78ed7a | ["MIT"] | null | null | null | mockidp/__init__.py | fefeme/mock-idp | a74c8c5b378ae9f547e33e0a646e76696b78ed7a | ["MIT"] | null | null | null |
# coding: utf-8
from flask import Flask
__version__ = "0.3.1"
app = Flask(__name__)
app.config['SECRET_KEY'] = 'you-will-never-guess'
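# The route modules are imported at the bottom, after `app` exists, so they can
# do `from mockidp import app` without a circular import (an assumption based
# on the usual Flask application-package layout).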
import mockidp.saml.routes
import mockidp.routes
| 18.5 | 49 | 0.745946 |
4a1e227a17ade390eb76f5f5a6c242f664bb108d | 383 | py | Python | tools/_make_style_table.py | DarkCode01/rich | c4287eff031d03addac79fd9035e146c7d868b78 | ["MIT"] | 2 | 2021-05-11T19:27:06.000Z | 2021-05-12T06:08:08.000Z | tools/_make_style_table.py | antomuli/rich | 5097f44092a4ba4ef7741755b3752d2aebe772a0 | ["MIT"] | 2 | 2020-05-09T12:42:28.000Z | 2020-05-09T14:44:04.000Z | tools/_make_style_table.py | antomuli/rich | 5097f44092a4ba4ef7741755b3752d2aebe772a0 | ["MIT"] | 1 | 2020-08-14T13:47:25.000Z | 2020-08-14T13:47:25.000Z |
"""Generate an ANSI style table."""
def make_table():
    """Map every 9-bit style attribute mask to its ANSI SGR code string."""
    table = []
    for attributes in range(0, 512):
        ansi_codes = []
        for bit_no in range(0, 9):
            bit = 1 << bit_no
            if attributes & bit:
                # style bit n corresponds to SGR parameter n + 1 (1=bold, 2=dim, 3=italic, ...)
                ansi_codes.append(str(1 + bit_no))
        table.append(";".join(ansi_codes))
    return table
table = make_table()
print(repr(table))
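# Illustrative sanity check (not part of the original generator): index 0b101
# sets bits 0 and 2, which map to SGR codes "1" (bold) and "3" (italic).
assert table[0b101] == "1;3"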
| 20.157895 | 50 | 0.545692 |
4a1e22e7cfb27daf7a2588a1050655095758c4a5 | 102,927 | py | Python | UMLRT2Kiltera_MM/UMLRT2Kiltera_MM_META.py | levilucio/SyVOLT | 7526ec794d21565e3efcc925a7b08ae8db27d46a | ["MIT"] | 3 | 2017-06-02T19:26:27.000Z | 2021-06-14T04:25:45.000Z | UMLRT2Kiltera_MM/UMLRT2Kiltera_MM_META.py | levilucio/SyVOLT | 7526ec794d21565e3efcc925a7b08ae8db27d46a | ["MIT"] | 8 | 2016-08-24T07:04:07.000Z | 2017-05-26T16:22:47.000Z | UMLRT2Kiltera_MM/UMLRT2Kiltera_MM_META.py | levilucio/SyVOLT | 7526ec794d21565e3efcc925a7b08ae8db27d46a | ["MIT"] | 1 | 2019-10-31T06:00:23.000Z | 2019-10-31T06:00:23.000Z |
"""
__UMLRT2Kiltera_MM_META.py_____________________________________________________
Automatically generated AToM3 button model (DO NOT MODIFY DIRECTLY)
Author: gehan
Modified: Sat Aug 30 18:25:18 2014
_______________________________________________________________________________
"""
from ASG_Buttons import *
from ButtonConfig import *
from ATOM3Enum import *
from ATOM3List import *
from ATOM3Float import *
from ATOM3Integer import *
from ATOM3Attribute import *
from ATOM3Constraint import *
from ATOM3Action import *
from ATOM3String import *
from ATOM3BottomType import *
from ATOM3Boolean import *
from ATOM3Appearance import *
from ATOM3Link import *
def UMLRT2Kiltera_MM_META(self, rootNode, ButtonsRootNode):
ButtonsRootNode.Formalism_Name.setValue('UMLRT2Kiltera_MM_META')
ButtonsRootNode.RowSize.setValue(4)
ButtonsRootNode.Formalism_File.setValue( 'UMLRT2Kiltera_MM_MM.py' )
self.globalPrecondition(rootNode)
self.objEdit=ButtonConfig(self)
self.objEdit.Contents.Text.setValue('Edit')
self.objEdit.Contents.Image.setValue('')
self.objEdit.Contents.lastSelected= 'Text'
self.objEdit.Drawing_Mode.setValue(0)
self.objEdit.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nself.modelAttributes(self.ASGroot.getASGbyName("UMLRT2Kiltera_MM_META")) ') )
self.objEdit.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objEdit)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objEdit.graphObject_ = new_obj
rootNode.addNode(self.objEdit)
self.globalAndLocalPostcondition(self.objEdit, rootNode)
self.globalPrecondition(rootNode)
self.objHelp=ButtonConfig(self)
self.objHelp.Contents.Text.setValue('Help')
self.objHelp.Contents.Image.setValue('')
self.objHelp.Contents.lastSelected= 'Text'
self.objHelp.Drawing_Mode.setValue(0)
self.objHelp.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nfrom HelpDialog import HelpDialog\nHelpDialog(["UMLRT2Kiltera_MM_METAHelp.txt"])\n ') )
self.objHelp.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objHelp)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objHelp.graphObject_ = new_obj
rootNode.addNode(self.objHelp)
self.globalAndLocalPostcondition(self.objHelp, rootNode)
self.globalPrecondition(rootNode)
self.objMatchModel=ButtonConfig(self)
self.objMatchModel.Contents.Text.setValue('New MatchModel')
self.objMatchModel.Contents.Image.setValue('')
self.objMatchModel.Contents.lastSelected= 'Text'
self.objMatchModel.Drawing_Mode.setValue(1)
self.objMatchModel.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewMatchModel (self, wherex, wherey)\n'))
self.objMatchModel.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objMatchModel)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objMatchModel.graphObject_ = new_obj
rootNode.addNode(self.objMatchModel)
self.globalAndLocalPostcondition(self.objMatchModel, rootNode)
self.globalPrecondition(rootNode)
self.objApplyModel=ButtonConfig(self)
self.objApplyModel.Contents.Text.setValue('New ApplyModel')
self.objApplyModel.Contents.Image.setValue('')
self.objApplyModel.Contents.lastSelected= 'Text'
self.objApplyModel.Drawing_Mode.setValue(1)
self.objApplyModel.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewApplyModel (self, wherex, wherey)\n'))
self.objApplyModel.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objApplyModel)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objApplyModel.graphObject_ = new_obj
rootNode.addNode(self.objApplyModel)
self.globalAndLocalPostcondition(self.objApplyModel, rootNode)
self.globalPrecondition(rootNode)
self.objMetaModelElement_S=ButtonConfig(self)
self.objMetaModelElement_S.Contents.Text.setValue('New MetaModelElement_S')
self.objMetaModelElement_S.Contents.Image.setValue('')
self.objMetaModelElement_S.Contents.lastSelected= 'Text'
self.objMetaModelElement_S.Drawing_Mode.setValue(1)
self.objMetaModelElement_S.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewMetaModelElement_S (self, wherex, wherey)\n'))
self.objMetaModelElement_S.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objMetaModelElement_S)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objMetaModelElement_S.graphObject_ = new_obj
rootNode.addNode(self.objMetaModelElement_S)
self.globalAndLocalPostcondition(self.objMetaModelElement_S, rootNode)
self.globalPrecondition(rootNode)
self.objMetaModelElement_T=ButtonConfig(self)
self.objMetaModelElement_T.Contents.Text.setValue('New MetaModelElement_T')
self.objMetaModelElement_T.Contents.Image.setValue('')
self.objMetaModelElement_T.Contents.lastSelected= 'Text'
self.objMetaModelElement_T.Drawing_Mode.setValue(1)
self.objMetaModelElement_T.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewMetaModelElement_T (self, wherex, wherey)\n'))
self.objMetaModelElement_T.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objMetaModelElement_T)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objMetaModelElement_T.graphObject_ = new_obj
rootNode.addNode(self.objMetaModelElement_T)
self.globalAndLocalPostcondition(self.objMetaModelElement_T, rootNode)
self.globalPrecondition(rootNode)
self.objElement=ButtonConfig(self)
self.objElement.Contents.Text.setValue('New Element')
self.objElement.Contents.Image.setValue('')
self.objElement.Contents.lastSelected= 'Text'
self.objElement.Drawing_Mode.setValue(1)
self.objElement.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewElement (self, wherex, wherey)\n'))
self.objElement.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objElement)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objElement.graphObject_ = new_obj
rootNode.addNode(self.objElement)
self.globalAndLocalPostcondition(self.objElement, rootNode)
self.globalPrecondition(rootNode)
self.objNamedElement=ButtonConfig(self)
self.objNamedElement.Contents.Text.setValue('New NamedElement')
self.objNamedElement.Contents.Image.setValue('')
self.objNamedElement.Contents.lastSelected= 'Text'
self.objNamedElement.Drawing_Mode.setValue(1)
self.objNamedElement.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewNamedElement (self, wherex, wherey)\n'))
self.objNamedElement.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objNamedElement)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objNamedElement.graphObject_ = new_obj
rootNode.addNode(self.objNamedElement)
self.globalAndLocalPostcondition(self.objNamedElement, rootNode)
self.globalPrecondition(rootNode)
self.objTrigger_S=ButtonConfig(self)
self.objTrigger_S.Contents.Text.setValue('New Trigger_S')
self.objTrigger_S.Contents.Image.setValue('')
self.objTrigger_S.Contents.lastSelected= 'Text'
self.objTrigger_S.Drawing_Mode.setValue(1)
self.objTrigger_S.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewTrigger_S (self, wherex, wherey)\n'))
self.objTrigger_S.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objTrigger_S)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objTrigger_S.graphObject_ = new_obj
rootNode.addNode(self.objTrigger_S)
self.globalAndLocalPostcondition(self.objTrigger_S, rootNode)
self.globalPrecondition(rootNode)
self.objAction=ButtonConfig(self)
self.objAction.Contents.Text.setValue('New Action')
self.objAction.Contents.Image.setValue('')
self.objAction.Contents.lastSelected= 'Text'
self.objAction.Drawing_Mode.setValue(1)
self.objAction.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewAction (self, wherex, wherey)\n'))
self.objAction.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objAction)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objAction.graphObject_ = new_obj
rootNode.addNode(self.objAction)
self.globalAndLocalPostcondition(self.objAction, rootNode)
self.globalPrecondition(rootNode)
self.objPortRef=ButtonConfig(self)
self.objPortRef.Contents.Text.setValue('New PortRef')
self.objPortRef.Contents.Image.setValue('')
self.objPortRef.Contents.lastSelected= 'Text'
self.objPortRef.Drawing_Mode.setValue(1)
self.objPortRef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPortRef (self, wherex, wherey)\n'))
self.objPortRef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objPortRef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPortRef.graphObject_ = new_obj
rootNode.addNode(self.objPortRef)
self.globalAndLocalPostcondition(self.objPortRef, rootNode)
self.globalPrecondition(rootNode)
self.objPortConnectorRef=ButtonConfig(self)
self.objPortConnectorRef.Contents.Text.setValue('New PortConnectorRef')
self.objPortConnectorRef.Contents.Image.setValue('')
self.objPortConnectorRef.Contents.lastSelected= 'Text'
self.objPortConnectorRef.Drawing_Mode.setValue(1)
self.objPortConnectorRef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPortConnectorRef (self, wherex, wherey)\n'))
self.objPortConnectorRef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objPortConnectorRef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPortConnectorRef.graphObject_ = new_obj
rootNode.addNode(self.objPortConnectorRef)
self.globalAndLocalPostcondition(self.objPortConnectorRef, rootNode)
self.globalPrecondition(rootNode)
self.objStateMachineElement=ButtonConfig(self)
self.objStateMachineElement.Contents.Text.setValue('New StateMachineElement')
self.objStateMachineElement.Contents.Image.setValue('')
self.objStateMachineElement.Contents.lastSelected= 'Text'
self.objStateMachineElement.Drawing_Mode.setValue(1)
self.objStateMachineElement.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewStateMachineElement (self, wherex, wherey)\n'))
self.objStateMachineElement.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objStateMachineElement)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objStateMachineElement.graphObject_ = new_obj
rootNode.addNode(self.objStateMachineElement)
self.globalAndLocalPostcondition(self.objStateMachineElement, rootNode)
self.globalPrecondition(rootNode)
self.objProtocol=ButtonConfig(self)
self.objProtocol.Contents.Text.setValue('New Protocol')
self.objProtocol.Contents.Image.setValue('')
self.objProtocol.Contents.lastSelected= 'Text'
self.objProtocol.Drawing_Mode.setValue(1)
self.objProtocol.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewProtocol (self, wherex, wherey)\n'))
self.objProtocol.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objProtocol)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objProtocol.graphObject_ = new_obj
rootNode.addNode(self.objProtocol)
self.globalAndLocalPostcondition(self.objProtocol, rootNode)
self.globalPrecondition(rootNode)
self.objSignal=ButtonConfig(self)
self.objSignal.Contents.Text.setValue('New Signal')
self.objSignal.Contents.Image.setValue('')
self.objSignal.Contents.lastSelected= 'Text'
self.objSignal.Drawing_Mode.setValue(1)
self.objSignal.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewSignal (self, wherex, wherey)\n'))
self.objSignal.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objSignal)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objSignal.graphObject_ = new_obj
rootNode.addNode(self.objSignal)
self.globalAndLocalPostcondition(self.objSignal, rootNode)
self.globalPrecondition(rootNode)
self.objPort=ButtonConfig(self)
self.objPort.Contents.Text.setValue('New Port')
self.objPort.Contents.Image.setValue('')
self.objPort.Contents.lastSelected= 'Text'
self.objPort.Drawing_Mode.setValue(1)
self.objPort.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPort (self, wherex, wherey)\n'))
self.objPort.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objPort)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPort.graphObject_ = new_obj
rootNode.addNode(self.objPort)
self.globalAndLocalPostcondition(self.objPort, rootNode)
self.globalPrecondition(rootNode)
self.objVertex=ButtonConfig(self)
self.objVertex.Contents.Text.setValue('New Vertex')
self.objVertex.Contents.Image.setValue('')
self.objVertex.Contents.lastSelected= 'Text'
self.objVertex.Drawing_Mode.setValue(1)
self.objVertex.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewVertex (self, wherex, wherey)\n'))
self.objVertex.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objVertex)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objVertex.graphObject_ = new_obj
rootNode.addNode(self.objVertex)
self.globalAndLocalPostcondition(self.objVertex, rootNode)
self.globalPrecondition(rootNode)
self.objInitialPoint=ButtonConfig(self)
self.objInitialPoint.Contents.Text.setValue('New InitialPoint')
self.objInitialPoint.Contents.Image.setValue('')
self.objInitialPoint.Contents.lastSelected= 'Text'
self.objInitialPoint.Drawing_Mode.setValue(1)
self.objInitialPoint.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewInitialPoint (self, wherex, wherey)\n'))
self.objInitialPoint.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objInitialPoint)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objInitialPoint.graphObject_ = new_obj
rootNode.addNode(self.objInitialPoint)
self.globalAndLocalPostcondition(self.objInitialPoint, rootNode)
self.globalPrecondition(rootNode)
self.objEntryPoint=ButtonConfig(self)
self.objEntryPoint.Contents.Text.setValue('New EntryPoint')
self.objEntryPoint.Contents.Image.setValue('')
self.objEntryPoint.Contents.lastSelected= 'Text'
self.objEntryPoint.Drawing_Mode.setValue(1)
self.objEntryPoint.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewEntryPoint (self, wherex, wherey)\n'))
self.objEntryPoint.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objEntryPoint)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objEntryPoint.graphObject_ = new_obj
rootNode.addNode(self.objEntryPoint)
self.globalAndLocalPostcondition(self.objEntryPoint, rootNode)
self.globalPrecondition(rootNode)
self.objExitPoint=ButtonConfig(self)
self.objExitPoint.Contents.Text.setValue('New ExitPoint')
self.objExitPoint.Contents.Image.setValue('')
self.objExitPoint.Contents.lastSelected= 'Text'
self.objExitPoint.Drawing_Mode.setValue(1)
self.objExitPoint.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewExitPoint (self, wherex, wherey)\n'))
self.objExitPoint.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objExitPoint)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objExitPoint.graphObject_ = new_obj
rootNode.addNode(self.objExitPoint)
self.globalAndLocalPostcondition(self.objExitPoint, rootNode)
self.globalPrecondition(rootNode)
self.objTransition=ButtonConfig(self)
self.objTransition.Contents.Text.setValue('New Transition')
self.objTransition.Contents.Image.setValue('')
self.objTransition.Contents.lastSelected= 'Text'
self.objTransition.Drawing_Mode.setValue(1)
self.objTransition.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewTransition (self, wherex, wherey)\n'))
self.objTransition.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objTransition)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objTransition.graphObject_ = new_obj
rootNode.addNode(self.objTransition)
self.globalAndLocalPostcondition(self.objTransition, rootNode)
self.globalPrecondition(rootNode)
self.objStateMachine=ButtonConfig(self)
self.objStateMachine.Contents.Text.setValue('New StateMachine')
self.objStateMachine.Contents.Image.setValue('')
self.objStateMachine.Contents.lastSelected= 'Text'
self.objStateMachine.Drawing_Mode.setValue(1)
self.objStateMachine.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewStateMachine (self, wherex, wherey)\n'))
self.objStateMachine.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objStateMachine)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objStateMachine.graphObject_ = new_obj
rootNode.addNode(self.objStateMachine)
self.globalAndLocalPostcondition(self.objStateMachine, rootNode)
self.globalPrecondition(rootNode)
self.objState=ButtonConfig(self)
self.objState.Contents.Text.setValue('New State')
self.objState.Contents.Image.setValue('')
self.objState.Contents.lastSelected= 'Text'
self.objState.Drawing_Mode.setValue(1)
self.objState.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewState (self, wherex, wherey)\n'))
self.objState.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objState)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objState.graphObject_ = new_obj
rootNode.addNode(self.objState)
self.globalAndLocalPostcondition(self.objState, rootNode)
self.globalPrecondition(rootNode)
self.objCapsule=ButtonConfig(self)
self.objCapsule.Contents.Text.setValue('New Capsule')
self.objCapsule.Contents.Image.setValue('')
self.objCapsule.Contents.lastSelected= 'Text'
self.objCapsule.Drawing_Mode.setValue(1)
self.objCapsule.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewCapsule (self, wherex, wherey)\n'))
self.objCapsule.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objCapsule)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objCapsule.graphObject_ = new_obj
rootNode.addNode(self.objCapsule)
self.globalAndLocalPostcondition(self.objCapsule, rootNode)
self.globalPrecondition(rootNode)
self.objPackageContainer=ButtonConfig(self)
self.objPackageContainer.Contents.Text.setValue('New PackageContainer')
self.objPackageContainer.Contents.Image.setValue('')
self.objPackageContainer.Contents.lastSelected= 'Text'
self.objPackageContainer.Drawing_Mode.setValue(1)
self.objPackageContainer.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPackageContainer (self, wherex, wherey)\n'))
self.objPackageContainer.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objPackageContainer)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPackageContainer.graphObject_ = new_obj
rootNode.addNode(self.objPackageContainer)
self.globalAndLocalPostcondition(self.objPackageContainer, rootNode)
self.globalPrecondition(rootNode)
self.objModel_S=ButtonConfig(self)
self.objModel_S.Contents.Text.setValue('New Model_S')
self.objModel_S.Contents.Image.setValue('')
self.objModel_S.Contents.lastSelected= 'Text'
self.objModel_S.Drawing_Mode.setValue(1)
self.objModel_S.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewModel_S (self, wherex, wherey)\n'))
self.objModel_S.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objModel_S)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objModel_S.graphObject_ = new_obj
rootNode.addNode(self.objModel_S)
self.globalAndLocalPostcondition(self.objModel_S, rootNode)
self.globalPrecondition(rootNode)
self.objPackage=ButtonConfig(self)
self.objPackage.Contents.Text.setValue('New Package')
self.objPackage.Contents.Image.setValue('')
self.objPackage.Contents.lastSelected= 'Text'
self.objPackage.Drawing_Mode.setValue(1)
self.objPackage.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPackage (self, wherex, wherey)\n'))
self.objPackage.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objPackage)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPackage.graphObject_ = new_obj
rootNode.addNode(self.objPackage)
self.globalAndLocalPostcondition(self.objPackage, rootNode)
self.globalPrecondition(rootNode)
self.objCapsuleRole=ButtonConfig(self)
self.objCapsuleRole.Contents.Text.setValue('New CapsuleRole')
self.objCapsuleRole.Contents.Image.setValue('')
self.objCapsuleRole.Contents.lastSelected= 'Text'
self.objCapsuleRole.Drawing_Mode.setValue(1)
self.objCapsuleRole.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewCapsuleRole (self, wherex, wherey)\n'))
self.objCapsuleRole.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objCapsuleRole)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objCapsuleRole.graphObject_ = new_obj
rootNode.addNode(self.objCapsuleRole)
self.globalAndLocalPostcondition(self.objCapsuleRole, rootNode)
self.globalPrecondition(rootNode)
self.objPortConnector=ButtonConfig(self)
self.objPortConnector.Contents.Text.setValue('New PortConnector')
self.objPortConnector.Contents.Image.setValue('')
self.objPortConnector.Contents.lastSelected= 'Text'
self.objPortConnector.Drawing_Mode.setValue(1)
self.objPortConnector.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPortConnector (self, wherex, wherey)\n'))
self.objPortConnector.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objPortConnector)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPortConnector.graphObject_ = new_obj
rootNode.addNode(self.objPortConnector)
self.globalAndLocalPostcondition(self.objPortConnector, rootNode)
self.globalPrecondition(rootNode)
self.objThread=ButtonConfig(self)
self.objThread.Contents.Text.setValue('New Thread')
self.objThread.Contents.Image.setValue('')
self.objThread.Contents.lastSelected= 'Text'
self.objThread.Drawing_Mode.setValue(1)
self.objThread.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewThread (self, wherex, wherey)\n'))
self.objThread.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objThread)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objThread.graphObject_ = new_obj
rootNode.addNode(self.objThread)
self.globalAndLocalPostcondition(self.objThread, rootNode)
self.globalPrecondition(rootNode)
self.objPhysicalThread=ButtonConfig(self)
self.objPhysicalThread.Contents.Text.setValue('New PhysicalThread')
self.objPhysicalThread.Contents.Image.setValue('')
self.objPhysicalThread.Contents.lastSelected= 'Text'
self.objPhysicalThread.Drawing_Mode.setValue(1)
self.objPhysicalThread.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPhysicalThread (self, wherex, wherey)\n'))
self.objPhysicalThread.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objPhysicalThread)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPhysicalThread.graphObject_ = new_obj
rootNode.addNode(self.objPhysicalThread)
self.globalAndLocalPostcondition(self.objPhysicalThread, rootNode)
self.globalPrecondition(rootNode)
self.objLogicalThread=ButtonConfig(self)
self.objLogicalThread.Contents.Text.setValue('New LogicalThread')
self.objLogicalThread.Contents.Image.setValue('')
self.objLogicalThread.Contents.lastSelected= 'Text'
self.objLogicalThread.Drawing_Mode.setValue(1)
self.objLogicalThread.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewLogicalThread (self, wherex, wherey)\n'))
self.objLogicalThread.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objLogicalThread)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objLogicalThread.graphObject_ = new_obj
rootNode.addNode(self.objLogicalThread)
self.globalAndLocalPostcondition(self.objLogicalThread, rootNode)
self.globalPrecondition(rootNode)
self.objPortType=ButtonConfig(self)
self.objPortType.Contents.Text.setValue('New PortType')
self.objPortType.Contents.Image.setValue('')
self.objPortType.Contents.lastSelected= 'Text'
self.objPortType.Drawing_Mode.setValue(1)
self.objPortType.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPortType (self, wherex, wherey)\n'))
self.objPortType.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objPortType)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPortType.graphObject_ = new_obj
rootNode.addNode(self.objPortType)
self.globalAndLocalPostcondition(self.objPortType, rootNode)
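# BASE0 / CONJUGATE1 buttons (presumably the two UML-RT port
# polarities, base vs. conjugate, belonging with the PortType above).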
self.globalPrecondition(rootNode)
self.objBASE0=ButtonConfig(self)
self.objBASE0.Contents.Text.setValue('New BASE0')
self.objBASE0.Contents.Image.setValue('')
self.objBASE0.Contents.lastSelected= 'Text'
self.objBASE0.Drawing_Mode.setValue(1)
self.objBASE0.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewBASE0 (self, wherex, wherey)\n'))
self.objBASE0.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objBASE0)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objBASE0.graphObject_ = new_obj
rootNode.addNode(self.objBASE0)
self.globalAndLocalPostcondition(self.objBASE0, rootNode)
self.globalPrecondition(rootNode)
self.objCONJUGATE1=ButtonConfig(self)
self.objCONJUGATE1.Contents.Text.setValue('New CONJUGATE1')
self.objCONJUGATE1.Contents.Image.setValue('')
self.objCONJUGATE1.Contents.lastSelected= 'Text'
self.objCONJUGATE1.Drawing_Mode.setValue(1)
self.objCONJUGATE1.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewCONJUGATE1 (self, wherex, wherey)\n'))
self.objCONJUGATE1.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objCONJUGATE1)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objCONJUGATE1.graphObject_ = new_obj
rootNode.addNode(self.objCONJUGATE1)
self.globalAndLocalPostcondition(self.objCONJUGATE1, rootNode)
self.globalPrecondition(rootNode)
self.objSignalType=ButtonConfig(self)
self.objSignalType.Contents.Text.setValue('New SignalType')
self.objSignalType.Contents.Image.setValue('')
self.objSignalType.Contents.lastSelected= 'Text'
self.objSignalType.Drawing_Mode.setValue(1)
self.objSignalType.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewSignalType (self, wherex, wherey)\n'))
self.objSignalType.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objSignalType)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objSignalType.graphObject_ = new_obj
rootNode.addNode(self.objSignalType)
self.globalAndLocalPostcondition(self.objSignalType, rootNode)
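# OUT1 / IN0 buttons (presumably signal-direction literals for the
# SignalType above).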
self.globalPrecondition(rootNode)
self.objOUT1=ButtonConfig(self)
self.objOUT1.Contents.Text.setValue('New OUT1')
self.objOUT1.Contents.Image.setValue('')
self.objOUT1.Contents.lastSelected= 'Text'
self.objOUT1.Drawing_Mode.setValue(1)
self.objOUT1.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewOUT1 (self, wherex, wherey)\n'))
self.objOUT1.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objOUT1)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objOUT1.graphObject_ = new_obj
rootNode.addNode(self.objOUT1)
self.globalAndLocalPostcondition(self.objOUT1, rootNode)
self.globalPrecondition(rootNode)
self.objIN0=ButtonConfig(self)
self.objIN0.Contents.Text.setValue('New IN0')
self.objIN0.Contents.Image.setValue('')
self.objIN0.Contents.lastSelected= 'Text'
self.objIN0.Drawing_Mode.setValue(1)
self.objIN0.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewIN0 (self, wherex, wherey)\n'))
self.objIN0.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objIN0)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objIN0.graphObject_ = new_obj
rootNode.addNode(self.objIN0)
self.globalAndLocalPostcondition(self.objIN0, rootNode)
self.globalPrecondition(rootNode)
self.objRoleType=ButtonConfig(self)
self.objRoleType.Contents.Text.setValue('New RoleType')
self.objRoleType.Contents.Image.setValue('')
self.objRoleType.Contents.lastSelected= 'Text'
self.objRoleType.Drawing_Mode.setValue(1)
self.objRoleType.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewRoleType (self, wherex, wherey)\n'))
self.objRoleType.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objRoleType)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objRoleType.graphObject_ = new_obj
rootNode.addNode(self.objRoleType)
self.globalAndLocalPostcondition(self.objRoleType, rootNode)
self.globalPrecondition(rootNode)
self.objFIXED0=ButtonConfig(self)
self.objFIXED0.Contents.Text.setValue('New FIXED0')
self.objFIXED0.Contents.Image.setValue('')
self.objFIXED0.Contents.lastSelected= 'Text'
self.objFIXED0.Drawing_Mode.setValue(1)
self.objFIXED0.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewFIXED0 (self, wherex, wherey)\n'))
self.objFIXED0.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objFIXED0)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objFIXED0.graphObject_ = new_obj
rootNode.addNode(self.objFIXED0)
self.globalAndLocalPostcondition(self.objFIXED0, rootNode)
self.globalPrecondition(rootNode)
self.objOPTIONAL1=ButtonConfig(self)
self.objOPTIONAL1.Contents.Text.setValue('New OPTIONAL1')
self.objOPTIONAL1.Contents.Image.setValue('')
self.objOPTIONAL1.Contents.lastSelected= 'Text'
self.objOPTIONAL1.Drawing_Mode.setValue(1)
self.objOPTIONAL1.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewOPTIONAL1 (self, wherex, wherey)\n'))
self.objOPTIONAL1.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objOPTIONAL1)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objOPTIONAL1.graphObject_ = new_obj
rootNode.addNode(self.objOPTIONAL1)
self.globalAndLocalPostcondition(self.objOPTIONAL1, rootNode)
self.globalPrecondition(rootNode)
self.objPLUGIN2=ButtonConfig(self)
self.objPLUGIN2.Contents.Text.setValue('New PLUGIN2')
self.objPLUGIN2.Contents.Image.setValue('')
self.objPLUGIN2.Contents.lastSelected= 'Text'
self.objPLUGIN2.Drawing_Mode.setValue(1)
self.objPLUGIN2.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPLUGIN2 (self, wherex, wherey)\n'))
self.objPLUGIN2.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objPLUGIN2)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPLUGIN2.graphObject_ = new_obj
rootNode.addNode(self.objPLUGIN2)
self.globalAndLocalPostcondition(self.objPLUGIN2, rootNode)
self.globalPrecondition(rootNode)
self.objTransitionType=ButtonConfig(self)
self.objTransitionType.Contents.Text.setValue('New TransitionType')
self.objTransitionType.Contents.Image.setValue('')
self.objTransitionType.Contents.lastSelected= 'Text'
self.objTransitionType.Drawing_Mode.setValue(1)
self.objTransitionType.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewTransitionType (self, wherex, wherey)\n'))
self.objTransitionType.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objTransitionType)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objTransitionType.graphObject_ = new_obj
rootNode.addNode(self.objTransitionType)
self.globalAndLocalPostcondition(self.objTransitionType, rootNode)
self.globalPrecondition(rootNode)
self.objSIBLING0=ButtonConfig(self)
self.objSIBLING0.Contents.Text.setValue('New SIBLING0')
self.objSIBLING0.Contents.Image.setValue('')
self.objSIBLING0.Contents.lastSelected= 'Text'
self.objSIBLING0.Drawing_Mode.setValue(1)
self.objSIBLING0.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewSIBLING0 (self, wherex, wherey)\n'))
self.objSIBLING0.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objSIBLING0)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objSIBLING0.graphObject_ = new_obj
rootNode.addNode(self.objSIBLING0)
self.globalAndLocalPostcondition(self.objSIBLING0, rootNode)
self.globalPrecondition(rootNode)
self.objIN1=ButtonConfig(self)
self.objIN1.Contents.Text.setValue('New IN1')
self.objIN1.Contents.Image.setValue('')
self.objIN1.Contents.lastSelected= 'Text'
self.objIN1.Drawing_Mode.setValue(1)
self.objIN1.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewIN1 (self, wherex, wherey)\n'))
self.objIN1.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objIN1)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objIN1.graphObject_ = new_obj
rootNode.addNode(self.objIN1)
self.globalAndLocalPostcondition(self.objIN1, rootNode)
self.globalPrecondition(rootNode)
self.objOUT2=ButtonConfig(self)
self.objOUT2.Contents.Text.setValue('New OUT2')
self.objOUT2.Contents.Image.setValue('')
self.objOUT2.Contents.lastSelected= 'Text'
self.objOUT2.Drawing_Mode.setValue(1)
self.objOUT2.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewOUT2 (self, wherex, wherey)\n'))
self.objOUT2.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objOUT2)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objOUT2.graphObject_ = new_obj
rootNode.addNode(self.objOUT2)
self.globalAndLocalPostcondition(self.objOUT2, rootNode)
self.globalPrecondition(rootNode)
self.objDef=ButtonConfig(self)
self.objDef.Contents.Text.setValue('New Def')
self.objDef.Contents.Image.setValue('')
self.objDef.Contents.lastSelected= 'Text'
self.objDef.Drawing_Mode.setValue(1)
self.objDef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewDef (self, wherex, wherey)\n'))
self.objDef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objDef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objDef.graphObject_ = new_obj
rootNode.addNode(self.objDef)
self.globalAndLocalPostcondition(self.objDef, rootNode)
self.globalPrecondition(rootNode)
self.objExpr=ButtonConfig(self)
self.objExpr.Contents.Text.setValue('New Expr')
self.objExpr.Contents.Image.setValue('')
self.objExpr.Contents.lastSelected= 'Text'
self.objExpr.Drawing_Mode.setValue(1)
self.objExpr.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewExpr (self, wherex, wherey)\n'))
self.objExpr.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objExpr)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objExpr.graphObject_ = new_obj
rootNode.addNode(self.objExpr)
self.globalAndLocalPostcondition(self.objExpr, rootNode)
self.globalPrecondition(rootNode)
self.objPattern=ButtonConfig(self)
self.objPattern.Contents.Text.setValue('New Pattern')
self.objPattern.Contents.Image.setValue('')
self.objPattern.Contents.lastSelected= 'Text'
self.objPattern.Drawing_Mode.setValue(1)
self.objPattern.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPattern (self, wherex, wherey)\n'))
self.objPattern.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objPattern)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPattern.graphObject_ = new_obj
rootNode.addNode(self.objPattern)
self.globalAndLocalPostcondition(self.objPattern, rootNode)
self.globalPrecondition(rootNode)
self.objProc=ButtonConfig(self)
self.objProc.Contents.Text.setValue('New Proc')
self.objProc.Contents.Image.setValue('')
self.objProc.Contents.lastSelected= 'Text'
self.objProc.Drawing_Mode.setValue(1)
self.objProc.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewProc (self, wherex, wherey)\n'))
self.objProc.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objProc)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objProc.graphObject_ = new_obj
rootNode.addNode(self.objProc)
self.globalAndLocalPostcondition(self.objProc, rootNode)
self.globalPrecondition(rootNode)
self.objProcDef=ButtonConfig(self)
self.objProcDef.Contents.Text.setValue('New ProcDef')
self.objProcDef.Contents.Image.setValue('')
self.objProcDef.Contents.lastSelected= 'Text'
self.objProcDef.Drawing_Mode.setValue(1)
self.objProcDef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewProcDef (self, wherex, wherey)\n'))
self.objProcDef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objProcDef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objProcDef.graphObject_ = new_obj
rootNode.addNode(self.objProcDef)
self.globalAndLocalPostcondition(self.objProcDef, rootNode)
self.globalPrecondition(rootNode)
self.objFuncDef=ButtonConfig(self)
self.objFuncDef.Contents.Text.setValue('New FuncDef')
self.objFuncDef.Contents.Image.setValue('')
self.objFuncDef.Contents.lastSelected= 'Text'
self.objFuncDef.Drawing_Mode.setValue(1)
self.objFuncDef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewFuncDef (self, wherex, wherey)\n'))
self.objFuncDef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objFuncDef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objFuncDef.graphObject_ = new_obj
rootNode.addNode(self.objFuncDef)
self.globalAndLocalPostcondition(self.objFuncDef, rootNode)
self.globalPrecondition(rootNode)
self.objName=ButtonConfig(self)
self.objName.Contents.Text.setValue('New Name')
self.objName.Contents.Image.setValue('')
self.objName.Contents.lastSelected= 'Text'
self.objName.Drawing_Mode.setValue(1)
self.objName.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewName (self, wherex, wherey)\n'))
self.objName.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objName)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objName.graphObject_ = new_obj
rootNode.addNode(self.objName)
self.globalAndLocalPostcondition(self.objName, rootNode)
self.globalPrecondition(rootNode)
self.objPythonRef=ButtonConfig(self)
self.objPythonRef.Contents.Text.setValue('New PythonRef')
self.objPythonRef.Contents.Image.setValue('')
self.objPythonRef.Contents.lastSelected= 'Text'
self.objPythonRef.Drawing_Mode.setValue(1)
self.objPythonRef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPythonRef (self, wherex, wherey)\n'))
self.objPythonRef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objPythonRef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPythonRef.graphObject_ = new_obj
rootNode.addNode(self.objPythonRef)
self.globalAndLocalPostcondition(self.objPythonRef, rootNode)
self.globalPrecondition(rootNode)
self.objModule=ButtonConfig(self)
self.objModule.Contents.Text.setValue('New Module')
self.objModule.Contents.Image.setValue('')
self.objModule.Contents.lastSelected= 'Text'
self.objModule.Drawing_Mode.setValue(1)
self.objModule.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewModule (self, wherex, wherey)\n'))
self.objModule.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objModule)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objModule.graphObject_ = new_obj
rootNode.addNode(self.objModule)
self.globalAndLocalPostcondition(self.objModule, rootNode)
self.globalPrecondition(rootNode)
self.objNull=ButtonConfig(self)
self.objNull.Contents.Text.setValue('New Null')
self.objNull.Contents.Image.setValue('')
self.objNull.Contents.lastSelected= 'Text'
self.objNull.Drawing_Mode.setValue(1)
self.objNull.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewNull (self, wherex, wherey)\n'))
self.objNull.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objNull)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objNull.graphObject_ = new_obj
rootNode.addNode(self.objNull)
self.globalAndLocalPostcondition(self.objNull, rootNode)
self.globalPrecondition(rootNode)
self.objTrigger_T=ButtonConfig(self)
self.objTrigger_T.Contents.Text.setValue('New Trigger_T')
self.objTrigger_T.Contents.Image.setValue('')
self.objTrigger_T.Contents.lastSelected= 'Text'
self.objTrigger_T.Drawing_Mode.setValue(1)
self.objTrigger_T.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewTrigger_T (self, wherex, wherey)\n'))
self.objTrigger_T.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objTrigger_T)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objTrigger_T.graphObject_ = new_obj
rootNode.addNode(self.objTrigger_T)
self.globalAndLocalPostcondition(self.objTrigger_T, rootNode)
self.globalPrecondition(rootNode)
self.objListen=ButtonConfig(self)
self.objListen.Contents.Text.setValue('New Listen')
self.objListen.Contents.Image.setValue('')
self.objListen.Contents.lastSelected= 'Text'
self.objListen.Drawing_Mode.setValue(1)
self.objListen.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewListen (self, wherex, wherey)\n'))
self.objListen.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objListen)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objListen.graphObject_ = new_obj
rootNode.addNode(self.objListen)
self.globalAndLocalPostcondition(self.objListen, rootNode)
self.globalPrecondition(rootNode)
self.objConditionBranch=ButtonConfig(self)
self.objConditionBranch.Contents.Text.setValue('New ConditionBranch')
self.objConditionBranch.Contents.Image.setValue('')
self.objConditionBranch.Contents.lastSelected= 'Text'
self.objConditionBranch.Drawing_Mode.setValue(1)
self.objConditionBranch.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewConditionBranch (self, wherex, wherey)\n'))
self.objConditionBranch.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objConditionBranch)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objConditionBranch.graphObject_ = new_obj
rootNode.addNode(self.objConditionBranch)
self.globalAndLocalPostcondition(self.objConditionBranch, rootNode)
self.globalPrecondition(rootNode)
self.objListenBranch=ButtonConfig(self)
self.objListenBranch.Contents.Text.setValue('New ListenBranch')
self.objListenBranch.Contents.Image.setValue('')
self.objListenBranch.Contents.lastSelected= 'Text'
self.objListenBranch.Drawing_Mode.setValue(1)
self.objListenBranch.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewListenBranch (self, wherex, wherey)\n'))
self.objListenBranch.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objListenBranch)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objListenBranch.graphObject_ = new_obj
rootNode.addNode(self.objListenBranch)
self.globalAndLocalPostcondition(self.objListenBranch, rootNode)
self.globalPrecondition(rootNode)
self.objSite=ButtonConfig(self)
self.objSite.Contents.Text.setValue('New Site')
self.objSite.Contents.Image.setValue('')
self.objSite.Contents.lastSelected= 'Text'
self.objSite.Drawing_Mode.setValue(1)
self.objSite.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewSite (self, wherex, wherey)\n'))
self.objSite.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objSite)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objSite.graphObject_ = new_obj
rootNode.addNode(self.objSite)
self.globalAndLocalPostcondition(self.objSite, rootNode)
self.globalPrecondition(rootNode)
self.objModel_T=ButtonConfig(self)
self.objModel_T.Contents.Text.setValue('New Model_T')
self.objModel_T.Contents.Image.setValue('')
self.objModel_T.Contents.lastSelected= 'Text'
self.objModel_T.Drawing_Mode.setValue(1)
self.objModel_T.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewModel_T (self, wherex, wherey)\n'))
self.objModel_T.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objModel_T)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objModel_T.graphObject_ = new_obj
rootNode.addNode(self.objModel_T)
self.globalAndLocalPostcondition(self.objModel_T, rootNode)
self.globalPrecondition(rootNode)
self.objMatchCase=ButtonConfig(self)
self.objMatchCase.Contents.Text.setValue('New MatchCase')
self.objMatchCase.Contents.Image.setValue('')
self.objMatchCase.Contents.lastSelected= 'Text'
self.objMatchCase.Drawing_Mode.setValue(1)
self.objMatchCase.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewMatchCase (self, wherex, wherey)\n'))
self.objMatchCase.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objMatchCase)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objMatchCase.graphObject_ = new_obj
rootNode.addNode(self.objMatchCase)
self.globalAndLocalPostcondition(self.objMatchCase, rootNode)
self.globalPrecondition(rootNode)
self.objCondition=ButtonConfig(self)
self.objCondition.Contents.Text.setValue('New Condition')
self.objCondition.Contents.Image.setValue('')
self.objCondition.Contents.lastSelected= 'Text'
self.objCondition.Drawing_Mode.setValue(1)
self.objCondition.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewCondition (self, wherex, wherey)\n'))
self.objCondition.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objCondition)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objCondition.graphObject_ = new_obj
rootNode.addNode(self.objCondition)
self.globalAndLocalPostcondition(self.objCondition, rootNode)
self.globalPrecondition(rootNode)
self.objNew=ButtonConfig(self)
self.objNew.Contents.Text.setValue('New New')
self.objNew.Contents.Image.setValue('')
self.objNew.Contents.lastSelected= 'Text'
self.objNew.Drawing_Mode.setValue(1)
self.objNew.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewNew (self, wherex, wherey)\n'))
self.objNew.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objNew)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objNew.graphObject_ = new_obj
rootNode.addNode(self.objNew)
self.globalAndLocalPostcondition(self.objNew, rootNode)
self.globalPrecondition(rootNode)
self.objDelay=ButtonConfig(self)
self.objDelay.Contents.Text.setValue('New Delay')
self.objDelay.Contents.Image.setValue('')
self.objDelay.Contents.lastSelected= 'Text'
self.objDelay.Drawing_Mode.setValue(1)
self.objDelay.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewDelay (self, wherex, wherey)\n'))
self.objDelay.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objDelay)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objDelay.graphObject_ = new_obj
rootNode.addNode(self.objDelay)
self.globalAndLocalPostcondition(self.objDelay, rootNode)
self.globalPrecondition(rootNode)
self.objPar=ButtonConfig(self)
self.objPar.Contents.Text.setValue('New Par')
self.objPar.Contents.Image.setValue('')
self.objPar.Contents.lastSelected= 'Text'
self.objPar.Drawing_Mode.setValue(1)
self.objPar.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPar (self, wherex, wherey)\n'))
self.objPar.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objPar)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPar.graphObject_ = new_obj
rootNode.addNode(self.objPar)
self.globalAndLocalPostcondition(self.objPar, rootNode)
self.globalPrecondition(rootNode)
self.objParIndexed=ButtonConfig(self)
self.objParIndexed.Contents.Text.setValue('New ParIndexed')
self.objParIndexed.Contents.Image.setValue('')
self.objParIndexed.Contents.lastSelected= 'Text'
self.objParIndexed.Drawing_Mode.setValue(1)
self.objParIndexed.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewParIndexed (self, wherex, wherey)\n'))
self.objParIndexed.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objParIndexed)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objParIndexed.graphObject_ = new_obj
rootNode.addNode(self.objParIndexed)
self.globalAndLocalPostcondition(self.objParIndexed, rootNode)
self.globalPrecondition(rootNode)
self.objInst=ButtonConfig(self)
self.objInst.Contents.Text.setValue('New Inst')
self.objInst.Contents.Image.setValue('')
self.objInst.Contents.lastSelected= 'Text'
self.objInst.Drawing_Mode.setValue(1)
self.objInst.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewInst (self, wherex, wherey)\n'))
self.objInst.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objInst)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objInst.graphObject_ = new_obj
rootNode.addNode(self.objInst)
self.globalAndLocalPostcondition(self.objInst, rootNode)
self.globalPrecondition(rootNode)
self.objLocalDef=ButtonConfig(self)
self.objLocalDef.Contents.Text.setValue('New LocalDef')
self.objLocalDef.Contents.Image.setValue('')
self.objLocalDef.Contents.lastSelected= 'Text'
self.objLocalDef.Drawing_Mode.setValue(1)
self.objLocalDef.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewLocalDef (self, wherex, wherey)\n'))
self.objLocalDef.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objLocalDef)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objLocalDef.graphObject_ = new_obj
rootNode.addNode(self.objLocalDef)
self.globalAndLocalPostcondition(self.objLocalDef, rootNode)
self.globalPrecondition(rootNode)
self.objSeq=ButtonConfig(self)
self.objSeq.Contents.Text.setValue('New Seq')
self.objSeq.Contents.Image.setValue('')
self.objSeq.Contents.lastSelected= 'Text'
self.objSeq.Drawing_Mode.setValue(1)
self.objSeq.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewSeq (self, wherex, wherey)\n'))
self.objSeq.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objSeq)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objSeq.graphObject_ = new_obj
rootNode.addNode(self.objSeq)
self.globalAndLocalPostcondition(self.objSeq, rootNode)
self.globalPrecondition(rootNode)
self.objConditionSet=ButtonConfig(self)
self.objConditionSet.Contents.Text.setValue('New ConditionSet')
self.objConditionSet.Contents.Image.setValue('')
self.objConditionSet.Contents.lastSelected= 'Text'
self.objConditionSet.Drawing_Mode.setValue(1)
self.objConditionSet.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewConditionSet (self, wherex, wherey)\n'))
self.objConditionSet.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objConditionSet)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objConditionSet.graphObject_ = new_obj
rootNode.addNode(self.objConditionSet)
self.globalAndLocalPostcondition(self.objConditionSet, rootNode)
self.globalPrecondition(rootNode)
self.objMatch=ButtonConfig(self)
self.objMatch.Contents.Text.setValue('New Match')
self.objMatch.Contents.Image.setValue('')
self.objMatch.Contents.lastSelected= 'Text'
self.objMatch.Drawing_Mode.setValue(1)
self.objMatch.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewMatch (self, wherex, wherey)\n'))
self.objMatch.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objMatch)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objMatch.graphObject_ = new_obj
rootNode.addNode(self.objMatch)
self.globalAndLocalPostcondition(self.objMatch, rootNode)
self.globalPrecondition(rootNode)
self.objPrint=ButtonConfig(self)
self.objPrint.Contents.Text.setValue('New Print')
self.objPrint.Contents.Image.setValue('')
self.objPrint.Contents.lastSelected= 'Text'
self.objPrint.Drawing_Mode.setValue(1)
self.objPrint.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewPrint (self, wherex, wherey)\n'))
self.objPrint.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objPrint)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objPrint.graphObject_ = new_obj
rootNode.addNode(self.objPrint)
self.globalAndLocalPostcondition(self.objPrint, rootNode)
self.globalPrecondition(rootNode)
self.objAttribute=ButtonConfig(self)
self.objAttribute.Contents.Text.setValue('New Attribute')
self.objAttribute.Contents.Image.setValue('')
self.objAttribute.Contents.lastSelected= 'Text'
self.objAttribute.Drawing_Mode.setValue(1)
self.objAttribute.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewAttribute (self, wherex, wherey)\n'))
self.objAttribute.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objAttribute)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objAttribute.graphObject_ = new_obj
rootNode.addNode(self.objAttribute)
self.globalAndLocalPostcondition(self.objAttribute, rootNode)
self.globalPrecondition(rootNode)
self.objExpression=ButtonConfig(self)
self.objExpression.Contents.Text.setValue('New Expression')
self.objExpression.Contents.Image.setValue('')
self.objExpression.Contents.lastSelected= 'Text'
self.objExpression.Drawing_Mode.setValue(1)
self.objExpression.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewExpression (self, wherex, wherey)\n'))
self.objExpression.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objExpression)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objExpression.graphObject_ = new_obj
rootNode.addNode(self.objExpression)
self.globalAndLocalPostcondition(self.objExpression, rootNode)
self.globalPrecondition(rootNode)
self.objEquation=ButtonConfig(self)
self.objEquation.Contents.Text.setValue('New Equation')
self.objEquation.Contents.Image.setValue('')
self.objEquation.Contents.lastSelected= 'Text'
self.objEquation.Drawing_Mode.setValue(1)
self.objEquation.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewEquation (self, wherex, wherey)\n'))
self.objEquation.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objEquation)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objEquation.graphObject_ = new_obj
rootNode.addNode(self.objEquation)
self.globalAndLocalPostcondition(self.objEquation, rootNode)
self.globalPrecondition(rootNode)
self.objOperation=ButtonConfig(self)
self.objOperation.Contents.Text.setValue('New Operation')
self.objOperation.Contents.Image.setValue('')
self.objOperation.Contents.lastSelected= 'Text'
self.objOperation.Drawing_Mode.setValue(1)
self.objOperation.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewOperation (self, wherex, wherey)\n'))
self.objOperation.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objOperation)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objOperation.graphObject_ = new_obj
rootNode.addNode(self.objOperation)
self.globalAndLocalPostcondition(self.objOperation, rootNode)
self.globalPrecondition(rootNode)
self.objAdd=ButtonConfig(self)
self.objAdd.Contents.Text.setValue('New Add')
self.objAdd.Contents.Image.setValue('')
self.objAdd.Contents.lastSelected= 'Text'
self.objAdd.Drawing_Mode.setValue(1)
self.objAdd.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewAdd (self, wherex, wherey)\n'))
self.objAdd.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objAdd)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objAdd.graphObject_ = new_obj
rootNode.addNode(self.objAdd)
self.globalAndLocalPostcondition(self.objAdd, rootNode)
self.globalPrecondition(rootNode)
self.objSubtract=ButtonConfig(self)
self.objSubtract.Contents.Text.setValue('New Subtract')
self.objSubtract.Contents.Image.setValue('')
self.objSubtract.Contents.lastSelected= 'Text'
self.objSubtract.Drawing_Mode.setValue(1)
self.objSubtract.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewSubtract (self, wherex, wherey)\n'))
self.objSubtract.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(135, 80,self.objSubtract)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objSubtract.graphObject_ = new_obj
rootNode.addNode(self.objSubtract)
self.globalAndLocalPostcondition(self.objSubtract, rootNode)
self.globalPrecondition(rootNode)
self.objConcat=ButtonConfig(self)
self.objConcat.Contents.Text.setValue('New Concat')
self.objConcat.Contents.Image.setValue('')
self.objConcat.Contents.lastSelected= 'Text'
self.objConcat.Drawing_Mode.setValue(1)
self.objConcat.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewConcat (self, wherex, wherey)\n'))
self.objConcat.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(260, 150,self.objConcat)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objConcat.graphObject_ = new_obj
rootNode.addNode(self.objConcat)
self.globalAndLocalPostcondition(self.objConcat, rootNode)
self.globalPrecondition(rootNode)
self.objConstant=ButtonConfig(self)
self.objConstant.Contents.Text.setValue('New Constant')
self.objConstant.Contents.Image.setValue('')
self.objConstant.Contents.lastSelected= 'Text'
self.objConstant.Drawing_Mode.setValue(1)
self.objConstant.Action.setValue(('ActionButton1', (['Python', 'OCL'], 1), (['PREcondition', 'POSTcondition'], 1),(['EDIT', 'SAVE', 'CREATE', 'CONNECT', 'DELETE', 'DISCONNECT', 'TRANSFORM', 'SELECT', 'DRAG', 'DROP', 'MOVE OBJECT'], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]), '# This method has as parameters:\n# - wherex : X Position in window coordinates where the user clicked.\n# - wherey : Y Position in window coordinates where the user clicked.\nnewPlace = self.createNewConstant (self, wherex, wherey)\n'))
self.objConstant.graphClass_= graph_ButtonConfig
if self.genGraphics:
from graph_ButtonConfig import *
new_obj = graph_ButtonConfig(10, 10,self.objConstant)
new_obj.DrawObject(self.UMLmodel)
self.UMLmodel.addtag_withtag('ButtonConfig', new_obj.tag)
else: new_obj = None
self.objConstant.graphObject_ = new_obj
rootNode.addNode(self.objConstant)
self.globalAndLocalPostcondition(self.objConstant, rootNode)
newfunction = UMLRT2Kiltera_MM_META
loadedMMName = 'Buttons'
atom3version = '0.3'

# ===== foundation_tenant/models/bizmula/documenttype.py | repo: smegurus/smegurus-django | license: BSD-4-Clause =====
from django.db import models
from django.utils.translation import ugettext_lazy as _
class DocumentTypeManager(models.Manager):
def delete_all(self):
items = DocumentType.objects.all()
for item in items.all():
item.delete()
class DocumentType(models.Model):
"""
The supported document types that our system can support.
"""
class Meta:
app_label = 'foundation_tenant'
db_table = 'smeg_document_types'
verbose_name = _('Document Type')
verbose_name_plural = _('Document Types')
objects = DocumentTypeManager()
text = models.CharField(
_("Text"),
max_length=127,
help_text=_('The name of this Document Type.'),
blank=True,
null=True,
)
is_master = models.BooleanField( # CONTROLLED BY EMPLOYEES ONLY
_("Is Master"),
default=False,
help_text=_('Variable controls whether the Module this Document belongs to is the master document of the Module.'),
)
stage_num = models.PositiveSmallIntegerField(
_("Stage Number"),
help_text=_('Track what stage this Document belongs to.'),
default=1,
db_index=True,
)
def __str__(self):
return str(self.text)

# ===== abstractive_summary/AbstractiveSummary.py | repo: oikeusministerio/summarization | license: MIT =====
import pandas as pd
import numpy as np
import tensorflow as tf
class AbstractiveSummary:
def __init__(self, dictionary):
self.batch_size = 16
self.checkpoint = "./abstractive_summary/best_model.ckpt"
self.dictionary = dictionary
self.reverse_dictionary = dict(zip(range(len(dictionary.keys())),dictionary.keys()))
def summarize(self, text, input_sequence, length):
loaded_graph = tf.Graph()
with tf.Session(graph=loaded_graph) as sess:
# Load saved model
loader = tf.train.import_meta_graph(self.checkpoint + '.meta')
loader.restore(sess, self.checkpoint)
input_data = loaded_graph.get_tensor_by_name('input:0')
logits = loaded_graph.get_tensor_by_name('predictions:0')
text_length = loaded_graph.get_tensor_by_name('text_length:0')
summary_length = loaded_graph.get_tensor_by_name('summary_length:0')
keep_prob = loaded_graph.get_tensor_by_name('keep_prob:0')
#Multiply by batch_size to match the model's input parameters
answer_logits = sess.run(logits, {input_data: [input_sequence]*self.batch_size,
summary_length: [np.random.randint(length,length + 3)],
text_length: [len(input_sequence)]*self.batch_size,
keep_prob: 1.0})[0]
            # Remove the padding from the output sequence
pad = self.dictionary["<PAD>"]
print('Original Text:', text)
print('\nSummary')
print(' Word Ids: {}'.format([i for i in answer_logits if i != pad]))
            print('  Response Words: {}'.format(" ".join([self.reverse_dictionary[i] for i in answer_logits if i != pad])))

# ===== gpflow/training/scipy_optimizer.py | repo: NuhaSaud/GPflow | license: Apache-2.0 =====
# Copyright 2017 Artem Artemev @awav
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..core.errors import GPflowError
from ..core.compilable import Build
from ..models.model import Model
from . import optimizer
from . import external_optimizer
class ScipyOptimizer(optimizer.Optimizer):
def __init__(self, **kwargs):
self._optimizer_kwargs = kwargs
self._optimizer = None
self._model = None
def make_optimize_tensor(self, model, session=None, var_list=None, **kwargs):
"""
Make SciPy optimization tensor.
The `make_optimize_tensor` method builds optimization tensor and initializes
all necessary variables created by optimizer.
:param model: GPflow model.
:param session: Tensorflow session.
:param var_list: List of variables for training.
:param kwargs: Scipy optional optimization parameters,
- `maxiter`, maximal number of iterations to perform.
- `disp`, if True, prints convergence messages.
:return: Tensorflow operation.
"""
session = model.enquire_session(session)
with session.as_default():
var_list = self._gen_var_list(model, var_list)
optimizer_kwargs = self._optimizer_kwargs.copy()
options = optimizer_kwargs.get('options', {})
options.update(kwargs)
optimizer_kwargs.update(dict(options=options))
objective = model.objective
optimizer = external_optimizer.ScipyOptimizerInterface(
objective, var_list=var_list, **optimizer_kwargs)
model.initialize(session=session)
return optimizer
def minimize(self, model, session=None, var_list=None, feed_dict=None,
maxiter=1000, disp=False, initialize=False, anchor=True, **kwargs):
"""
Minimizes objective function of the model.
:param model: GPflow model with objective tensor.
:param session: Session where optimization will be run.
:param var_list: List of extra variables which should be trained during optimization.
:param feed_dict: Feed dictionary of tensors passed to session run method.
:param maxiter: Number of run interation. Note: scipy optimizer can do early stopping
if model converged.
:param disp: ScipyOptimizer option. Set to True to print convergence messages.
:param initialize: If `True` model parameters will be re-initialized even if they were
initialized before for gotten session.
:param anchor: If `True` trained parameters computed during optimization at
particular session will be synchronized with internal parameter values.
:param kwargs: This is a dictionary of extra parameters for session run method.
"""
if model is None or not isinstance(model, Model):
raise ValueError('Unknown type passed for optimization.')
if model.is_built_coherence() is Build.NO:
raise GPflowError('Model is not built.')
session = model.enquire_session(session)
self._model = model
optimizer = self.make_optimize_tensor(model, session,
var_list=var_list, maxiter=maxiter, disp=disp)
self._optimizer = optimizer
feed_dict = self._gen_feed_dict(model, feed_dict)
optimizer.minimize(session=session, feed_dict=feed_dict, **kwargs)
if anchor:
model.anchor(session)
@property
def model(self):
return self._model
@property
def optimizer(self):
return self._optimizer
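
# Usage sketch for the optimizer above (GPflow 1.x style; the data X, Y and
# the kernel choice are illustrative assumptions, not part of this module):
#
#   import gpflow
#   model = gpflow.models.GPR(X, Y, kern=gpflow.kernels.RBF(1))
#   opt = gpflow.train.ScipyOptimizer(method='L-BFGS-B')
#   opt.minimize(model, maxiter=200, disp=True)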

# ===== examples/Python/ReconstructionSystem/integrate_scene.py | repo: martinruenz/Open3D | license: MIT =====
# Open3D: www.open3d.org
# The MIT License (MIT)
# See license file or visit www.open3d.org for details
# examples/Python/ReconstructionSystem/integrate_scene.py
import numpy as np
import math
import sys
from open3d import *
sys.path.append("../Utility")
from file import *
sys.path.append(".")
from make_fragments import *
def scalable_integrate_rgb_frames(path_dataset, intrinsic, config):
[color_files, depth_files] = get_rgbd_file_lists(path_dataset)
n_files = len(color_files)
n_fragments = int(math.ceil(float(n_files) / \
config['n_frames_per_fragment']))
volume = ScalableTSDFVolume(voxel_length = config["tsdf_cubic_size"]/512.0,
sdf_trunc = 0.04, color_type = TSDFVolumeColorType.RGB8)
pose_graph_fragment = read_pose_graph(join(
path_dataset, config["template_refined_posegraph_optimized"]))
for fragment_id in range(len(pose_graph_fragment.nodes)):
pose_graph_rgbd = read_pose_graph(join(path_dataset,
config["template_fragment_posegraph_optimized"] % fragment_id))
for frame_id in range(len(pose_graph_rgbd.nodes)):
frame_id_abs = fragment_id * \
config['n_frames_per_fragment'] + frame_id
print("Fragment %03d / %03d :: integrate rgbd frame %d (%d of %d)."
% (fragment_id, n_fragments-1, frame_id_abs, frame_id+1,
len(pose_graph_rgbd.nodes)))
rgbd = read_rgbd_image(color_files[frame_id_abs],
depth_files[frame_id_abs], False, config)
pose = np.dot(pose_graph_fragment.nodes[fragment_id].pose,
pose_graph_rgbd.nodes[frame_id].pose)
volume.integrate(rgbd, intrinsic, np.linalg.inv(pose))
mesh = volume.extract_triangle_mesh()
mesh.compute_vertex_normals()
if config["debug_mode"]:
draw_geometries([mesh])
mesh_name = join(path_dataset, config["template_global_mesh"])
write_triangle_mesh(mesh_name, mesh, False, True)
def run(config):
print("integrate the whole RGBD sequence using estimated camera pose.")
if config["path_intrinsic"]:
intrinsic = read_pinhole_camera_intrinsic(config["path_intrinsic"])
else:
intrinsic = PinholeCameraIntrinsic(
PinholeCameraIntrinsicParameters.PrimeSenseDefault)
scalable_integrate_rgb_frames(config["path_dataset"], intrinsic, config)
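
# Call-site sketch. The key names below mirror the ones referenced above; the
# values are illustrative assumptions (the real project reads a JSON config):
#
#   example_config = {
#       "path_dataset": "dataset/my_scene/",
#       "path_intrinsic": "",  # empty -> PrimeSense default intrinsics
#       "n_frames_per_fragment": 100,
#       "tsdf_cubic_size": 3.0,
#       "template_refined_posegraph_optimized": "scene/refined_registration_optimized.json",
#       "template_fragment_posegraph_optimized": "fragments/fragment_optimized_%03d.json",
#       "template_global_mesh": "scene/integrated.ply",
#       "debug_mode": False,
#   }
#   run(example_config)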

# ===== perma_web/perma/views/common.py | repo: rachelaus/perma | license: MIT, Unlicense =====
from ratelimit.decorators import ratelimit
from datetime import timedelta
from dateutil.tz import tzutc
from io import StringIO
from link_header import Link as Rel, LinkHeader
from urllib.parse import urlencode
import time
from timegate.utils import closest
from warcio.timeutils import datetime_to_http_date
from werkzeug.http import parse_date
from django.forms import widgets
from django.shortcuts import render, get_object_or_404, redirect
from django.http import (HttpResponse, HttpResponseRedirect, HttpResponsePermanentRedirect,
JsonResponse, HttpResponseNotFound, HttpResponseBadRequest)
from django.urls import reverse, NoReverseMatch
from django.conf import settings
from django.core.files.storage import default_storage
from django.utils import timezone
from django.views.generic import TemplateView
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.cache import cache_control
from django.views.decorators.clickjacking import xframe_options_exempt
from django.utils.six.moves.http_client import responses
from perma.wsgi_utils import retry_on_exception
from ..models import Link, Registrar, Organization, LinkUser
from ..forms import ContactForm
from ..utils import (if_anonymous, ratelimit_ip_key, redirect_to_download,
protocol, stream_warc_if_permissible, set_options_headers,
timemap_url, timegate_url, memento_url, memento_data_for_url, url_with_qs_and_hash,
get_client_ip, remove_control_characters)
from ..email import send_admin_email, send_user_email_copy_admins
import logging
logger = logging.getLogger(__name__)
valid_serve_types = ['image', 'warc_download']
class DirectTemplateView(TemplateView):
extra_context = None
def get_context_data(self, **kwargs):
""" Override Django's TemplateView to allow passing in extra_context. """
context = super(self.__class__, self).get_context_data(**kwargs)
if self.extra_context is not None:
for key, value in self.extra_context.items():
if callable(value):
context[key] = value()
else:
context[key] = value
return context
def landing(request):
"""
The landing page
"""
if request.user.is_authenticated and request.get_host() not in request.META.get('HTTP_REFERER',''):
return HttpResponseRedirect(reverse('create_link'))
else:
# orgs_count = Organization.objects.count()
# users_count = LinkUser.objects.count()
# links_count = Link.objects.filter(is_private=False).count()
return render(request, 'landing.html', {
'this_page': 'landing',
# 'orgs_count': orgs_count, 'users_count': users_count, 'links_count': links_count,
})
def about(request):
"""
The about page
"""
partners = sorted(Registrar.objects.filter(show_partner_status=True), key=lambda r: r.partner_display_name or r.name)
halfway_point = int(len(partners)/2)
# sending two sets of arrays so that we can separate them
# into two columns alphabetically, the right way
partners_first_col = partners[:halfway_point] if len(partners) > 0 else []
partners_last_col = partners[halfway_point:] if len(partners) > 0 else []
return render(request, 'about.html', {
'partners': partners,
'partners_first_col': partners_first_col,
'partners_last_col': partners_last_col
})
def faq(request):
"""
The faq page
"""
registrars_count = Registrar.objects.approved().count()
orgs_count = Organization.objects.all().count()
users_count = LinkUser.objects.all().count()
links_count = Link.objects.filter(is_private=False).count()
return render(request, 'docs/faq.html', {'registrars_count': registrars_count,
'orgs_count': orgs_count, 'users_count': users_count, 'links_count': links_count,})
def stats(request):
"""
The global stats
"""
return render(request, 'stats.html')
@if_anonymous(cache_control(max_age=settings.CACHE_MAX_AGES['single_permalink']))
@ratelimit(rate=settings.MINUTE_LIMIT, block=True, key=ratelimit_ip_key)
@ratelimit(rate=settings.HOUR_LIMIT, block=True, key=ratelimit_ip_key)
@ratelimit(rate=settings.DAY_LIMIT, block=True, key=ratelimit_ip_key)
def single_permalink(request, guid):
"""
Given a Perma ID, serve it up.
"""
raw_user_agent = request.META.get('HTTP_USER_AGENT', '')
# Create a canonical version of guid (non-alphanumerics removed, hyphens every 4 characters, uppercase),
# and forward to that if it's different from current guid.
canonical_guid = Link.get_canonical_guid(guid)
# We only do the redirect if the correctly-formatted GUID actually exists --
# this prevents actual 404s from redirecting with weird formatting.
link = get_object_or_404(Link.objects.all_with_deleted(), guid=canonical_guid)
if canonical_guid != guid:
return HttpResponsePermanentRedirect(reverse('single_permalink', args=[canonical_guid]))
# Forward to replacement link if replacement_link is set.
if link.replacement_link_id:
return HttpResponseRedirect(reverse('single_permalink', args=[link.replacement_link_id]))
# If we get an unrecognized archive type (which could be an old type like 'live' or 'pdf'), forward to default version
serve_type = request.GET.get('type')
if serve_type is None:
serve_type = 'source'
elif serve_type not in valid_serve_types:
return HttpResponsePermanentRedirect(reverse('single_permalink', args=[canonical_guid]))
# serve raw WARC
if serve_type == 'warc_download':
return stream_warc_if_permissible(link, request.user)
# handle requested capture type
if serve_type == 'image':
capture = link.screenshot_capture
# not all Perma Links have screenshots; if no screenshot is present,
# forward to primary capture for playback or for appropriate error message
if (not capture or capture.status != 'success') and link.primary_capture:
return HttpResponseRedirect(reverse('single_permalink', args=[guid]))
else:
capture = link.primary_capture
# if primary capture did not work, but screenshot did work, forward to screenshot
if (not capture or capture.status != 'success') and link.screenshot_capture and link.screenshot_capture.status == 'success':
return HttpResponseRedirect(reverse('single_permalink', args=[guid])+"?type=image")
try:
capture_mime_type = capture.mime_type()
except AttributeError:
# If capture is deleted, then mime type does not exist. Catch error.
capture_mime_type = None
# Special handling for mobile pdf viewing because it can be buggy
# Redirecting to a download page if on mobile
redirect_to_download_view = redirect_to_download(capture_mime_type, raw_user_agent)
# If this record was just created by the current user, we want to do some special-handling:
# for instance, show them a message in the template, and give the playback extra time to initialize
new_record = request.user.is_authenticated and link.created_by_id == request.user.id and not link.user_deleted \
and link.creation_timestamp > timezone.now() - timedelta(seconds=300)
# Provide the max upload size, in case the upload form is used
max_size = settings.MAX_ARCHIVE_FILE_SIZE / 1024 / 1024
if not link.submitted_description:
link.submitted_description = "This is an archive of %s from %s" % (link.submitted_url, link.creation_timestamp.strftime("%A %d, %B %Y"))
logger.info(f"Preparing context for {link.guid}")
context = {
'link': link,
'redirect_to_download_view': redirect_to_download_view,
'mime_type': capture_mime_type,
'can_view': request.user.can_view(link),
'can_edit': request.user.can_edit(link),
'can_delete': request.user.can_delete(link),
'can_toggle_private': request.user.can_toggle_private(link),
'capture': capture,
'serve_type': serve_type,
'new_record': new_record,
'this_page': 'single_link',
'max_size': max_size,
'link_url': settings.HOST + '/' + link.guid,
'protocol': protocol(),
}
if context['can_view'] and link.can_play_back():
if new_record:
logger.info(f"Ensuring warc for {link.guid} has finished uploading.")
def assert_exists(filename):
assert default_storage.exists(filename)
try:
retry_on_exception(assert_exists, args=[link.warc_storage_file()], exception=AssertionError, attempts=settings.WARC_AVAILABLE_RETRIES)
except AssertionError:
logger.error(f"Made {settings.WARC_AVAILABLE_RETRIES} attempts to get {link.guid}'s warc; still not available.")
# Let's consider this a HTTP 200, I think...
return render(request, 'archive/playback-delayed.html', context, status=200)
context['client_side_playback'] = request.GET.get('client-side') if (
request.GET.get('client-side') and
settings.OFFER_CLIENT_SIDE_PLAYBACK and
not request.user.is_anonymous and
request.user.offer_client_side_playback
) else ''
if context['client_side_playback']:
logger.info(f'Using client-side playback for {link.guid}')
else:
# Play back using Webrecorder
try:
logger.info(f"Initializing play back of {link.guid}")
wr_username = link.init_replay_for_user(request)
except Exception: # noqa
# We are experiencing many varieties of transient flakiness in playback:
# second attempts, triggered by refreshing the page, almost always seem to work.
# While we debug... let's give playback a second try here, and see if this
# noticeably improves user experience.
logger.exception(f"First attempt to init replay of {link.guid} failed. (Retrying: observe whether this error recurs.)")
time.sleep(settings.WR_PLAYBACK_RETRY_AFTER)
logger.info(f"Initializing play back of {link.guid} (2nd try)")
wr_username = link.init_replay_for_user(request)
logger.info(f"Updating context with WR playback information for {link.guid}")
context.update({
'wr_host': settings.PLAYBACK_HOST,
'wr_prefix': link.wr_iframe_prefix(wr_username),
'wr_url': capture.url,
'wr_timestamp': link.creation_timestamp.strftime('%Y%m%d%H%M%S'),
})
logger.info(f"Rendering template for {link.guid}")
response = render(request, 'archive/single-link.html', context)
# Adjust status code
if link.user_deleted:
response.status_code = 410
elif not context['can_view'] and link.is_private:
response.status_code = 403
# Add memento headers, when appropriate
logger.info(f"Deciding whether to include memento headers for {link.guid}")
if link.is_visible_to_memento():
logger.info(f"Including memento headers for {link.guid}")
response['Memento-Datetime'] = datetime_to_http_date(link.creation_timestamp)
# impose an arbitrary length-limit on the submitted URL, so that this header doesn't become illegally large
url = link.submitted_url[:500]
# strip control characters from url, if somehow they slipped in prior to https://github.com/harvard-lil/perma/commit/272b3a79d94a795142940281c9444b45c24a05db
url = remove_control_characters(url)
response['Link'] = str(
LinkHeader([
Rel(url, rel='original'),
Rel(timegate_url(request, url), rel='timegate'),
Rel(timemap_url(request, url, 'link'), rel='timemap', type='application/link-format'),
Rel(timemap_url(request, url, 'json'), rel='timemap', type='application/json'),
Rel(timemap_url(request, url, 'html'), rel='timemap', type='text/html'),
Rel(memento_url(request, link), rel='memento', datetime=datetime_to_http_date(link.creation_timestamp)),
])
)
logger.info(f"Returning response for {link.guid}")
return response
@xframe_options_exempt
def set_iframe_session_cookie(request):
"""
The <iframe> used for Perma Link playback serves content from Webrecorder.
If the Perma Link is private, playback requires a WR session cookie.
The cookie's value is set via a WR api call during Perma's
`link.init_replay_for_user` and is stored in Perma's session data.
If the iframe requests a resource without the cookie,
WR will redirect here. This route in turn redirects back to WR with the
session cookie as a GET param. WR sets the cookie in the browser, and then,
finally, redirects to the originally requested resource.
"""
if request.method == 'OPTIONS':
# no redirects required; subsequent requests from the browser get the cookie
response = HttpResponse()
else:
cookie = urlencode({'cookie': request.session.get('wr_private_session_cookie')})
query = request.META.get('QUERY_STRING', '')
if not cookie:
user = 'Anonymous'
if request.user.is_authenticated:
user = f"User {request.user.id}"
logger.error(f'No WR cookie found in session! User: {user}. Session keys: {request.session.keys()}.')
return render(request, 'archive/archive-error.html', {
'err_url': f'_set_session?{query}',
'timestamp': timezone.now(),
'err_msg': 'Missing cookie',
})
url = protocol() + settings.PLAYBACK_HOST + f'/_set_session?{query}&{cookie}'
response = HttpResponseRedirect(url)
response['Cache-Control'] = 'no-cache'
# set CORS headers (for both OPTIONS and actual redirect)
set_options_headers(request, response)
return response
def serve_warc(request, guid):
"""
This is a redundant route for downloading a warc, for use in client-side playback,
which has specific requirements:
- the warc must be served from a URL ending in `.warc`
- the response cannot be streamed
"""
canonical_guid = Link.get_canonical_guid(guid)
link = get_object_or_404(Link.objects.all_with_deleted(), guid=canonical_guid)
return stream_warc_if_permissible(link, request.user, stream=False)
def replay_service_worker(request):
"""
The service worker required for client-side playback:
"""
return HttpResponse(f'importScripts("{ settings.SERVICE_WORKER_URL }");\n', content_type='application/x-javascript')
@if_anonymous(cache_control(max_age=settings.CACHE_MAX_AGES['timemap']))
@ratelimit(rate=settings.MINUTE_LIMIT, block=True, key=ratelimit_ip_key)
@ratelimit(rate=settings.HOUR_LIMIT, block=True, key=ratelimit_ip_key)
@ratelimit(rate=settings.DAY_LIMIT, block=True, key=ratelimit_ip_key)
def timemap(request, response_format, url):
url = url_with_qs_and_hash(url, request.META['QUERY_STRING'])
data = memento_data_for_url(request, url)
if data:
if response_format == 'json':
response = JsonResponse(data)
elif response_format == 'html':
response = render(request, 'memento/timemap.html', data)
else:
content_type = 'application/link-format'
file = StringIO()
file.writelines(f"{line},\n" for line in [
Rel(data['original_uri'], rel='original'),
Rel(data['timegate_uri'], rel='timegate'),
Rel(data['self'], rel='self', type='application/link-format'),
Rel(data['timemap_uri']['link_format'], rel='timemap', type='application/link-format'),
Rel(data['timemap_uri']['json_format'], rel='timemap', type='application/json'),
Rel(data['timemap_uri']['html_format'], rel='timemap', type='text/html')
] + [
Rel(memento['uri'], rel='memento', datetime=datetime_to_http_date(memento['datetime'])) for memento in data['mementos']['list']
])
file.seek(0)
response = HttpResponse(file, content_type=f'{content_type}')
else:
if response_format == 'html':
response = render(request, 'memento/timemap.html', {"original_uri": url}, status=404)
else:
response = HttpResponseNotFound('404 page not found\n')
response['X-Memento-Count'] = str(len(data['mementos']['list'])) if data else 0
return response
@if_anonymous(cache_control(max_age=settings.CACHE_MAX_AGES['timegate']))
@ratelimit(rate=settings.MINUTE_LIMIT, block=True, key=ratelimit_ip_key)
@ratelimit(rate=settings.HOUR_LIMIT, block=True, key=ratelimit_ip_key)
@ratelimit(rate=settings.DAY_LIMIT, block=True, key=ratelimit_ip_key)
def timegate(request, url):
# impose an arbitrary length-limit on the submitted URL, so that the headers don't become illegally large
url = url_with_qs_and_hash(url, request.META['QUERY_STRING'])[:500]
data = memento_data_for_url(request, url)
if not data:
return HttpResponseNotFound('404 page not found\n')
accept_datetime = request.META.get('HTTP_ACCEPT_DATETIME')
if accept_datetime:
accept_datetime = parse_date(accept_datetime)
if not accept_datetime:
return HttpResponseBadRequest('Invalid value for Accept-Datetime.')
else:
accept_datetime = timezone.now()
accept_datetime = accept_datetime.replace(tzinfo=tzutc())
target, target_datetime = closest(map(lambda m: m.values(), data['mementos']['list']), accept_datetime)
response = redirect(target)
response['Vary'] = 'accept-datetime'
response['Link'] = str(
LinkHeader([
Rel(data['original_uri'], rel='original'),
Rel(data['timegate_uri'], rel='timegate'),
Rel(data['timemap_uri']['link_format'], rel='timemap', type='application/link-format'),
Rel(data['timemap_uri']['json_format'], rel='timemap', type='application/json'),
Rel(data['timemap_uri']['html_format'], rel='timemap', type='text/html'),
Rel(data['mementos']['first']['uri'], rel='first memento', datetime=datetime_to_http_date(data['mementos']['first']['datetime'])),
Rel(data['mementos']['last']['uri'], rel='last memento', datetime=datetime_to_http_date(data['mementos']['last']['datetime'])),
Rel(target, rel='memento', datetime=datetime_to_http_date(target_datetime)),
])
)
return response
def rate_limit(request, exception):
"""
When a user hits a rate limit, send them here.
"""
return render(request, "rate_limit.html")
@csrf_exempt
@ratelimit(rate=settings.MINUTE_LIMIT, block=True, key=ratelimit_ip_key)
def contact(request):
"""
Our contact form page
"""
def affiliation_string():
affiliation_string = ''
if request.user.is_authenticated:
if request.user.registrar:
affiliation_string = u"{} (Registrar)".format(request.user.registrar.name)
else:
affiliations = [u"{} ({})".format(org.name, org.registrar.name) for org in request.user.organizations.all().order_by('registrar')]
if affiliations:
affiliation_string = u', '.join(affiliations)
return affiliation_string
def formatted_organization_list(registrar):
organization_string = u''
if request.user.is_organization_user:
orgs = [org.name for org in request.user.organizations.filter(registrar=registrar)]
org_count = len(orgs)
if org_count > 2:
organization_string = u", ".join(orgs[:-1]) + u" and " + orgs[-1]
elif org_count == 2:
organization_string = u"{} and {}".format(orgs[0], orgs[1])
elif org_count == 1:
organization_string = orgs[0]
else:
# this should never happen, consider raising an exception
organization_string = '(error retrieving organization list)'
return organization_string
def handle_registrar_fields(form):
if request.user.is_supported_by_registrar():
registrars = set(org.registrar for org in request.user.organizations.all())
if len(registrars) > 1:
form.fields['registrar'].choices = [(registrar.id, registrar.name) for registrar in registrars]
if len(registrars) == 1:
form.fields['registrar'].widget = widgets.HiddenInput()
registrar = registrars.pop()
form.fields['registrar'].initial = registrar.id
form.fields['registrar'].choices = [(registrar.id, registrar.email)]
else:
del form.fields['registrar']
return form
if request.method == 'POST':
form = handle_registrar_fields(ContactForm(request.POST))
# Only send email if box2 is filled out and box1 is not.
# box1 is display: none, so should never be filled out except by spam bots.
if form.data.get('box1'):
user_ip = get_client_ip(request)
logger.info(f"Suppressing invalid contact email from {user_ip}: {form.data}")
return HttpResponseRedirect(reverse('contact_thanks'))
if form.is_valid():
# Assemble info for email
from_address = form.cleaned_data['email']
subject = "[perma-contact] " + form.cleaned_data['subject']
context = {
"message": form.cleaned_data['box2'],
"from_address": from_address,
"referer": form.cleaned_data['referer'],
"affiliation_string": affiliation_string()
}
if request.user.is_supported_by_registrar():
# Send to all active registar users for registrar and cc Perma
reg_id = form.cleaned_data['registrar']
context["organization_string"] = formatted_organization_list(registrar=reg_id)
send_user_email_copy_admins(
subject,
from_address,
[user.email for user in Registrar.objects.get(id=reg_id).active_registrar_users()],
request,
'email/registrar_contact.txt',
context
)
# redirect to a new URL:
return HttpResponseRedirect(
reverse('contact_thanks') + "?{}".format(urlencode({'registrar': reg_id}))
)
else:
# Send only to the admins
send_admin_email(
subject,
from_address,
request,
'email/admin/contact.txt',
context
)
# redirect to a new URL:
return HttpResponseRedirect(reverse('contact_thanks'))
else:
return render(request, 'contact.html', {'form': form})
else:
# Our contact form serves a couple of purposes
# If we get a message parameter, we're getting a message from the create form
# about a failed archive
#
# If we get a flagged parameter, we're getting the guid of an archive from the
# Flag as inappropriate button on an archive page
#
# We likely want to clean up this contact for logic if we tack much else on
subject = request.GET.get('subject', '')
message = request.GET.get('message', '')
upgrade = request.GET.get('upgrade', '')
if upgrade == 'organization' :
subject = 'Upgrade to Unlimited Account'
message = "My organization is interested in a subscription to Perma.cc."
else:
# all other values of `upgrade` are disallowed
upgrade = None
flagged_archive_guid = request.GET.get('flag', '')
if flagged_archive_guid:
subject = 'Reporting Inappropriate Content'
message = 'http://perma.cc/%s contains material that is inappropriate.' % flagged_archive_guid
form = handle_registrar_fields(
ContactForm(
initial={
'box2': message,
'subject': subject,
'referer': request.META.get('HTTP_REFERER', ''),
'email': getattr(request.user, 'email', '')
}
)
)
return render(request, 'contact.html', {'form': form, 'upgrade': upgrade})
def contact_thanks(request):
"""
The page users are delivered at after submitting the contact form.
"""
registrar = Registrar.objects.filter(pk=request.GET.get('registrar', '-1')).first()
return render(request, 'contact-thanks.html', {'registrar': registrar})
def robots_txt(request):
"""
robots.txt
"""
from ..urls import urlpatterns
disallowed_prefixes = ['_', 'archive-', 'api_key', 'errors', 'log', 'manage', 'password', 'register', 'service', 'settings', 'sign-up']
allow = []
# some urlpatterns do not have names
names = [urlpattern.name for urlpattern in urlpatterns if urlpattern.name is not None]
for name in names:
# urlpatterns that take parameters can't be reversed
try:
url = reverse(name)
disallowed = any(url[1:].startswith(prefix) for prefix in disallowed_prefixes)
if not disallowed and url != '/':
allow.append(url)
except NoReverseMatch:
pass
disallow = list(Link.GUID_CHARACTER_SET) + disallowed_prefixes
return render(request, 'robots.txt', {'allow': allow, 'disallow': disallow}, content_type='text/plain; charset=utf-8')
@xframe_options_exempt
@csrf_exempt
def archive_error(request):
"""
Replay content not found error page
"""
# handle cors options for error page redirect from cors
if request.method == 'OPTIONS':
response = HttpResponse()
set_options_headers(request, response)
return response
reported_status = request.GET.get('status')
status_code = int(reported_status or '200')
if status_code != 404:
# We only want to return 404 and 200 here, to avoid complications with Cloudflare.
# Other error statuses always (?) indicate some problem with WR, not a status code we
# need or want to pass on to the user.
status_code = 200
response = render(request, 'archive/archive-error.html', {
'err_url': request.GET.get('url'),
'timestamp': request.GET.get('timestamp'),
'status': f'{status_code} {responses.get(status_code)}',
'err_msg': request.GET.get('error'),
}, status=status_code)
# even if not setting full headers (eg. if Origin is not set)
    # still set Access-Control-Allow-Origin to content host to avoid Chrome CORB issues
set_options_headers(request, response, always_set_allowed_origin=True)
return response

# ===== Coding the Matrix/00_the_function/dictutil.py | repo: bustinstuff/linearalgebra | license: MIT =====
def dict2list(dct, keylist):
"""
task 0.6.3
procedure taking a dictionary and a keylist
output should be a list of the dict keylist values
e.g. input dct={'a':'A', 'b':'B', 'c':'C'} keylist=['b', 'c', 'a'] output=['B', 'C', 'A']
"""
return [dct[key] for key in keylist]
def list2dict(L, keylist):
"""
task 0.6.3
procedure taking a list of letters and a keylist
    output should be a dict that maps them together
e.g. input L=['A', 'B', 'C'] keylist=['a', 'b', 'c'] output={'a':'A', 'b':'B', 'c':'C'}
"""
return {k:L[i] for i,k in enumerate(keylist)}
def listrange2dict(L):
"""
task 0.6.4
procedure taking a list
output should be a dictionary of the given list
e.g. input L=['A', 'B', 'C'] output={0:'A', 1:'B', 2:'C'}
"""
return {i:k for i,k in enumerate(L)}
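
# Quick self-check using the examples from the docstrings above.
if __name__ == '__main__':
    assert dict2list({'a':'A', 'b':'B', 'c':'C'}, ['b', 'c', 'a']) == ['B', 'C', 'A']
    assert list2dict(['A', 'B', 'C'], ['a', 'b', 'c']) == {'a':'A', 'b':'B', 'c':'C'}
    assert listrange2dict(['A', 'B', 'C']) == {0:'A', 1:'B', 2:'C'}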

# ===== solutions/tier_02/python/uri_2343_cacadores_de_mitos.py | repo: EstevaoNaval/URI_repository | license: MIT =====
# -*- coding: utf-8 -*-
qntOcorrenciaRaio = int(input())
caiuMesmoLugar = 0
dictCoordRaio = [501 * [0] for x in range(501)]
for indexOcorrencia in range(qntOcorrenciaRaio):
listCoordRaioCaiu = list(map(int, input().split()))
if caiuMesmoLugar == 0:
if dictCoordRaio[listCoordRaioCaiu[0]][listCoordRaioCaiu[1]] == 0:
dictCoordRaio[listCoordRaioCaiu[0]][listCoordRaioCaiu[1]] = 1
else:
caiuMesmoLugar = 1
else:
break
print(caiuMesmoLugar)

# ===== model/evaluation_audio.py | repo: ggzhang0071/multimodal-speech-emotion | license: MIT =====
# -*- coding: utf-8 -*-
"""
what : evaluation
"""
from tensorflow.core.framework import summary_pb2
from random import shuffle
import numpy as np
from scipy.stats import rankdata
from project_config import *
"""
desc :
inputs:
sess : tf session
model : model for test
data : such as the dev_set, test_set...
return:
sum_batch_ce : sum cross_entropy
accr : accuracy
"""
def run_test(sess, model, batch_gen, data):
list_batch_ce = []
list_batch_correct = []
list_pred = []
list_label = []
max_loop = len(data) / model.batch_size
remaining = len(data) % model.batch_size
    # evaluate data ( N chunks of batch_size + remainder (+1) )
for test_itr in xrange( max_loop + 1 ):
raw_encoder_inputs, raw_encoder_seq, raw_encoder_prosody, raw_label = batch_gen.get_batch(
data=data,
batch_size=model.batch_size,
encoder_size=model.encoder_size,
is_test=True,
start_index= (test_itr* model.batch_size)
)
        # prepare feed data to be pushed into the placeholders
input_feed = {}
input_feed[model.encoder_inputs] = raw_encoder_inputs
input_feed[model.encoder_seq] = raw_encoder_seq
input_feed[model.encoder_prosody] = raw_encoder_prosody
input_feed[model.y_labels] = raw_label
input_feed[model.dr_prob] = 1.0 # no drop out while evaluating
try:
bpred, bloss = sess.run([model.batch_pred, model.batch_loss], input_feed)
except:
print "excepetion occurs in valid step : " + str(test_itr)
pass
# remaining data case (last iteration)
if test_itr == (max_loop):
bpred = bpred[:remaining]
bloss = bloss[:remaining]
raw_label = raw_label[:remaining]
# batch loss
list_batch_ce.extend( bloss )
# batch accuracy
list_pred.extend( np.argmax(bpred, axis=1) )
list_label.extend( np.argmax(raw_label, axis=1) )
if IS_LOGGING:
with open( '../analysis/inference_log/audio.txt', 'w' ) as f:
f.write( ' '.join( [str(x) for x in list_pred] ) )
with open( '../analysis/inference_log/audio_label.txt', 'w' ) as f:
f.write( ' '.join( [str(x) for x in list_label] ) )
list_batch_correct = [1 for x, y in zip(list_pred,list_label) if x==y]
sum_batch_ce = np.sum( list_batch_ce )
accr = np.sum ( list_batch_correct ) / float( len(data) )
value1 = summary_pb2.Summary.Value(tag="valid_loss", simple_value=sum_batch_ce)
value2 = summary_pb2.Summary.Value(tag="valid_accuracy", simple_value=accr )
summary = summary_pb2.Summary(value=[value1, value2])
return sum_batch_ce, accr, summary, list_pred
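
# Call-site sketch (assumes a tf.Session, a built model and a batch generator
# from this project, plus a loaded dev_set):
#
#   dev_ce, dev_accr, dev_summary, dev_pred = run_test(sess, model, batch_gen, dev_set)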

# ===== client/basic.py | repo: Luanxpg9/django-generic-api | license: Apache-2.0 =====
import requests
#endpoint = "https://httpbin.org"
#endpoint_status = "https://httpbin.org/status/200"
#endpoint_anything = "https://httpbin.org/anything"
endpoint = "http://localhost:8000"
get_response = requests.get(endpoint, json={
    "query": "Hello World"})  # API -> Method
print(get_response.json()) # Print whatever the Rest API returns
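
# A POST to the same server would look like this (sketch; the generic API's
# actual routes are not shown in this file):
post_response = requests.post(endpoint, json={"query": "Hello World"})
print(post_response.status_code)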

# ===== coinone/views.py | repo: poohc300/auto-trade-system | license: MIT =====
from django.shortcuts import render
from .models import CoinoneService
from rest_framework.views import APIView
from django.http import HttpResponse, JsonResponse
from django.views.decorators.csrf import csrf_exempt
from rest_framework.permissions import IsAuthenticated, AllowAny, IsAdminUser
from rest_framework.decorators import permission_classes, authentication_classes
from drf_yasg.utils import swagger_auto_schema
from drf_yasg import openapi
import json
class OrderBookView(APIView):
@swagger_auto_schema(
operation_summary="오더북 리스트 조회하기",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
'currency' : openapi.Schema(type=openapi.TYPE_STRING, description='currency')
}
),
tag=['coinone'],
operation_description='retrieve orderbook list'
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
_access_token = data['access_token']
_secret_key = data['secret_key']
_currency = data['currency']
coinone = CoinoneService(
access_token=_access_token,
secret_key=_secret_key
)
try:
result = coinone.get_orderbook(
currency=_currency
)
return HttpResponse(result)
except Exception as e:
return HttpResponse(e)
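
# Client-side sketch of the body this view expects; the field names come from
# the swagger schema above, while the host and route are placeholders:
#
#   requests.post("http://localhost:8000/orderbook/", json={
#       "access_token": "...", "secret_key": "...", "currency": "btc"})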
class OrderView(APIView):
@swagger_auto_schema(
operation_summary="주문하기",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
'order_type' : openapi.Schema(type=openapi.TYPE_STRING, description='order_type'),
'price' : openapi.Schema(type=openapi.TYPE_STRING, description='price'),
'qty' : openapi.Schema(type=openapi.TYPE_STRING, description='qty'),
'currency' : openapi.Schema(type=openapi.TYPE_STRING, description='currency')
}
),
tag=['coinone'],
operation_description='create order'
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
kwargs = data
coinone = CoinoneService(
access_token=data['access_token'],
secret_key=data['secret_key']
)
try:
print(kwargs)
result = coinone.create_order(
**kwargs
)
return HttpResponse(result)
except Exception as e:
return HttpResponse(e)
class AccountView(APIView):
@swagger_auto_schema(
operation_summary="계좌 정보",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
}
),
tag=['coinone'],
operation_description='retrieve balance'
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
print(data)
kwargs = data
coinone = CoinoneService(
access_token=data['access_token'],
secret_key=data['secret_key']
)
try:
result = coinone.get_balance(
**kwargs
)
return JsonResponse(result)
except Exception as e:
return HttpResponse(e)
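# NOTE: the OrderView below re-declares the OrderView defined earlier in this
# module, so this second definition is the binding Python keeps.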
class OrderView(APIView):
@swagger_auto_schema(
operation_summary="주문하기",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
'order_type' : openapi.Schema(type=openapi.TYPE_STRING, description='order_type'),
'price' : openapi.Schema(type=openapi.TYPE_STRING, description='price'),
'qty' : openapi.Schema(type=openapi.TYPE_STRING, description='qty'),
'currency' : openapi.Schema(type=openapi.TYPE_STRING, description='currency')
}
),
tag=['coinone'],
operation_description='주문하기 order_type이 0이면 지정가 매수 1이면 지정가 매도\
2이면 시장가 매수 3이면 시장가 매도 '
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
kwargs = data
coinone = CoinoneService(
access_token=data['access_token'],
secret_key=data['secret_key']
)
try:
print(kwargs)
result = coinone.create_order(
**kwargs
)
return JsonResponse({"message": "주문이 완료되었습니다"})
except Exception as e:
return HttpResponse(e)
class OrderCancelView(APIView):
@swagger_auto_schema(
operation_summary="미체결 주문 취소",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
'qty' : openapi.Schema(type=openapi.TYPE_STRING, description='qty'),
'price' : openapi.Schema(type=openapi.TYPE_STRING, description='price'),
'currency' : openapi.Schema(type=openapi.TYPE_STRING, description='currency'),
'order_id' : openapi.Schema(type=openapi.TYPE_STRING, description='order id'),
'is_ask' : openapi.Schema(type=openapi.TYPE_STRING, description='1이면 매도 0이면 매수')
}
),
tag=['coinone'],
operation_description='cancel order'
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
print(data)
kwargs = data
coinone = CoinoneService(
access_token=data['access_token'],
secret_key=data['secret_key']
)
try:
result = coinone.cancel_coin_order(
**kwargs
)
return JsonResponse(result)
except Exception as e:
return HttpResponse(e)
@permission_classes([AllowAny])
class TransactionHistoryView(APIView):
@swagger_auto_schema(
operation_summary="트랜잭션 정보",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
'price' : openapi.Schema(type=openapi.TYPE_STRING, description='price'),
'currency' : openapi.Schema(type=openapi.TYPE_STRING, description='currency'),
'is_coin' : openapi.Schema(type=openapi.TYPE_STRING, description='1이면 코인 0이면 KRW')
}
),
tag=['coinone'],
operation_description='retrieve transaction history'
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
print(data)
kwargs = data
coinone = CoinoneService(
access_token=data['access_token'],
secret_key=data['secret_key']
)
try:
if kwargs['is_coin'] == "1":
result = coinone.get_coin_transaction_history(
**kwargs
)
else:
result = coinone.get_krw_transaction_history(
**kwargs
)
return JsonResponse(result)
except Exception as e:
return HttpResponse(e)
@permission_classes([AllowAny])
class OrderlistView(APIView):
@swagger_auto_schema(
operation_summary="지정가 주문 정보",
request_body=openapi.Schema(
type=openapi.TYPE_OBJECT,
properties={
'access_token' : openapi.Schema(type=openapi.TYPE_STRING, description='api key'),
'secret_key' : openapi.Schema(type=openapi.TYPE_STRING, description='api secret'),
'currency' : openapi.Schema(type=openapi.TYPE_STRING, description='currency'),
}
),
tag=['coinone'],
operation_description='retrieve orderlist'
)
def post(self, request, *args, **kwargs) -> HttpResponse:
data = request.data
print(data)
kwargs = data
coinone = CoinoneService(
access_token=data['access_token'],
secret_key=data['secret_key']
)
try:
result = coinone.get_limit_orders(
**kwargs
)
return JsonResponse(result)
except Exception as e:
            return HttpResponse(e)

# ===== sobel_operator.py | repo: LeslieWongCV/CV | license: MIT =====
import cv2
import numpy as np
import scipy
from scipy import signal
LENA = cv2.imread('/Users/leslie/Desktop/革命成果-学术/LENA_FULL.jpg',0)  # pass 1 instead of 0 to read in true color
# cv2.namedWindow("Image")
# cv2.imshow('image',LENA)
# cv2.waitKey (0)
row, column = LENA.shape  # adding a channel dim would accept color, but 'convolve2d inputs must both be 2D arrays'
LENA_f = np.copy(LENA)
LENA_f_ = LENA_f.astype('float')
# for i in range(0,2): how to merge the three separately convolved channels back into one image?
G_x = np.array([[-1,0,1],[-2,0,2],[-1,0,1]]) #Sobel Operator
G_y = np.array([[-1,-2,-1],[0,0,0],[1,2,1]])
X = scipy.signal.convolve2d(LENA_f_,G_x,'same')
Y = scipy.signal.convolve2d(LENA_f_,G_y,'same')
# X = np.convolve(G_x,LENA_f_,'same')  # numpy.convolve only supports 1D convolution
# Y = np.convolve(G_y,LENA_f_,'same')
X_abs = abs(X)
Y_abs = abs(Y)
G = X_abs + Y_abs
sharp = G + LENA_f_
sharp = np.where(sharp<0,0,np.where(sharp>255,255,sharp))
sharp = sharp.astype('uint8')
cv2.imshow('Sharp',sharp)
cv2.waitKey()
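
# Sketch answering the question in the comment above: run the same Sobel pair
# on each channel of a color read, then stack the channels back into one image
# (the file path is a placeholder; channels are BGR as returned by cv2.imread):
#
#   LENA_color = cv2.imread('LENA_FULL.jpg', 1).astype('float')
#   channels = []
#   for c in range(3):
#       Xc = scipy.signal.convolve2d(LENA_color[:, :, c], G_x, 'same')
#       Yc = scipy.signal.convolve2d(LENA_color[:, :, c], G_y, 'same')
#       channels.append(abs(Xc) + abs(Yc))
#   G_color = np.stack(channels, axis=2).clip(0, 255).astype('uint8')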

# ===== clients/python/test/test_reset_creation.py | repo: adipolak/lakeFS | license: Apache-2.0 =====
"""
lakeFS API
lakeFS HTTP API # noqa: E501
The version of the OpenAPI document: 0.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import sys
import unittest
import lakefs_client
from lakefs_client.model.reset_creation import ResetCreation
class TestResetCreation(unittest.TestCase):
"""ResetCreation unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testResetCreation(self):
"""Test ResetCreation"""
# FIXME: construct object with mandatory attributes with example values
# model = ResetCreation() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()

# ===== think/qPrac4_LS_strat.py | repo: jasonyum/math-puzzles | license: MIT =====
# Pipeline imports
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.psychsignal import stocktwits
from quantopian.pipeline.factors import SimpleMovingAverage
from quantopian.pipeline.experimental import QTradableStocksUS
# Pipeline definition
def make_pipeline():
base_universe = QTradableStocksUS()
sentiment_score = SimpleMovingAverage(
inputs=[stocktwits.bull_minus_bear],
window_length=3,
)
return Pipeline(
columns={
'sentiment_score': sentiment_score,
},
screen=base_universe
)
'''
Note the way that pipelines work:
it's all about defining a base_universe = QTradableStocksUS().
Then you define sentiment_score as one of the factors; that factor comes
from quantopian.pipeline.factors (here, SimpleMovingAverage).
Once base_universe and sentiment_score are defined, you return
Pipeline(columns={...}, screen=base_universe).
A minimal attach/consume sketch follows below.
'''
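
# Minimal attach/consume sketch (standard Quantopian algorithm API; the
# pipeline name 'sentiment_pipe' is an arbitrary label):
from quantopian.algorithm import attach_pipeline, pipeline_output

def initialize(context):
    attach_pipeline(make_pipeline(), 'sentiment_pipe')

def before_trading_start(context, data):
    context.output = pipeline_output('sentiment_pipe')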

# ===== kombu/tests/transport/test_pubsub.py | repo: cldcvr/kombu | license: BSD-3-Clause =====
import unittest
from mock import patch, call, Mock, MagicMock, PropertyMock
from kombu.transport.pubsub import Channel, Message, QoS
from kombu.exceptions import ChannelError
from google.cloud import tasks_v2
from google.api_core.exceptions import AlreadyExists
class InnerMsg(object):
def __init__(self, **kwargs):
self.data = kwargs.get("data", None)
self.ack_id = kwargs.get("ackId", None)
self.message_id = kwargs.get("msgId", None)
class OuterMsg(object):
def __init__(self, **kwargs):
self.message = InnerMsg(**kwargs)
class TestChannel(unittest.TestCase):
''' TestChannel '''
def setUp(self):
''' setUp '''
mkConn = MagicMock()
mkid = mkConn.return_value._avail_channel_ids = MagicMock()
mkid.return_value.pop = MagicMock(return_value="foo")
self.mocktrans = mkConn.client.\
transport_options = MagicMock(return_value={})
mkConn.return_value.QoS = MagicMock()
self.channel = Channel(mkConn)
def test__get_topic_name(self):
''' test__get_topic_name '''
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "fizzbuzz"
rVal = self.channel._get_topic_path("foobar")
self.assertEqual(rVal, "projects/fizzbuzz/topics/foobar")
def test__get_subscription_name(self):
''' test__get_subscription_name '''
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "fizzbuzz"
rVal = self.channel._get_subscription_name("foobar")
self.assertEqual(rVal, "projects/fizzbuzz/subscriptions/foobar")
@patch('kombu.transport.pubsub.Channel.project_id', new_callable=PropertyMock)
def test__new_queue_from_client(self, mkID):
''' test__new_queue_from_client '''
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "fizzbuzz"
self.channel._get_subscription_name = MagicMock(
return_value='/foo/bar')
subscription_path = self.channel._new_queue("foo")
            self.assertEqual(subscription_path, "/foo/bar")
def test__new_queue_from_cache(self):
''' test__new_queue_from_cache '''
self.channel._queue_cache = {"foo": "QueueFoo"}
subscription_path = self.channel._new_queue("foo")
        self.assertEqual(subscription_path, "QueueFoo")
def test__get_from_subscription_pull(self):
''' test__get_from_subscription_pull '''
msg1, msg2 = OuterMsg(msgId=1), OuterMsg(msgId=2)
with patch('kombu.transport.pubsub.Channel.subscriber',
new_callable=PropertyMock) as mkSub:
with patch('kombu.transport.pubsub.Channel.qos',
new_callable=PropertyMock) as mkQoS:
with patch('kombu.transport.pubsub.Channel._execution_type')\
as mkExeType:
mkExeType.return_value = ''
mkAppend = mkQoS.return_value.append = MagicMock()
newQ = self.channel._new_queue = MagicMock(
return_value="foo/bar")
mkQ = MagicMock()
mkQ.empty = MagicMock(return_value=True)
mkQ.full = MagicMock(return_value=False)
mkPut = mkQ.put = MagicMock()
mkGet = mkQ.get = MagicMock(return_value=msg1)
self.channel.temp_cache["foo/bar"] = mkQ
resp = MagicMock()
resp.received_messages = [msg1, msg2]
mkSub.return_value.pull = MagicMock(return_value=resp)
qosCalls = [
call(1, (msg1, "foo/bar")),
call(2, (msg2, "foo/bar"))
]
putCalls = [
call(msg1),
call(msg2)
]
msg = self.channel._get("foo")
self.assertIsInstance(msg, OuterMsg)
self.assertEqual(msg.message.message_id, 1)
newQ.assert_called_with("foo")
mkAppend.assert_has_calls(qosCalls)
mkPut.assert_has_calls(putCalls)
mkGet.assert_called_with(block=True)
def test__get_from_temp_cache(self):
''' test__get_from_temp_cache '''
msg = OuterMsg(msgId=1)
newQ = self.channel._new_queue = MagicMock(
return_value="foo/bar")
mkQ = MagicMock()
mkQ.empty = MagicMock(return_value=False)
mkGet = mkQ.get = MagicMock(return_value=msg)
self.channel.temp_cache["foo/bar"] = mkQ
msg = self.channel._get("foo")
self.assertIsInstance(msg, OuterMsg)
self.assertEqual(msg.message.message_id, 1)
newQ.assert_called_with("foo")
mkGet.assert_called_with(block=True)
def test_queue_declare_successful(self):
''' test_queue_declare_successful '''
newQ = self.channel._new_queue = MagicMock()
with patch('kombu.transport.pubsub.uuid') as mkID:
with patch('kombu.transport.pubsub.queue_declare_ok_t') as mkQok:
mkQok.return_value = "ok"
mkID.return_value = "foo"
rVal = self.channel.queue_declare(queue="test")
self.assertEqual(rVal, "ok")
newQ.assert_called_with("test")
mkQok.assert_called_with("test", 0, 0)
def test_queue_declare_raises_exception(self):
''' test_queue_declare_raises_exception '''
with patch('kombu.transport.pubsub.uuid') as mkID:
mkID.return_value = "foo"
with self.assertRaises(ChannelError):
self.channel.queue_declare(
queue="test", passive=True)
def test_queue_bind_creates_subscription(self):
''' test_queue_bind_creates_subscription '''
with patch('kombu.transport.pubsub.Channel.subscriber',
new_callable=PropertyMock) as mkSub:
with patch('kombu.transport.pubsub.Channel.ack_deadline_seconds',
new_callable=PropertyMock) as mkAck:
with patch('kombu.transport.pubsub.Channel.state') as mkState:
with patch('kombu.transport.pubsub.Channel._execution_type')\
as mkExeType:
mkExeType.return_value = ''
mkState.exchanges = {"test_ex": "TEST_EX"}
mkAck.return_value = 60
self.channel._new_queue = MagicMock(return_value="foo")
subcription = mkSub.return_value.create_subscription =\
MagicMock(return_value="/foo/bar")
self.channel.\
queue_bind(queue="test_q", exchange="test_ex")
subcription.assert_called_with(
"foo", "TEST_EX", ack_deadline_seconds=60)
def test_queue_bind_already_exists(self):
''' test_queue_bind_already_exists '''
with patch('kombu.transport.pubsub.Channel.subscriber',
new_callable=PropertyMock) as mkSub:
with patch('kombu.transport.pubsub.Channel.ack_deadline_seconds',
new_callable=PropertyMock) as mkAck:
with patch('kombu.transport.pubsub.Channel.state') as mkState:
with patch('kombu.transport.pubsub.Channel._execution_type')\
as mkExeType:
mkExeType.return_value = ''
mkState.exchanges = {"test_ex": "TEST_EX"}
mkAck.return_value = 60
self.channel._new_queue = MagicMock(return_value="foo")
mkCreate = mkSub.return_value.create_subscription =\
Mock(side_effect=AlreadyExists(1))
rVal = self.channel.\
queue_bind(queue="test_q", exchange="test_ex")
mkCreate.assert_called_with(
"foo", "TEST_EX", ack_deadline_seconds=60)
self.assertIsNone(rVal)
def test_queue_bind_raises_exception(self):
''' test_queue_bind_raises_exception '''
with patch('kombu.transport.pubsub.Channel.subscriber',
new_callable=PropertyMock) as mkSub:
with patch('kombu.transport.pubsub.Channel.ack_deadline_seconds',
new_callable=PropertyMock) as mkAck:
with patch('kombu.transport.pubsub.Channel.state',
new_callable=PropertyMock) as mkState:
with patch(
'kombu.transport.pubsub.Channel._execution_type')\
as mkExeType:
mkExeType.return_value = ''
mkState.exchanges = {"test_ex": "TEST_EX"}
mkAck.return_value = 60
self.channel._new_queue = MagicMock(return_value="foo")
mkCreate = mkSub.return_value.create_subscription =\
Mock(side_effect=Exception)
with self.assertRaises(Exception):
self.channel.\
queue_bind(queue="test_q", exchange="test_ex")
mkCreate.assert_called_with(
"foo", "TEST_EX", ack_deadline_seconds=60)
@patch('kombu.transport.pubsub.Channel.state', new_callable=PropertyMock)
def test_exchange_declare_create_topic(self, mkState):
''' test_exchange_declare_create_topic '''
with patch('kombu.transport.pubsub.Channel.publisher',
new_callable=PropertyMock) as mkPub:
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "test_project_id"
mkState.return_value.exchanges = {}
path = self.channel._get_topic_path =\
MagicMock(return_value="topic/foo")
topic = mkPub.return_value.create_topic = MagicMock()
self.channel.exchange_declare(exchange="test_ex")
path.assert_called_with("test_ex")
topic.assert_called_with("topic/foo")
self.assertEqual(
mkState.return_value.exchanges["test_ex"], "topic/foo")
@patch('kombu.transport.pubsub.Channel.state', new_callable=PropertyMock)
def test_exchange_declare_already_exists(self, mkState):
''' test_exchange_declare_already_exists '''
with patch('kombu.transport.pubsub.Channel.publisher',
new_callable=PropertyMock) as mkPub:
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "test_project_id"
mkState.return_value.exchanges = {}
path = self.channel._get_topic_path =\
MagicMock(return_value="topic/foo")
topic = mkPub.return_value.create_topic = MagicMock(
side_effect=AlreadyExists(1))
self.channel.exchange_declare(exchange="test_ex")
path.assert_called_with("test_ex")
topic.assert_called_with("topic/foo")
self.assertEqual(
mkState.return_value.exchanges["test_ex"], "topic/foo")
@patch('kombu.transport.pubsub.Channel.state', new_callable=PropertyMock)
    def test_exchange_declare_raises_exception(self, mkState):
        ''' test_exchange_declare_raises_exception '''
with patch('kombu.transport.pubsub.Channel.publisher',
new_callable=PropertyMock) as mkPub:
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "test_project_id"
mkState.return_value.exchanges = {}
path = mkPub.return_value.topic_path =\
MagicMock(return_value="topic/foo")
topic = mkPub.return_value.create_topic = MagicMock(
side_effect=Exception)
with self.assertRaises(Exception):
self.channel.exchange_declare(exchange="test_ex")
path.assert_called_with("test_project_id", "test_ex")
topic.assert_called_with("topic/foo")
@patch('kombu.transport.pubsub.dumps')
def test_basic_publish_calls_create_cloud_task(self, mkDumps):
''' test_basic_publish_calls_create_cloud_task '''
message = {"body": '{"eta": null}'}
with patch('kombu.transport.pubsub.Channel._publish') as mkPublish:
mkDumps.return_value = message
self.channel.basic_publish(message, "topic/foo")
mkPublish.assert_called_with("topic/foo", message)
@patch('kombu.transport.pubsub.dumps')
def test_basic_publish_calls_publish(self, mkDumps):
''' test_basic_publish_calls_publish '''
message = {"body": '{"eta": 10}'}
with patch('kombu.transport.pubsub.Channel._create_cloud_task')\
as mkCreate:
mkDumps.return_value = message
self.channel.basic_publish(message, "topic/foo")
mkCreate.assert_called_with("topic/foo", message)
@patch('kombu.transport.pubsub.dumps')
def test__publish(self, mkDumps):
''' test_basic_publish '''
mkDumps.return_value = '{"body": "{\\"eta\\": null}"}'
with patch('kombu.transport.pubsub.Channel.publisher',
new_callable=PropertyMock) as mkPub:
with patch('kombu.transport.pubsub.Channel.project_id',
new_callable=PropertyMock) as mkID:
mkID.return_value = "test_project_id"
path = mkPub.return_value.topic_path = MagicMock(
return_value="topic/foo")
future = MagicMock()
future.result = MagicMock(return_value="foo")
publish = mkPub.return_value.publish = MagicMock(
return_value=future)
rVal = self.channel._publish("test_ex", {"body": '{"eta": null}'})
path.assert_called_with("test_project_id", "test_ex")
mkDumps.assert_called_with({"body": '{"eta": null}'})
publish.assert_called_with("topic/foo", '{"body": "{\\"eta\\": null}"}')
self.assertEqual(rVal, "foo")
@patch('kombu.transport.pubsub.dumps')
def test__create_cloud_task(self, mkDumps):
''' test__create_cloud_task '''
message = {"body": '{"eta": 10}'}
mkDumps.return_value = message
with patch('kombu.transport.pubsub.Channel._get_task') as mkGet:
with patch('kombu.transport.pubsub.Channel.cloud_task',
new_callable=PropertyMock) as mkCreateTask:
with patch('kombu.transport.pubsub.Channel.cloud_task_queue_path',
new_callable=PropertyMock) as mktaskPath:
mktaskPath.return_value = "dummy_path"
mkGet.return_value = {"task": "dummy"}
mkCreate = mkCreateTask.return_value.create_task = MagicMock()
self.channel._create_cloud_task("test_ex", message)
mkCreate.assert_called_with("dummy_path", {"task": "dummy"})
mkGet.assert_called_with(10, "test_ex", message)
def test__get_task(self):
''' test__get_task '''
with patch('kombu.transport.pubsub.Channel.transport_options',
new_callable=PropertyMock) as mkTransport:
with patch('kombu.transport.pubsub.Channel.service_account_email',
new_callable=PropertyMock) as mkEmail:
with patch('kombu.transport.pubsub.Channel.cloud_task_queue_path',
new_callable=PropertyMock) as mktaskPath:
mktaskPath.return_value = "dummy_path"
mkEmail.return_value = "[email protected]"
mkTransport.return_value = {"CLOUD_FUNCTION_PUBLISHER": "dummy_func"}
message = {"body": '{"eta": "2000-01-01 12:00:00.000000"}'}
eta = "2000-01-01 12:00:00.000000"
rVal = self.channel._get_task(eta, "test_ex", message)
self.assertEqual(rVal["http_request"]["url"], "dummy_func")
@patch('google.cloud.pubsub_v1.PublisherClient')
def test_publisher_creates_connection(self, mkPub):
''' test_publisher '''
mkPub.return_value = MagicMock()
rVal = self.channel.publisher
mkPub.assert_called()
self.assertIsInstance(rVal, MagicMock)
@patch('google.cloud.pubsub_v1.SubscriberClient')
def test_subscriber_creates_connection(self, mkSub):
''' test_publisher '''
mkSub.return_value = MagicMock()
rVal = self.channel.subscriber
mkSub.assert_called()
self.assertIsInstance(rVal, MagicMock)
def test_transport_options(self):
''' test_transport_options '''
out = self.channel.transport_options
        self.assertEqual(out, self.mocktrans)
def test_project_id_returns_id(self):
''' test_project_id_returns_id '''
mock_out = self.mocktrans.get.return_value = {
'PROJECT_ID': 'mock_project_id'}
out = self.channel.project_id
        self.assertEqual(out, mock_out)
@patch('os.getenv')
def test_project_id_get_id(self, mkOs):
''' test_project_id_get_id '''
mkOs.return_value = 'mockValue'
self.mocktrans.get.return_value = None
rVal = self.channel.project_id
        self.assertEqual(rVal, 'mockValue')
def test_max_messages(self):
''' test_max_messages '''
mock_out = self.mocktrans.get.return_value = {
'PROJECT_ID': 'mock_project_id'}
out = self.channel.max_messages
        self.assertEqual(out, mock_out)
def test_ack_deadline_seconds(self):
''' test_ack_deadline_seconds '''
mock_out = self.mocktrans.get.return_value = {
'PROJECT_ID': 'mock_project_id'}
out = self.channel.ack_deadline_seconds
        self.assertEqual(out, mock_out)
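# QoS tracks in-flight deliveries: append() stores a (message, subscription
# path) pair under its delivery tag, and ack() pops that pair and calls
# subscriber.acknowledge with the message's ack_id on that subscription.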
class TestQoS(unittest.TestCase):
''' TestQoS '''
def setUp(self):
mkChannel = MagicMock()
self.qos = QoS(mkChannel)
def test_append(self):
''' test_append '''
self.qos.append('foo', 'bar')
self.assertEqual(self.qos._not_yet_acked['foo'], 'bar')
def test_ack(self):
''' test_ack '''
mkPop = self.qos._not_yet_acked.pop =\
MagicMock(return_value=(InnerMsg(ackId=1), "foo/bar"))
mkAck = self.qos._channel.subscriber.acknowledge =\
MagicMock()
self.qos.ack('foo')
mkPop.assert_called_with('foo')
mkAck.assert_called_with("foo/bar", [1])
class TestMessage(unittest.TestCase):
''' TestMessage '''
DUMMY_BODY = {
'body': '{"args": [2, 1], "taskset": null, task": "multiply"}',
'headers': {},
'content-type': 'application/json',
'properties': {
'reply_to': 'cfc92bde-1edb-3bdf-aa3b-224b4a21be00',
'correlation_id': '248fdf79-75bb-4368-81b7-34b0b60196e6',
'delivery_mode': 2,
'delivery_info': {'priority': 0}
},
'content-encoding': u'utf-8'
}
def setUp(self):
with patch('kombu.transport.pubsub.loads') as mkloads:
mkloads.return_value = self.DUMMY_BODY
self.msg = Message(
MagicMock(), OuterMsg(delivery_tag=1))
@patch('kombu.transport.pubsub.Message.channel')
def test_ack(self, mkChannel):
'''test_ack'''
mkAck = mkChannel.basic_ack = MagicMock()
self.msg.ack()
mkAck.assert_called_with(self.msg.delivery_tag)
| 47.205543 | 89 | 0.590166 |
4a1e2eadafff678a35eb5220106cb17efe480cc8 | 8,016 | py | Python | tests/handlers/test_base_handler.py | appotry/thumbor | c2e75918da09ddd3086e8eeaca00d1d2747cf57c | [
"MIT"
] | null | null | null | tests/handlers/test_base_handler.py | appotry/thumbor | c2e75918da09ddd3086e8eeaca00d1d2747cf57c | [
"MIT"
] | null | null | null | tests/handlers/test_base_handler.py | appotry/thumbor | c2e75918da09ddd3086e8eeaca00d1d2747cf57c | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/thumbor/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com [email protected]
import shutil
import tempfile
from os.path import abspath, dirname, join
from shutil import which
from urllib.parse import quote
from preggy import expect
import tornado.web
from tornado.testing import gen_test
from tests.fixtures.images import (
alabama1,
default_image,
invalid_quantization,
space_image,
)
from tests.base import TestCase
from thumbor.handlers import BaseHandler
from thumbor.config import Config
from thumbor.context import Context, ServerParameters
from thumbor.engines.pil import Engine
from thumbor.importer import Importer
# pylint: disable=broad-except,abstract-method,attribute-defined-outside-init,line-too-long,too-many-public-methods
# pylint: disable=too-many-lines
JPEGTRAN_AVAILABLE = which("jpegtran") is not None
EXIFTOOL_AVAILABLE = which("exiftool") is not None
class ErrorHandler(BaseHandler):
def get(self):
self._error(403)
class BaseHandlerTestApp(tornado.web.Application):
def __init__(self, context):
self.context = context
super().__init__([(r"/error", ErrorHandler)])
class BaseImagingTestCase(TestCase):
def setUp(self):
self.root_path = tempfile.mkdtemp()
self.loader_path = abspath(join(dirname(__file__), "../fixtures/images/"))
self.base_uri = "/image"
super().setUp()
def tearDown(self):
shutil.rmtree(self.root_path)
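# Exercises the imaging handlers end to end: get_context() wires a file
# loader rooted at the image fixtures and file storage in a temporary
# directory, so each fetch below runs through a real thumbor Context.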
class ImagingOperationsTestCase(BaseImagingTestCase):
def get_context(self):
cfg = Config(SECURITY_KEY="ACME-SEC")
cfg.LOADER = "thumbor.loaders.file_loader"
cfg.FILE_LOADER_ROOT_PATH = self.loader_path
cfg.STORAGE = "thumbor.storages.file_storage"
cfg.FILE_STORAGE_ROOT_PATH = self.root_path
cfg.QUALITY = "keep"
cfg.SVG_DPI = 200
importer = Importer(cfg)
importer.import_modules()
server = ServerParameters(8889, "localhost", "thumbor.conf", None, "info", None)
server.security_key = "ACME-SEC"
return Context(server, cfg, importer)
@gen_test
async def test_can_get_image(self):
response = await self.async_fetch("/unsafe/smart/image.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(default_image())
@gen_test
async def test_can_get_image_without_extension(self):
response = await self.async_fetch("/unsafe/smart/image")
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(default_image())
@gen_test
async def test_get_unknown_image_returns_not_found(self):
response = await self.async_fetch("/unsafe/smart/imag")
expect(response.code).to_equal(404)
@gen_test
async def test_can_get_unicode_image(self):
response = await self.async_fetch(
u"/unsafe/%s"
% quote(u"15967251_212831_19242645_АгатавЗоопарке.jpg".encode("utf-8"))
)
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(default_image())
@gen_test
async def test_can_get_signed_regular_image(self):
response = await self.async_fetch(
"/_wIUeSaeHw8dricKG2MGhqu5thk=/smart/image.jpg"
)
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(default_image())
@gen_test
async def test_url_without_unsafe_or_hash_fails(self):
response = await self.async_fetch("/alabama1_ap620%C3%A9.jpg")
expect(response.code).to_equal(400)
@gen_test
async def test_url_without_image(self):
response = await self.async_fetch("/unsafe/")
expect(response.code).to_equal(400)
@gen_test
async def test_utf8_encoded_image_name_with_encoded_url(self):
url = "/lc6e3kkm_2Ww7NWho8HPOe-sqLU=/smart/alabama1_ap620%C3%A9.jpg"
response = await self.async_fetch(url)
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(alabama1())
@gen_test
async def test_url_with_encoded_hash(self):
url = "/%D1%80=/alabama1_ap620%C3%A9.jpg"
response = await self.async_fetch(url)
expect(response.code).to_equal(400)
@gen_test
async def test_image_with_spaces_on_url(self):
response = await self.async_fetch(u"/unsafe/image%20space.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(space_image())
@gen_test
async def test_can_get_image_with_filter(self):
response = await self.async_fetch(
"/5YRxzS2yxZxj9SZ50SoZ11eIdDI=/filters:fill(blue)/image.jpg"
)
expect(response.code).to_equal(200)
@gen_test
async def test_can_get_image_with_invalid_quantization_table(self):
response = await self.async_fetch("/unsafe/invalid_quantization.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_similar_to(invalid_quantization())
@gen_test
async def test_getting_invalid_image_returns_bad_request(self):
response = await self.async_fetch("/unsafe/image_invalid.jpg")
expect(response.code).to_equal(400)
@gen_test
async def test_getting_invalid_watermark_returns_bad_request(self):
response = await self.async_fetch(
"/unsafe/filters:watermark(boom.jpg,0,0,0)/image.jpg"
)
expect(response.code).to_equal(400)
@gen_test
async def test_can_read_monochromatic_jpeg(self):
response = await self.async_fetch("/unsafe/grayscale.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_jpeg()
@gen_test
async def test_can_read_image_with_small_width_and_no_height(self):
response = await self.async_fetch("/unsafe/0x0:1681x596/1x/image.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_jpeg()
@gen_test
async def test_can_read_cmyk_jpeg(self):
response = await self.async_fetch("/unsafe/cmyk.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_jpeg()
@gen_test
async def test_can_read_cmyk_jpeg_as_png(self):
response = await self.async_fetch("/unsafe/filters:format(png)/cmyk.jpg")
expect(response.code).to_equal(200)
expect(response.body).to_be_png()
@gen_test
async def test_can_read_image_svg_with_px_units_and_convert_png(self):
response = await self.async_fetch("/unsafe/Commons-logo.svg")
expect(response.code).to_equal(200)
expect(response.body).to_be_png()
engine = Engine(self.context)
engine.load(response.body, ".png")
expect(engine.size).to_equal((1024, 1376))
@gen_test
async def test_can_read_image_svg_with_inch_units_and_convert_png(self):
response = await self.async_fetch("/unsafe/Commons-logo-inches.svg")
expect(response.code).to_equal(200)
expect(response.body).to_be_png()
engine = Engine(self.context)
engine.load(response.body, ".png")
expect(engine.size).to_equal((2000, 2600))
@gen_test
async def test_can_read_8bit_tiff_as_png(self):
response = await self.async_fetch("/unsafe/gradient_8bit.tif")
expect(response.code).to_equal(200)
expect(response.body).to_be_png()
@gen_test
async def test_can_read_16bit_lsb_tiff_as_png(self):
response = await self.async_fetch("/unsafe/gradient_lsb_16bperchannel.tif")
expect(response.code).to_equal(200)
expect(response.body).to_be_png()
@gen_test
async def test_can_read_16bit_msb_tiff_as_png(self):
response = await self.async_fetch("/unsafe/gradient_msb_16bperchannel.tif")
expect(response.code).to_equal(200)
expect(response.body).to_be_png()
| 35.157895 | 115 | 0.702096 |
4a1e2f667148e2f17921d39d0824565f234e1ebf | 459 | py | Python | RoboticsLanguage/Transformers/Events/Language.py | robotcaresystems/roboticslanguage | 3bb7a2bf64ab8e9068889713fbeb18a45cd5a3ed | [
"Apache-2.0"
] | 64 | 2018-05-15T14:36:44.000Z | 2022-03-09T05:00:31.000Z | RoboticsLanguage/Transformers/Events/Language.py | robotcaresystems/roboticslanguage | 3bb7a2bf64ab8e9068889713fbeb18a45cd5a3ed | [
"Apache-2.0"
] | 9 | 2018-04-17T21:12:27.000Z | 2019-11-08T20:53:32.000Z | RoboticsLanguage/Transformers/Events/Language.py | robotcaresystems/roboticslanguage | 3bb7a2bf64ab8e9068889713fbeb18a45cd5a3ed | [
"Apache-2.0"
] | 10 | 2018-03-27T12:09:12.000Z | 2021-02-16T08:07:26.000Z | #
# This is the Robotics Language compiler
#
# Parameters.py: Definition of the parameters for this package
#
# Created on: 05 September, 2018
# Author: Gabriel Lopes
# Licence: license
# Copyright: copyright
#
from RoboticsLanguage.Base.Types import arguments, returns
language = {
'when': {
'definition': {
'arguments': arguments('boolean anything*'),
'returns': returns('nothing')
}
}
}
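# Usage sketch (an assumption inferred from the signature above, not taken
# from the package documentation): `when` takes a boolean condition followed
# by any number of statements and returns nothing, e.g. in RoL source:
#   when(sensor_value > threshold, print('triggered'))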
| 21.857143 | 64 | 0.620915 |