content (string, 7-928k chars) | avg_line_length (float64, 3.5-33.8k) | max_line_length (int64, 6-139k) | alphanum_fraction (float64, 0.08-0.96) | licenses (sequence) | repository_name (string, 7-104 chars) | path (string, 4-230 chars) | size (int64, 7-928k) | lang (string, 1 class)
---|---|---|---|---|---|---|---|---
# Script that uses meshgrid to get map coordinates and then plots
# the DEM in 3d.
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from osgeo import gdal
ds = gdal.Open(r'D:\osgeopy-data\Washington\dem\sthelens_utm.tif')
band = ds.GetRasterBand(1)
ov_band = band.GetOverview(band.GetOverviewCount() - 3)
data = ov_band.ReadAsArray()
# Calculate bounding coordinates.
geotransform = ds.GetGeoTransform()
minx = geotransform[0]
maxy = geotransform[3]
maxx = minx + ov_band.XSize * geotransform[1]
miny = maxy + ov_band.YSize * geotransform[5]
# Get the x and y arrays.
x = np.arange(minx, maxx, geotransform[1])
y = np.arange(maxy, miny, geotransform[5])
x, y = np.meshgrid(x[:ov_band.XSize], y[:ov_band.YSize])
# Make the 3D plot.
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.plot_surface(x, y, data, cmap='gist_earth', lw=0)
plt.axis('equal')
# # Change the viewpoint and turn the ticks off.
# ax.view_init(elev=55, azim=60)
# plt.axis('off')
# # Create an animation.
# import matplotlib.animation as animation
# def animate(i):
# ax.view_init(elev=65, azim=i)
# anim = animation.FuncAnimation(
# fig, animate, frames=range(0, 360, 10), interval=100)
# plt.axis('off')
# # If you have FFmpeg and it's in your path, you can save the
# # animation.
# anim.save('d:/temp/helens.mp4', 'ffmpeg')
plt.show()
| 27.4 | 66 | 0.713869 | [
"MIT"
] | ClaudeCoulombe/osgeopy-code | Chapter13/listing13_7.py | 1,370 | Python |
# -*- coding: utf-8 -*-
"""
:mod:`plots` -- Tests data plots
================================
.. module:: plots
:platform: Unix, Windows
:synopsis: Tests of the raster plots and processed data plots.
.. moduleauthor:: Andre Rocha <[email protected]>
"""
import matplotlib.pyplot as plt
from matplotlib.testing.decorators import image_comparison
from src.rocha import plots
@image_comparison(baseline_images=['test_plot'],
extensions=['png'])
def test_plot():
"""
Test the rasters plot as multiples subplots.
"""
rasters = ['data/relatives/forest_111.tif',
'data/relatives/forest_112.tif',
'data/relatives/forest_113.tif',
'data/relatives/forest_121.tif',
'data/relatives/forest_122.tif',
'data/relatives/forest_123.tif',
'data/relatives/forest_211.tif',
'data/relatives/forest_212.tif',
'data/relatives/forest_213.tif',
'data/relatives/forest_221.tif',
'data/relatives/forest_222.tif',
'data/relatives/forest_223.tif']
title = 'Mean precipitation (mm/day)'
subtitles = ['HadGEM2 RCP4.5', 'HadGEM2 RCP8.5', 'MIROC5 RCP4.5', 'MIROC5 RCP8.5']
labels = ['2011-2040', '2041-2070', '2071-2100']
color = 'RdYlBu_r'
rows = 3
cols = 4
plots.maps(rasters, rows, cols, color, title, subtitles, labels) | 31.977778 | 86 | 0.599027 | [
"MIT"
] | rochamatcomp/python-rocha | tests/test_plots.py | 1,439 | Python |
from models.flowpp_cifar import CifarFlowPP
from models.rvae import RVAE
from models.modules import init_mode
model_registry = {
# MNIST
'rvae_mnist': lambda **kwargs: RVAE(z_size=16, h_size=40, kl_min=0.1,
x_channels=1, **kwargs),
# CIFAR
'tiny_rvae': lambda **kwargs: RVAE(z_size=2, h_size=2, kl_min=0.1, **kwargs),
'rvae': lambda **kwargs: RVAE(z_size=8, h_size=256, kl_min=0.1, **kwargs),
'tiny_flow_pp': lambda **kwargs: CifarFlowPP(hdim=4, blocks=1, dequant_blocks=1, mix_components=1,
attn_version=False, force_float32_cond=False, **kwargs),
'flow_pp': lambda **kwargs: CifarFlowPP(hdim=120, blocks=10, dequant_blocks=2, mix_components=8, attn_version=False,
force_float32_cond=False, dropout=0.2, **kwargs),
'flow_pp_wide': lambda **kwargs: CifarFlowPP(hdim=180, blocks=10, dequant_blocks=2, mix_components=8, attn_version=False,
force_float32_cond=False, dropout=0.2, **kwargs)
}
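# A minimal usage sketch (hypothetical; the accepted keyword arguments depend
# on the RVAE / CifarFlowPP constructors):
#
#   model = model_registry['tiny_rvae']()        # small CIFAR model for smoke tests
#   mnist_model = model_registry['rvae_mnist']() # MNIST variant with 1 input channel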
| 48.173913 | 125 | 0.611011 | [
"MIT"
] | tom-bird/binary-gen-models | models/__init__.py | 1,108 | Python |
"""
A python script to run a parameter sweep
"""
# Python tools
import numpy as np
import yaml
import os
# WISDEM tools
from wisdem.aeroelasticse import runFAST_pywrapper, CaseGen_General
from wisdem.aeroelasticse.Util import FileTools
# ROSCO tools
from ROSCO_toolbox import controller as ROSCO_controller
from ROSCO_toolbox import turbine as ROSCO_turbine
from ROSCO_toolbox import utilities as ROSCO_utilities
from pCrunch import CaseGen_Control, Analysis, Processing
# FLAGS
eagle = True
multi = False
# Controller tuning yaml
if eagle:
parameter_filename = '/home/nabbas/Documents/TurbineModels/ControllerYamls/BAR.yaml'
else:
parameter_filename = '../../Turbine_Tuning/BAR/BAR.yaml'
# Generate case inputs for control related stuff
input_params = ['zeta_flp', 'omega_flp']
DISCON_params = ['Flp_Kp', 'Flp_Ki']
# values = [[0.7], [2.73]]
values = [np.around(np.arange(0.5, 2.5, 0.05), decimals=3), # use np.around to avoid precision issues
np.around(np.arange(2.2, 3.5, 0.05) , decimals=3)]
group = 1
# Some path specifics/
if eagle:
FAST_InputFile = 'BAR_10p_75s.fst' # FAST input file (ext=.fst)
FAST_directory = '/projects/bar/nabbas/TurbineModels/BAR_10p_75s'
FAST_runDirectory = '/projects/bar/nabbas/batch_GainSweep_10p_75s_2'
wind_dir = '/projects/bar/nabbas/TurbineModels/wind'
dll_filename = '/home/nabbas/ROSCO_toolbox/ROSCO/build/libdiscon.so'
Turbsim_exe = 'turbsim'
FAST_exe = 'openfast'
else:
FAST_InputFile = 'OpenFAST_BAR_10.fst' # FAST input file (ext=.fst)
FAST_directory = '/Users/nabbas/Documents/TurbineModels/BAR/OpenFAST_Models/BAR_10/'
FAST_runDirectory = 'temp'
wind_dir = '/Users/nabbas/Documents/TurbineModels/BAR/wind'
dll_filename = '/Users/nabbas/Documents/TurbineModels/TurbineControllers/FortranControllers/ROSCO/build/libdiscon.dylib'
Turbsim_exe = 'turbsim_dev'
FAST_exe = 'openfast_dev'
case_name_base = 'BAR_10p_75s'
debug_level = 2
# Wind
WindType = [3]
Uref = [8.25, 10.25]
seeds = [13428, 1524]
# Time
TMax = 330
# Turbine Definition
D = 206 # Rotor Diameter
z_hub = 137 # Tower Height
# Multiprocessing/Eagle related
if eagle:
cores = 36
else:
cores = 4
# Initialize CaseGen
cgc = CaseGen_Control.CaseGen_Control(parameter_filename)
# Modify some parameters
cgc.path_params['FAST_InputFile'] = FAST_InputFile
cgc.path_params['FAST_directory'] = FAST_directory
cgc.AnalysisTime = TMax
cgc.case_name_base = case_name_base
cgc.D = D
cgc.z_hub = z_hub
cgc.debug_level = debug_level
cgc.overwrite = True
# Generate wind speeds
cgc.seeds = seeds
cgc.wind_dir = wind_dir
cgc.Turbsim_exe = Turbsim_exe
wind_file, wind_file_type = cgc.gen_turbwind(Uref)
# Generate control case inputs
# NOTE: Usually, group=1 is easiest. Then some baseline characteristics in group 0, etc...
case_inputs, tuning_inputs = cgc.gen_control_cases(input_params, DISCON_params, values, group)
# Add time specification if group 0
if group == 0:
ci_key = list(case_inputs.keys())[0]
TMax_list = [TMax]*len(case_inputs[ci_key]['vals'])
case_inputs[("Fst", "TMax")] = {'vals': TMax_list, 'group': 0}
else:
case_inputs[("Fst", "TMax")] = {'vals': [TMax], 'group': 0}
# DISCON
case_inputs[('ServoDyn', 'DLL_FileName')] = {'vals': [dll_filename], 'group': 0}
# Wind
case_inputs[("InflowWind", "WindType")] = {'vals': [wind_file_type], 'group': 0}
case_inputs[("InflowWind", "Filename")] = {'vals': [wind_file], 'group': 0}
# FAST details
fastBatch = runFAST_pywrapper.runFAST_pywrapper_batch(FAST_ver='OpenFAST', dev_branch=True)
fastBatch.FAST_exe = FAST_exe # Path to executable
fastBatch.FAST_InputFile = FAST_InputFile
fastBatch.FAST_directory = FAST_directory
fastBatch.FAST_runDirectory = FAST_runDirectory
fastBatch.debug_level = debug_level
# Generate cases
case_list, case_name_list = CaseGen_General.CaseGen_General(
case_inputs, dir_matrix=fastBatch.FAST_runDirectory, namebase=case_name_base)
# Append case matrix with controller tuning parameters
for file in os.listdir(fastBatch.FAST_runDirectory):
if file.endswith(".yaml"):
yfile = file
yamldata = FileTools.load_yaml(os.path.join(fastBatch.FAST_runDirectory, yfile), package=1)
CaseGen_Control.append_case_matrix_yaml(
fastBatch.FAST_runDirectory, yfile, tuning_inputs, 'tuning_inputs')
# Make sure flags are on
var_out = [
# ElastoDyn
"BldPitch1", "BldPitch2", "BldPitch3", "Azimuth", "RotSpeed", "GenSpeed", "NacYaw",
"OoPDefl1", "IPDefl1", "TwstDefl1", "OoPDefl2", "IPDefl2", "TwstDefl2", "OoPDefl3",
"IPDefl3", "TwstDefl3", "TwrClrnc1", "TwrClrnc2", "TwrClrnc3", "NcIMUTAxs", "NcIMUTAys",
"NcIMUTAzs", "TTDspFA", "TTDspSS", "TTDspTwst", "PtfmSurge", "PtfmSway", "PtfmHeave",
"PtfmRoll", "PtfmPitch", "PtfmYaw", "PtfmTAxt", "PtfmTAyt", "PtfmTAzt", "RootFxc1",
"RootFyc1", "RootFzc1", "RootMxc1", "RootMyc1", "RootMzc1", "RootFxc2", "RootFyc2",
"RootFzc2", "RootMxc2", "RootMyc2", "RootMzc2", "RootFxc3", "RootFyc3", "RootFzc3",
"RootMxc3", "RootMyc3", "RootMzc3", "Spn1MLxb1", "Spn1MLyb1", "Spn1MLzb1", "Spn1MLxb2",
"Spn1MLyb2", "Spn1MLzb2", "Spn1MLxb3", "Spn1MLyb3", "Spn1MLzb3", "RotThrust", "LSSGagFya",
"LSSGagFza", "RotTorq", "LSSGagMya", "LSSGagMza", "YawBrFxp", "YawBrFyp", "YawBrFzp",
"YawBrMxp", "YawBrMyp", "YawBrMzp", "TwrBsFxt", "TwrBsFyt", "TwrBsFzt", "TwrBsMxt",
"TwrBsMyt", "TwrBsMzt", "TwHt1MLxt", "TwHt1MLyt", "TwHt1MLzt",
# ServoDyn
"GenPwr", "GenTq",
# AeroDyn15
"RtArea", "RtVAvgxh", "B1N3Clrnc", "B2N3Clrnc", "B3N3Clrnc",
"RtAeroCp", 'RtAeroCq', 'RtAeroCt', 'RtTSR',
# InflowWind
"Wind1VelX", "Wind1VelY", "Wind1VelZ",
# FLAPS
# "BLFLAP1", "BLFLAP2", "BLFLAP3", "RtVAvgxh", "OoPDefl1")
]
channels = {}
for var in var_out:
channels[var] = True
fastBatch.channels = channels
fastBatch.case_list = case_list
fastBatch.case_name_list = case_name_list
if multi:
fastBatch.run_multi(cores)
# fastBatch.run_mpi()
else:
fastBatch.run_serial()
# Post processing
case_info = FileTools.load_yaml(FAST_runDirectory + '/case_matrix.yaml', package=1)
outfiles = [os.path.join(FAST_runDirectory, fname + '.outb') for fname in case_info['Case_Name']]
fp = Processing.FAST_Processing()
fp.OpenFAST_outfile_list = outfiles
fp.t0 = 30
fp.parallel_analysis = True
fp.verbose = True
fp.results_dir = os.path.join(FAST_runDirectory, 'stats')
fp.save_LoadRanking = True
fp.save_SummaryStats = True
stats, load_ranking = fp.batch_processing()
| 34.280423 | 124 | 0.722334 | [
"Apache-2.0"
] | NREL/pCrunch | runBatch/run_FlapGainSweep_BAR.py | 6,479 | Python |
import requests
import time
from bs4 import BeautifulSoup
class crawl:
"""Keep track of time between scrape requests.
args:
wait: time between requests
retry_max: number of times to retry
"""
def __init__(self, wait, retry_max):
self.wait = wait
self.retry_max = retry_max
self.last_scrape = time.time() - (self.wait * 0.5)
# can let user scrape faster the first time.
def since_scrape(self):
return (time.time() - self.last_scrape) > self.wait
def wait_till(self):
while not self.since_scrape():
time.sleep(1)
def get(self, url):
count = 0
while count < self.retry_max:
time.sleep(self.wait * count) # sleep for successively longer times
try:
self.wait_till()
response = requests.get(url)
self.last_scrape = time.time()
if response.status_code == requests.codes.ok:
return response
else:
raise Exception(
"Non-standard issue connecting to "
+ f"{url}: {response.status_code}."
)
except requests.exceptions.RequestException as e:
pass
count += 1
def get_html(self, url):
return self.get(url).text
def get_soup(self, url):
return BeautifulSoup(self.get(url).text, "html.parser")
def get_json(self, url):
return self.get(url).json()
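# A minimal usage sketch (values are illustrative):
#
#   crawler = crawl(wait=5, retry_max=3)
#   soup = crawler.get_soup("https://example.com")
#
# Note that get() returns None once retry_max is exhausted, so callers of
# get_html/get_soup/get_json should be prepared to handle that case.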
| 29.226415 | 80 | 0.550032 | [
"Unlicense"
] | seanbreckenridge/MALUserVsAverage | manual_crawler.py | 1,549 | Python |
# Copyright 2019 Uber Technologies, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
import json
from six.moves import zip
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import optimizers
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import serialization
def save_tf_keras_optimizer(optimizer, h5py_file):
if isinstance(optimizer, optimizers.TFOptimizer):
logging.warning(
'TensorFlow optimizers do not '
'make it possible to access '
'optimizer attributes or optimizer state '
'after instantiation. '
'As a result, we cannot save the optimizer '
'as part of the model save file.'
'You will have to compile your model again after loading it. '
'Prefer using a Keras optimizer instead '
'(see keras.io/optimizers).')
else:
h5py_file.attrs['training_config'] = json.dumps(
{
'optimizer_config': {
'class_name': optimizer.__class__.__name__,
'config': optimizer.get_config()
}
},
default=serialization.get_json_type).encode('utf8')
# Save optimizer weights.
symbolic_weights = getattr(optimizer, 'weights')
if symbolic_weights:
optimizer_weights_group = h5py_file.create_group('optimizer_weights')
weight_values = K.batch_get_value(symbolic_weights)
weight_names = []
for w, val in zip(symbolic_weights, weight_values):
name = str(w.name)
weight_names.append(name.encode('utf8'))
optimizer_weights_group.attrs['weight_names'] = weight_names
for name, val in zip(weight_names, weight_values):
param_dset = optimizer_weights_group.create_dataset(
name, val.shape, dtype=val.dtype)
if not val.shape:
# scalar
param_dset[()] = val
else:
param_dset[:] = val
h5py_file.flush()
def load_tf_keras_optimizer(h5py_file, custom_objects=None):
if not custom_objects:
custom_objects = {}
def convert_custom_objects(obj):
"""Handles custom object lookup.
Arguments:
obj: object, dict, or list.
Returns:
The same structure, where occurrences
of a custom object name have been replaced
with the custom object.
"""
if isinstance(obj, list):
deserialized = []
for value in obj:
deserialized.append(convert_custom_objects(value))
return deserialized
if isinstance(obj, dict):
deserialized = {}
for key, value in obj.items():
deserialized[key] = convert_custom_objects(value)
return deserialized
if obj in custom_objects:
return custom_objects[obj]
return obj
optimizer, optimizer_weight_values = None, None
# instantiate optimizer
training_config = h5py_file.attrs.get('training_config')
training_config = json.loads(training_config.decode('utf-8'))
optimizer_config = training_config['optimizer_config']
optimizer = optimizers.deserialize(optimizer_config, custom_objects=custom_objects)
if 'optimizer_weights' in h5py_file:
optimizer_weights_group = h5py_file['optimizer_weights']
optimizer_weight_names = [
n.decode('utf8')
for n in optimizer_weights_group.attrs['weight_names']
]
optimizer_weight_values = [optimizer_weights_group[n].value for n in
optimizer_weight_names]
if optimizer_weight_values:
optimizer.set_weights(optimizer_weight_values)
return optimizer
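# A minimal round-trip sketch (assumes an open h5py.File handle; the path is
# illustrative only):
#
#   import h5py
#   with h5py.File('/tmp/optimizer_state.h5', 'w') as f:
#       save_tf_keras_optimizer(model.optimizer, f)
#   with h5py.File('/tmp/optimizer_state.h5', 'r') as f:
#       optimizer = load_tf_keras_optimizer(f)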
| 38.686441 | 87 | 0.62782 | [
"Apache-2.0"
] | HCYXAS/horovod | horovod/spark/keras/tensorflow.py | 4,565 | Python |
# -----------------------------------------------------------------------------
# Copyright (c) 2021 Trevor P. Martin. All rights reserved.
# Distributed under the MIT License.
# -----------------------------------------------------------------------------
from Data import encode_data
# from utils import cross_validation
from Models import utils
from Models import build_models
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import Perceptron
from sklearn.svm import LinearSVC
import matplotlib.pyplot as plt
import matplotlib.font_manager as font_manager
import numpy as np
import pandas as pd
import tensorflow as tf
import copy
class CNN01(tf.keras.Model):
@staticmethod
def build(rows, columns, channels, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns, channels)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Conv2D(
filters=32,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.Conv2D(
filters=64,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2)))
model.add(tf.keras.layers.Conv2D(
filters=128,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2)))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class CNN02(tf.keras.Model):
@staticmethod
def build(rows, columns, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Conv1D(
filters=32,
kernel_size=3,
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.Conv1D(
filters=64,
kernel_size=3,
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling1D(pool_size=2))
model.add(tf.keras.layers.Conv1D(
filters=128,
kernel_size=3,
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling1D(pool_size=2))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class CNN03(tf.keras.Model):
@staticmethod
def build(rows, columns, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Conv1D(
filters=32,
kernel_size=3,
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling1D(pool_size=2))
model.add(tf.keras.layers.Conv1D(
filters=64,
kernel_size=3,
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling1D(pool_size=2))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class CNN04(tf.keras.Model):
@staticmethod
def build(rows, columns, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Conv1D(
filters=32,
kernel_size=3,
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling1D(pool_size=2))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class CNN05(tf.keras.Model):
@staticmethod
def build(rows, columns, channels, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns, channels)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Conv2D(
filters=32,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.Conv2D(
filters=64,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.Conv2D(
filters=64,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.MaxPooling2D(pool_size=(2, 2)))
model.add(tf.keras.layers.Conv2D(
filters=128,
kernel_size=(3,3),
activation="relu",
padding="same"
)
)
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dropout(0.5))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class DNN01(tf.keras.Model):
@staticmethod
def build(rows, columns, units, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(units=units, activation='relu', kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(tf.keras.layers.Dense(units=units//2, kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(tf.keras.layers.Dropout(rate=0.15))
model.add(tf.keras.layers.Dense(units=units//4, kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class DNN02(tf.keras.Model):
@staticmethod
def build(rows, columns, units, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(units=units, activation='relu', kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(tf.keras.layers.Dropout(rate=0.50))
model.add(tf.keras.layers.Dense(units=units//2, kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class DNN03(tf.keras.Model):
@staticmethod
def build(rows, columns, units, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.Flatten())
model.add(tf.keras.layers.Dense(units=units*2, activation='relu', kernel_regularizer=tf.keras.regularizers.l2(0.001)))
model.add(tf.keras.layers.Dropout(rate=0.50))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
class RNN(tf.keras.Model):
@staticmethod
def build(rows, columns, units, classes):
model = tf.keras.Sequential()
input_shape = (rows, columns)
model.add(tf.keras.layers.InputLayer(input_shape=input_shape))
model.add(tf.keras.layers.LSTM(
units=units,
activation='tanh',
return_sequences=True,
)
)
model.add(tf.keras.layers.Dropout(rate=0.20))
model.add(tf.keras.layers.LSTM(
units=units//2,
activation='tanh',
)
)
model.add(tf.keras.layers.Dropout(rate=0.20))
model.add(tf.keras.layers.Dense(64, activation="relu"))
model.add(tf.keras.layers.Dense(classes, activation="softmax"))
return model
def run(datasets,
splice_sites,
sub_models,
save,
vis,
iter,
metrics,
summary,
config,
num_folds,
bal,
imbal,
imbal_t,
imbal_f,
batch_size,
epochs
):
"""
Parameters
----------
dataset: a string {nn269, ce, hs3d} indicating which dataset to use
splice_site_type: a string {acceptor, donor} indicating which splice
site to train on
model_architecture: a string {cnn, dnn, rnn} indicating which model
architecture to use for training
save_model: boolean, whether to save the current model
bal: boolean, whether to balance the dataset
summary: boolean, whether to print out the model architecture summary
config: boolean, whether to print out the model's configuration
visualize: boolean, whether to save a performance graph of the model
metrics: boolean, whether to print out the evaluation metrics for the model
num_folds: int (default 10), the number of folds for k-fold cross validation
epochs: int (default 15), the number of epochs for the chosen model
batch_size: int (default 32), the model batch size
model_iter: integer, the iteration of the current model architecture (e.g.
if this is the third cnn architecture you are testing, use 3)
"""
# (acceptor row len, donor row len) by dataset
network_rows = {
'acceptor':{
'nn269':90, 'ce':141,
'hs3d':140, 'hs2':602,
'ce2':602, 'dm':602,
'ar':602, 'or':602,
},
'donor':{
'nn269':15, 'ce':141,
'hs3d':140, 'hs2':602,
'ce2':602, 'dm':602,
'ar':602, 'or':602,
},
}
# initialize selected sub models
to_run = dict(
[
(sub_model,{
'nn269':'', 'ce':'',
'hs3d':'', 'hs2':'',
'ce2':'', 'dm':'',
'ar':'', 'or':''
}) for sub_model in sub_models
]
)
# results dictionary
results = copy.deepcopy(to_run)
# populate sub models with encoded data
for sub_model in sub_models:
for dataset in datasets:
# encode datasets -> return (acc_x, acc_y, don_x, don_y)
to_run[sub_model][dataset] = encode_data.encode(dataset, sub_model, bal)
# get a metrics dictionary
evals = dict(
[
(sub_model, {
'f1':'', 'precision':'',
'sensitivity':'', 'specificity':'',
'recall':'', 'mcc':'',
'err_rate':''
}) for sub_model in sub_models
]
)
# accumulate results from running cross validation
for sub_model in sub_models:
for dataset in datasets:
if to_run[sub_model][dataset] == '':
pass
else:
results[sub_model][dataset] = utils.cross_validation(
num_folds,
sub_model,
splice_sites,
dataset,
to_run[sub_model][dataset],# encoded data for dataset (ds)
network_rows, # donor, acceptor rows for ds
evals,
summary,
config,
batch_size,
epochs,
save,
)
# if vis:
print(results)
return results
# plot results
# loss_acc_sub_models(
# results,
# datasets,
# sub_models,
# epochs,
# num_folds,
# bal
# )
# # different by splice site type
# if splice_site_type == 'acceptor':
# cnn_X_train, cnn_y_train = cnn_acc_x, acc_y
# # same name to preserve for loop structure
# X_train, y_train = rd_acc_x, acc_y
# dataset_row_num = network_rows[dataset][0]
# if splice_site_type == 'donor':
# cnn_X_train, cnn_y_train = cnn_don_x, don_y
# X_train, y_train = rd_don_x, don_y
# dataset_row_num = network_rows[dataset][1]
#
#
# # if tune_rnn:
# # tune_rnn()
#
# # perform cross validation
# # general
# trn_fold_accs, trn_fold_losses = [], []
# val_fold_accs, val_fold_losses = [], []
# # esplice
# rnn_va, rnn_vl, cnn_vl, cnn_va, dnn_vl, dnn_va = [],[],[],[],[],[]
# rnn_ta, rnn_tl, cnn_tl, cnn_ta, dnn_tl, dnn_ta = [],[],[],[],[],[]
#
# # this loop inspired by https://www.machinecurve.com/
# #index.php/2020/02/18/how-to-use-k-fold-cross-validation-with-keras/
# k_fold = KFold(n_splits=num_folds, shuffle=False)
# fold = 1
# for train, test in k_fold.split(X_train, y_train):
# if model_architecture != 'esplice':
# X_trn, y_trn = X_train[train], y_train[train]
# X_val, y_val = X_train[test], y_train[test]
# if model_architecture=='cnn':
# history, model = build_cnn(
# dataset_row_num,
# summary,
# X_trn,
# y_trn,
# batch_size,
# epochs,
# X_val,#becomes X_val
# y_val,#becomes y_val
# fold,
# num_folds
# )
# if model_architecture=='dnn':
# history, model = build_dnn(
# dataset_row_num,
# summary,
# X_trn,
# y_trn,
# batch_size,
# epochs,
# X_val,#becomes X_val
# y_val,#becomes y_val
# fold,
# num_folds
# )
# if model_architecture=='rnn':
# history, model = build_rnn(
# dataset_row_num,
# summary,
# X_trn,
# y_trn,
# batch_size,
# epochs,
# X_val,#becomes X_val
# y_val,#becomes y_val
# fold,
# num_folds
# )
# # model.predict(X_trn)
# val_fold_accs.append(history.history['val_accuracy'])
# val_fold_losses.append(history.history['val_loss'])
# trn_fold_accs.append(history.history['accuracy'])
# trn_fold_losses.append(history.history['loss'])
# fold += 1
# else:
# # set up submodel datasets
# cnn_X_trn, cnn_y_trn = cnn_X_train[train], cnn_y_train[train]
# cnn_X_val, cnn_y_val = cnn_X_train[test], cnn_y_train[test]
# rd_X_trn, rd_y_trn = X_train[train], y_train[train]
# rd_X_val, rd_y_val = X_train[test], y_train[test]
# # build each submodel
# hist01, submodel_01 = build_cnn(
# dataset_row_num,
# summary,
# cnn_X_trn,
# cnn_y_trn,
# batch_size,
# epochs,
# cnn_X_val,
# cnn_y_val,
# fold,
# num_folds
# )
# hist02, submodel_02 = build_dnn(
# dataset_row_num,
# summary,
# rd_X_trn,
# rd_y_trn,
# batch_size,
# epochs,
# rd_X_val,
# rd_y_val,
# fold,
# num_folds
# )
# # hist03, submodel_03 = build_rnn(
# # dataset_row_num,
# # summary,
# # rd_X_trn,
# # rd_y_trn,
# # batch_size,
# # epochs,
# # rd_X_val,
# # rd_y_val,
# # fold,
# # num_folds
# # )
# models = [submodel_01, submodel_02]#, submodel_03]
# trn_scores, val_scores = EnsembleSplice.build(
# models,
# batch_size,
# cnn_X_trn,
# cnn_y_trn,
# cnn_X_val,
# cnn_y_val,
# rd_X_trn,
# rd_y_trn,
# rd_X_val,
# rd_y_val,
# )
# # get final epoch accuracy
# trn_fold_accs.append(trn_scores)
# val_fold_accs.append(val_scores)
# # rnn_va.append(hist03.history['val_accuracy'])
# # rnn_vl.append(hist03.history['val_loss'])
# # rnn_ta.append(hist03.history['accuracy'])
# # rnn_tl.append(hist03.history['loss'])
# # cnn_vl.append(hist01.history['val_loss'])
# # cnn_va.append(hist01.history['val_accuracy'])
# # cnn_tl.append(hist01.history['loss'])
# # cnn_ta.append(hist01.history['accuracy'])
# # dnn_vl.append(hist02.history['val_loss'])
# # dnn_va.append(hist02.history['val_accuracy'])
# # dnn_tl.append(hist02.history['loss'])
# # dnn_ta.append(hist02.history['accuracy'])
#
# # rnn_va.append(hist03.history['val_accuracy'][-1])
# # rnn_vl.append(hist03.history['val_loss'][-1])
# # rnn_ta.append(hist03.history['accuracy'][-1])
# # rnn_tl.append(hist03.history['loss'][-1])
# cnn_vl.append(hist01.history['val_loss'][-1])
# cnn_va.append(hist01.history['val_accuracy'][-1])
# cnn_tl.append(hist01.history['loss'][-1])
# cnn_ta.append(hist01.history['accuracy'][-1])
# dnn_vl.append(hist02.history['val_loss'][-1])
# dnn_va.append(hist02.history['val_accuracy'][-1])
# dnn_tl.append(hist02.history['loss'][-1])
# dnn_ta.append(hist02.history['accuracy'][-1])
#
# fold += 1
#
# # do something with predicted values and real values to get AUC-ROC scores
# # sklearn.metrics.roc_auc_score
# # also get f-score and other scores here
# # maybe connect tune_rnn and build_rnn -> get tuned parameters and plug them
# # in automatically to RNN
#
# if model_architecture != 'esplice':
#
# val_acc_by_epoch = np.apply_along_axis(lambda row: np.mean(row), 1, np.asarray(val_fold_accs).T)
# val_loss_by_epoch = np.apply_along_axis(lambda row: np.mean(row), 1, np.asarray(val_fold_losses).T)
# trn_acc_by_epoch = np.apply_along_axis(lambda row: np.mean(row), 1, np.asarray(trn_fold_accs).T)
# trn_loss_by_epoch = np.apply_along_axis(lambda row: np.mean(row), 1, np.asarray(trn_fold_losses).T)
#
# std_val_acc = np.apply_along_axis(lambda row: np.std(row), 1, np.asarray(val_fold_accs).T)
# std_val_loss = np.apply_along_axis(lambda row: np.std(row), 1, np.asarray(val_fold_losses).T)
# std_trn_acc = np.apply_along_axis(lambda row: np.std(row), 1, np.asarray(trn_fold_accs).T)
# std_trn_loss = np.apply_along_axis(lambda row: np.std(row), 1, np.asarray(trn_fold_losses).T)
#
# values = [
# val_acc_by_epoch,
# std_val_acc,
# trn_acc_by_epoch,
# std_trn_acc,
# val_loss_by_epoch,
# std_val_loss,
# trn_loss_by_epoch,
# std_trn_loss
# ]
#
# if model_architecture == 'esplice':
#
# # make a DICTIONARY AREY
# # ES_Val_ACc: (vacc, std_va)
# mean_good = lambda seq: np.apply_along_axis(lambda row: np.mean(row), 1, np.asarray(seq).T)
# std_good = lambda seq: np.apply_along_axis(lambda row: np.std(row), 1, np.asarray(seq).T)
# vacc = val_fold_accs
# tacc = trn_fold_accs
# # std_va = val_fold_accs
# # std_ta = trn_fold_accs
#
# values = [
# val_fold_accs,
# trn_fold_accs,
# #rnn_va,
# # rnn_vl,
# #rnn_ta,
# # rnn_tl,
# # cnn_vl,
# cnn_va,
# # cnn_tl,
# cnn_ta,
# # dnn_vl,
# dnn_va,
# # dnn_tl,
# dnn_ta
# ]
#
# # cnn_mva = mean_good(cnn_va)
# # cnn_mvl = mean_good(cnn_vl)
# # cnn_mta = mean_good(cnn_ta)
# # cnn_mtl = mean_good(cnn_tl)
# # cnn_sva = std_good(cnn_va)
# # cnn_svl = std_good(cnn_vl)
# # cnn_sta = std_good(cnn_ta)
# # cnn_stl = std_good(cnn_tl)
# #
# # dnn_mva = mean_good(dnn_va)
# # dnn_mvl = mean_good(dnn_vl)
# # dnn_mta = mean_good(dnn_ta)
# # dnn_mtl = mean_good(dnn_tl)
# # dnn_sva = std_good(dnn_va)
# # dnn_svl = std_good(dnn_vl)
# # dnn_sta = std_good(dnn_ta)
# # dnn_stl = std_good(dnn_tl)
# #
# # rnn_mva = mean_good(rnn_va)
# # rnn_mvl = mean_good(rnn_vl)
# # rnn_mta = mean_good(rnn_ta)
# # rnn_mtl = mean_good(rnn_tl)
# # rnn_sva = std_good(rnn_va)
# # rnn_svl = std_good(rnn_vl)
# # rnn_sta = std_good(rnn_ta)
# # rnn_stl = std_good(rnn_tl)
#
# # values = [
# # vacc,
# # # std_va,
# # tacc,
# # # std_ta,
# # cnn_mva,
# # cnn_sva,
# # cnn_mvl,
# # cnn_svl,
# # cnn_mta,
# # cnn_sta,
# # cnn_mtl,
# # cnn_stl,
# # dnn_mva,
# # dnn_sva,
# # dnn_mvl,
# # dnn_svl,
# # dnn_mta,
# # dnn_sta,
# # dnn_mtl,
# # dnn_stl,
# # rnn_mva,
# # rnn_sva,
# # rnn_mvl,
# # rnn_svl,
# # rnn_mta,
# # rnn_sta,
# # rnn_mtl,
# # rnn_stl,
# # ]
# if config:
# print(model.get_config())
# if save_model:
# name = input('What would you like to name this model?: ')
# model.save(f'{name}')
# tf.keras.utils.plot_model(model, f'{name}.png', show_shapes=True)
# if visualize:
# loss_acc_esplice(
# values,
# model_architecture,
# dataset,
# splice_site_type,
# num_folds,
# epochs,
# bal,
# )
| 34.781627 | 126 | 0.525352 | [
"MIT"
] | tmartin2/EnsembleSplice-Inactive | sub_models.py | 23,095 | Python |
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import enum
from builtins import _test_sink, _test_source
from typing import Annotated, Any, Dict, List
class Test1_C:
x: int = 0
y: str = "y"
z: Annotated[str, "test1"] = "z"
def test1_alarm1():
# always-via-type:int
c = Test1_C(_test_source())
_test_sink(c.x)
def test1_alarm2():
# always-via-type:str
c = Test1_C(_test_source())
_test_sink(c.y)
def test1_alarm3():
# always-via-type:typing.Annotated[str]
c = Test1_C(_test_source())
_test_sink(c.z)
def test1_alarm4(foo):
# via-type:int, via-type:str, via-type:typing.Annotated[str]
c = Test1_C(_test_source())
foo = c.x
if 1:
foo = c.y
elif 2:
foo = c.z
_test_sink(foo)
class Test2_C:
x: Dict[str, int] = {}
y: List[str] = []
z: Annotated[float, "test2"] = 0.0
def test2_alarm1():
# always-via-type:Dict[str, int]
c = Test2_C(_test_source())
_test_sink(c.x)
def test2_alarm2():
# always-via-type:List[str]
c = Test2_C(_test_source())
_test_sink(c.y)
def test2_alarm3():
# always-via-type:float
c = Test2_C(_test_source())
_test_sink(c.z)
def test2_alarm4(foo):
# via-type:Dict[str, int], via-type:List[str], via-type:float
c = Test2_C(_test_source())
foo = c.x
if 1:
foo = c.y
elif 2:
foo = c.z
_test_sink(foo)
class Test3_Foo:
...
class Test3_C:
x: Dict[str, List[int]] = {}
y: Test3_Foo = Test3_Foo()
z: Annotated[List[List[str]], "test3"] = []
def test3_alarm1(c: Test3_C):
# always-via-type:Dict[str, List[int]]
_test_sink(c.x)
def test3_alarm2(c: Test3_C):
# always-via-type:Test3_Foo
_test_sink(c.y)
def test3_alarm3(c: Test3_C):
# always-via-type:typing.Annotated[List[List[str]]
_test_sink(c.z)
def test3_alarm4(c: Test3_C, foo):
# via-type:Dict[str, List[int]],
# via-type:Test3_Foo,
# via-type:typing.Annotated[List[List[str]]
foo = c.x
if 1:
foo = c.y
elif 2:
foo = c.z
_test_sink(foo)
class Test4_C:
x = ...
y: Any = 0
z: object = []
def test4_alarm1(c: Test4_C):
# always-via-type:unknown
c.x = _test_source()
def test4_alarm2(c: Test4_C):
# always-via-type:Any
c.y = _test_source()
def test4_alarm3(c: Test4_C):
# always-via-type:object
c.z = _test_source()
def return_via_parameter_type(parameter):
return 0
def test_strings():
return return_via_parameter_type("A")
def test_numerals():
return return_via_parameter_type(1)
def test_lists():
return return_via_parameter_type(["a", "b"])
def meta(parameter):
return return_via_parameter_type(parameter)
def test_via_type_of_does_not_propagate():
return meta("Name")
def tito(parameter, other):
pass
def test_tito():
a = tito(_test_source(), [1, 2])
return a
def sink_via_type_of(x, y):
pass
def test_sink(element):
return sink_via_type_of(element, 1)
def test_backwards_tito(parameter):
return tito(parameter, "by_backwards")
| 17.741758 | 65 | 0.635491 | [
"MIT"
] | 0xgpapad/pyre-check | source/interprocedural_analyses/taint/test/integration/via_type_of.py | 3,229 | Python |
import os
import anyconfig
PARSABLES = {
'pickle':['p','pickle'],
'toml':['toml'],
'xml':['xml'],
'yaml':['yml','yaml'],
'json':['json'],
'ini':['ini'],
'properties':['props','properties'],
'shellvars':['env']}
class Source(object):
def __init__(self, uri, root_path=None, contents={}, parser=None, error=None):
self.uri = uri
self.root_path = root_path
self.error = error
self.parser = parser
self.contents = contents
def __repr__(self):
return self.__str__()
def __str__(self):
return "<Source uri={} root_path={} parser={} error={}>".format(self.uri, self.root_path, self.parser, self.error)
def load(self):
pass
@staticmethod
def from_file(full_file_path, root_path):
s = SourceFile(full_file_path, root_path)
s.load()
return s
class SourceFile(Source):
def _parse(self, parser=None):
contents = anyconfig.load(self.uri, ac_parser=parser, ac_ordered=True)
parser = parser if parser else os.path.splitext(self.uri)[1].strip('.')
return (contents, parser)
def load(self):
try:
self.contents, self.parser = self._parse()
except Exception as e:
            # If the file had a known extension but didn't parse, record the error.
            # The danger otherwise is that it gets parsed incorrectly as a properties
            # file, which seems to match everything.
ext = os.path.splitext(self.uri)[1][1:]
if [lext for lext in PARSABLES.values() if ext in lext]:
self.error = e
# print type(e) # 'exception parsing {}\t{}'.format(ext, e)
else:
for p in PARSABLES.keys():
try:
self.contents, self.parser = self._parse(p)
self.error = None
break
except Exception as e:
# print type(e) #'exception parsing as ', p, ' ', e
pass
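# A minimal usage sketch (the path is illustrative only):
#
#   src = Source.from_file('/tmp/settings.yml', root_path='/tmp')
#   if src.error is None:
#       print(src.parser, src.contents)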
| 25.9 | 122 | 0.543436 | [
"MIT"
] | flashashen/flange | flange/source.py | 2,072 | Python |
from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'picscope.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
| 23 | 53 | 0.652174 | [
"MIT"
] | yeaske/picscope | picscope/urls.py | 299 | Python |
"""Data Helpers
Data helpers leverage the abstraction layer of ArchivesSnake to provide
additional functionality for retrieving, inferring and concatenating data
elements. They can also extend (or invert) relationships between different
objects.
"""
from datetime import datetime
import re
from rapidfuzz import fuzz
from asnake.jsonmodel import JSONModelObject
from string import Formatter
from .decorators import check_type
@check_type(dict)
def get_note_text(note):
"""Parses note content from different note types.
:param dict: an ArchivesSpace note.
:returns: a list containing note content.
:rtype: list
"""
def parse_subnote(subnote):
"""Parses note content from subnotes.
:param dict: an ArchivesSpace subnote.
:returns: a list containing subnote content.
:rtype: list
"""
if subnote["jsonmodel_type"] in [
"note_orderedlist", "note_index"]:
content = subnote["items"]
elif subnote["jsonmodel_type"] in ["note_chronology", "note_definedlist"]:
content = []
for k in subnote["items"]:
for i in k:
content += k.get(i) if isinstance(k.get(i),
list) else [k.get(i)]
else:
content = subnote["content"] if isinstance(
subnote["content"], list) else [subnote["content"]]
return content
if note["jsonmodel_type"] == "note_singlepart":
content = note["content"]
elif note["jsonmodel_type"] == "note_bibliography":
data = []
data += note["content"]
data += note["items"]
content = data
elif note["jsonmodel_type"] == "note_index":
data = []
for item in note["items"]:
data.append(item["value"])
content = data
else:
subnote_content_list = list(parse_subnote(sn)
for sn in note["subnotes"])
content = [
c for subnote_content in subnote_content_list for c in subnote_content]
return content
@check_type(dict)
def text_in_note(note, query_string):
"""Performs fuzzy searching against note text.
:param dict note: an ArchivesSpace note.
:param str query_string: a string to match against.
:returns: True if a match is found for `query_string`, False if no match is
found.
:rtype: bool
"""
CONFIDENCE_RATIO = 97
"""int: Minimum confidence ratio to match against."""
note_content = get_note_text(note)
ratio = fuzz.token_sort_ratio(
" ".join([n.lower() for n in note_content]),
query_string.lower(),
score_cutoff=CONFIDENCE_RATIO)
return bool(ratio)
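# A brief sketch, assuming a single-part note dict:
#
#   note = {"jsonmodel_type": "note_singlepart",
#           "content": ["Restricted until 2025"]}
#   text_in_note(note, "restricted until 2025")  # True for a near-exact match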
@check_type(JSONModelObject)
def object_locations(archival_object):
"""Finds locations associated with an archival object.
:param JSONModelObject archival_object: an ArchivesSpace archival_object.
:returns: Locations objects associated with the archival object.
:rtype: list
"""
locations = []
for instance in archival_object.instances:
top_container = instance.sub_container.top_container.reify()
locations += top_container.container_locations
return locations
@check_type(JSONModelObject)
def format_from_obj(obj, format_string):
"""Generates a human-readable string from an object.
:param JSONModelObject location: an ArchivesSpace object.
:returns: a string in the chosen format.
:rtype: str
"""
if not format_string:
raise Exception("No format string provided.")
else:
try:
d = {}
matches = [i[1] for i in Formatter().parse(format_string) if i[1]]
for m in matches:
d.update({m: getattr(obj, m, "")})
return format_string.format(**d)
except KeyError as e:
raise KeyError(
"The field {} was not found in this object".format(
str(e)))
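# A brief sketch (assuming the object exposes 'building' and 'room' attributes):
#
#   format_from_obj(location, "{building}, Room {room}")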
@check_type(dict)
def format_resource_id(resource, separator=":"):
"""Concatenates the four-part ID for a resource record.
:param dict resource: an ArchivesSpace resource.
:param str separator: a separator to insert between the id parts. Defaults
to `:`.
:returns: a concatenated four-part ID for the resource record.
:rtype: str
"""
resource_id = []
for x in range(4):
try:
resource_id.append(resource["id_{0}".format(x)])
except KeyError:
break
return separator.join(resource_id)
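# For example (a sketch, assuming a three-part ID):
#
#   format_resource_id({"id_0": "FA", "id_1": "001", "id_2": "02"})  # "FA:001:02"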
@check_type(JSONModelObject)
def closest_value(archival_object, key):
"""Finds the closest value matching a key.
Starts with an archival object, and iterates up through its ancestors
until it finds a match for a key that is not empty or null.
:param JSONModelObject archival_object: an ArchivesSpace archival_object.
:param str key: the key to match against.
:returns: The value of the key, which could be a str, list, or dict.
:rtype: str, list, or key
"""
if getattr(archival_object, key) not in ["", [], {}, None]:
return getattr(archival_object, key)
else:
for ancestor in archival_object.ancestors:
return closest_value(ancestor, key)
def get_orphans(object_list, null_attribute):
"""Finds objects in a list which do not have a value in a specified field.
:param list object_list: a list of ArchivesSpace objects.
:param null_attribute: an attribute which must be empty or null.
:yields: a list of ArchivesSpace objects.
:yield type: dict
"""
for obj in object_list:
if getattr(obj, null_attribute) in ["", [], {}, None]:
yield obj
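# A brief sketch (assuming 'dates' is the attribute to check):
#
#   undated = list(get_orphans(archival_objects, "dates"))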
@check_type(dict)
def get_expression(date):
"""Returns a date expression for a date object.
Concatenates start and end dates if no date expression exists.
:param dict date: an ArchivesSpace date
:returns: date expression for the date object.
:rtype: str
"""
try:
expression = date["expression"]
except KeyError:
if date.get("end"):
expression = "{0}-{1}".format(date["begin"], date["end"])
else:
expression = date["begin"]
return expression
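# For instance (a minimal sketch):
#
#   get_expression({"begin": "1950", "end": "1960"})  # "1950-1960"
#   get_expression({"begin": "1950"})                 # "1950"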
@check_type(dict)
def indicates_restriction(rights_statement, restriction_acts):
"""Parses a rights statement to determine if it indicates a restriction.
:param dict rights_statement: an ArchivesSpace rights statement.
:returns: True if rights statement indicates a restriction, False if not.
:rtype: bool
"""
def is_expired(date):
today = datetime.now()
        date = date if date else datetime.now().strftime("%Y-%m-%d")
return False if (
datetime.strptime(date, "%Y-%m-%d") >= today) else True
if is_expired(rights_statement.get("end_date")):
return False
for act in rights_statement.get("acts"):
if (act.get("restriction")
in restriction_acts and not is_expired(act.get("end_date"))):
return True
return False
@check_type(dict)
def is_restricted(archival_object, query_string, restriction_acts):
"""Parses an archival object to determine if it is restricted.
Iterates through notes, looking for a conditions governing access note
which contains a particular set of strings.
Also looks for associated rights statements which indicate object may be
restricted.
:param dict archival_object: an ArchivesSpace archival_object.
:param list restriction_acts: a list of strings to match restriction act against.
:returns: True if archival object is restricted, False if not.
:rtype: bool
"""
for note in archival_object["notes"]:
if note["type"] == "accessrestrict":
if text_in_note(note, query_string.lower()):
return True
for rights_statement in archival_object["rights_statements"]:
if indicates_restriction(rights_statement, restriction_acts):
return True
return False
@check_type(str)
def strip_html_tags(string):
"""Strips HTML tags from a string.
:param str string: An input string from which to remove HTML tags.
"""
tag_match = re.compile("<.*?>")
cleantext = re.sub(tag_match, "", string)
return cleantext
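# For example (a sketch):
#
#   strip_html_tags("<p>Finding aid</p>")  # "Finding aid"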
| 31.73384 | 85 | 0.648454 | [
"MIT"
] | RockefellerArchiveCenter/rac_aspace | rac_aspace/data_helpers.py | 8,346 | Python |
# Copyright (c) 2009-2017 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
# Gabe Black
# William Wang
from m5.params import *
from m5.proxy import *
from ClockDomain import ClockDomain
from VoltageDomain import VoltageDomain
from Device import BasicPioDevice, PioDevice, IsaFake, BadAddr, DmaDevice
from PciHost import *
from Ethernet import NSGigE, IGbE_igb, IGbE_e1000
from Ide import *
from Platform import Platform
from Terminal import Terminal
from Uart import Uart
from SimpleMemory import SimpleMemory
from Gic import *
from EnergyCtrl import EnergyCtrl
from ClockDomain import SrcClockDomain
from SubSystem import SubSystem
# Platforms with KVM support should generally use in-kernel GIC
# emulation. Use a GIC model that automatically switches between
# gem5's GIC model and KVM's GIC model if KVM is available.
try:
from KvmGic import MuxingKvmGic
kvm_gicv2_class = MuxingKvmGic
except ImportError:
# KVM support wasn't compiled into gem5. Fallback to a
# software-only GIC.
kvm_gicv2_class = Pl390
pass
class AmbaPioDevice(BasicPioDevice):
type = 'AmbaPioDevice'
abstract = True
cxx_header = "dev/arm/amba_device.hh"
amba_id = Param.UInt32("ID of AMBA device for kernel detection")
class AmbaIntDevice(AmbaPioDevice):
type = 'AmbaIntDevice'
abstract = True
cxx_header = "dev/arm/amba_device.hh"
gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
int_num = Param.UInt32("Interrupt number that connects to GIC")
int_delay = Param.Latency("100ns",
"Time between action and interrupt generation by device")
class AmbaDmaDevice(DmaDevice):
type = 'AmbaDmaDevice'
abstract = True
cxx_header = "dev/arm/amba_device.hh"
pio_addr = Param.Addr("Address for AMBA slave interface")
pio_latency = Param.Latency("10ns", "Time between action and write/read result by AMBA DMA Device")
gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
int_num = Param.UInt32("Interrupt number that connects to GIC")
amba_id = Param.UInt32("ID of AMBA device for kernel detection")
class A9SCU(BasicPioDevice):
type = 'A9SCU'
cxx_header = "dev/arm/a9scu.hh"
class ArmPciIntRouting(Enum): vals = [
'ARM_PCI_INT_STATIC',
'ARM_PCI_INT_DEV',
'ARM_PCI_INT_PIN',
]
class GenericArmPciHost(GenericPciHost):
type = 'GenericArmPciHost'
cxx_header = "dev/arm/pci_host.hh"
int_policy = Param.ArmPciIntRouting("PCI interrupt routing policy")
int_base = Param.Unsigned("PCI interrupt base")
int_count = Param.Unsigned("Maximum number of interrupts used by this host")
class RealViewCtrl(BasicPioDevice):
type = 'RealViewCtrl'
cxx_header = "dev/arm/rv_ctrl.hh"
proc_id0 = Param.UInt32(0x0C000000, "Processor ID, SYS_PROCID")
proc_id1 = Param.UInt32(0x0C000222, "Processor ID, SYS_PROCID1")
idreg = Param.UInt32(0x00000000, "ID Register, SYS_ID")
class RealViewOsc(ClockDomain):
type = 'RealViewOsc'
cxx_header = "dev/arm/rv_ctrl.hh"
parent = Param.RealViewCtrl(Parent.any, "RealView controller")
# TODO: We currently don't have the notion of a clock source,
# which means we have to associate oscillators with a voltage
# source.
voltage_domain = Param.VoltageDomain(Parent.voltage_domain,
"Voltage domain")
# See ARM DUI 0447J (ARM Motherboard Express uATX -- V2M-P1) and
# the individual core/logic tile reference manuals for details
# about the site/position/dcc/device allocation.
site = Param.UInt8("Board Site")
position = Param.UInt8("Position in device stack")
dcc = Param.UInt8("Daughterboard Configuration Controller")
device = Param.UInt8("Device ID")
freq = Param.Clock("Default frequency")
class RealViewTemperatureSensor(SimObject):
type = 'RealViewTemperatureSensor'
cxx_header = "dev/arm/rv_ctrl.hh"
parent = Param.RealViewCtrl(Parent.any, "RealView controller")
system = Param.System(Parent.any, "system")
# See ARM DUI 0447J (ARM Motherboard Express uATX -- V2M-P1) and
# the individual core/logic tile reference manuals for details
# about the site/position/dcc/device allocation.
site = Param.UInt8("Board Site")
position = Param.UInt8("Position in device stack")
dcc = Param.UInt8("Daughterboard Configuration Controller")
device = Param.UInt8("Device ID")
class VExpressMCC(SubSystem):
"""ARM V2M-P1 Motherboard Configuration Controller
This subsystem describes a subset of the devices that sit behind the
motherboard configuration controller on the the ARM Motherboard
Express (V2M-P1) motherboard. See ARM DUI 0447J for details.
"""
class Osc(RealViewOsc):
site, position, dcc = (0, 0, 0)
class Temperature(RealViewTemperatureSensor):
site, position, dcc = (0, 0, 0)
osc_mcc = Osc(device=0, freq="50MHz")
osc_clcd = Osc(device=1, freq="23.75MHz")
osc_peripheral = Osc(device=2, freq="24MHz")
osc_system_bus = Osc(device=4, freq="24MHz")
# See Table 4.19 in ARM DUI 0447J (Motherboard Express uATX TRM).
temp_crtl = Temperature(device=0)
class CoreTile2A15DCC(SubSystem):
"""ARM CoreTile Express A15x2 Daughterboard Configuration Controller
This subsystem describes a subset of the devices that sit behind the
daughterboard configuration controller on a CoreTile Express A15x2. See
ARM DUI 0604E for details.
"""
class Osc(RealViewOsc):
site, position, dcc = (1, 0, 0)
# See Table 2.8 in ARM DUI 0604E (CoreTile Express A15x2 TRM)
osc_cpu = Osc(device=0, freq="60MHz")
osc_hsbm = Osc(device=4, freq="40MHz")
osc_pxl = Osc(device=5, freq="23.75MHz")
osc_smb = Osc(device=6, freq="50MHz")
osc_sys = Osc(device=7, freq="60MHz")
osc_ddr = Osc(device=8, freq="40MHz")
class VGic(PioDevice):
type = 'VGic'
cxx_header = "dev/arm/vgic.hh"
gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
platform = Param.Platform(Parent.any, "Platform this device is part of.")
vcpu_addr = Param.Addr(0, "Address for vcpu interfaces")
hv_addr = Param.Addr(0, "Address for hv control")
pio_delay = Param.Latency('10ns', "Delay for PIO r/w")
# The number of list registers is not currently configurable at runtime.
ppint = Param.UInt32("HV maintenance interrupt number")
class AmbaFake(AmbaPioDevice):
type = 'AmbaFake'
cxx_header = "dev/arm/amba_fake.hh"
ignore_access = Param.Bool(False, "Ignore reads/writes to this device, (e.g. IsaFake + AMBA)")
    amba_id = 0
class Pl011(Uart):
type = 'Pl011'
cxx_header = "dev/arm/pl011.hh"
gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
int_num = Param.UInt32("Interrupt number that connects to GIC")
end_on_eot = Param.Bool(False, "End the simulation when a EOT is received on the UART")
int_delay = Param.Latency("100ns", "Time between action and interrupt generation by UART")
class Sp804(AmbaPioDevice):
type = 'Sp804'
cxx_header = "dev/arm/timer_sp804.hh"
gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
int_num0 = Param.UInt32("Interrupt number that connects to GIC")
clock0 = Param.Clock('1MHz', "Clock speed of the input")
int_num1 = Param.UInt32("Interrupt number that connects to GIC")
clock1 = Param.Clock('1MHz', "Clock speed of the input")
amba_id = 0x00141804
class CpuLocalTimer(BasicPioDevice):
type = 'CpuLocalTimer'
cxx_header = "dev/arm/timer_cpulocal.hh"
gic = Param.BaseGic(Parent.any, "Gic to use for interrupting")
int_num_timer = Param.UInt32("Interrrupt number used per-cpu to GIC")
int_num_watchdog = Param.UInt32("Interrupt number for per-cpu watchdog to GIC")
class GenericTimer(SimObject):
type = 'GenericTimer'
cxx_header = "dev/arm/generic_timer.hh"
system = Param.ArmSystem(Parent.any, "system")
gic = Param.BaseGic(Parent.any, "GIC to use for interrupting")
# @todo: for now only two timers per CPU is supported, which is the
# normal behaviour when security extensions are disabled.
int_phys = Param.UInt32("Physical timer interrupt number")
int_virt = Param.UInt32("Virtual timer interrupt number")
class GenericTimerMem(PioDevice):
type = 'GenericTimerMem'
cxx_header = "dev/arm/generic_timer.hh"
gic = Param.BaseGic(Parent.any, "GIC to use for interrupting")
base = Param.Addr(0, "Base address")
int_phys = Param.UInt32("Interrupt number")
int_virt = Param.UInt32("Interrupt number")
class PL031(AmbaIntDevice):
type = 'PL031'
cxx_header = "dev/arm/rtc_pl031.hh"
time = Param.Time('01/01/2009', "System time to use ('Now' for actual time)")
amba_id = 0x00341031
class Pl050(AmbaIntDevice):
type = 'Pl050'
cxx_header = "dev/arm/kmi.hh"
vnc = Param.VncInput(Parent.any, "Vnc server for remote frame buffer display")
is_mouse = Param.Bool(False, "Is this interface a mouse, if not a keyboard")
int_delay = '1us'
amba_id = 0x00141050
class Pl111(AmbaDmaDevice):
type = 'Pl111'
cxx_header = "dev/arm/pl111.hh"
pixel_clock = Param.Clock('24MHz', "Pixel clock")
vnc = Param.VncInput(Parent.any, "Vnc server for remote frame buffer display")
amba_id = 0x00141111
enable_capture = Param.Bool(True, "capture frame to system.framebuffer.bmp")
class HDLcd(AmbaDmaDevice):
type = 'HDLcd'
cxx_header = "dev/arm/hdlcd.hh"
vnc = Param.VncInput(Parent.any, "Vnc server for remote frame buffer "
"display")
amba_id = 0x00141000
workaround_swap_rb = Param.Bool(False, "Workaround incorrect color "
"selector order in some kernels")
workaround_dma_line_count = Param.Bool(True, "Workaround incorrect "
"DMA line count (off by 1)")
enable_capture = Param.Bool(True, "capture frame to system.framebuffer.bmp")
pixel_buffer_size = Param.MemorySize32("2kB", "Size of address range")
pxl_clk = Param.ClockDomain("Pixel clock source")
pixel_chunk = Param.Unsigned(32, "Number of pixels to handle in one batch")
virt_refresh_rate = Param.Frequency("20Hz", "Frame refresh rate "
"in KVM mode")
class RealView(Platform):
type = 'RealView'
cxx_header = "dev/arm/realview.hh"
system = Param.System(Parent.any, "system")
_mem_regions = [(Addr(0), Addr('256MB'))]
def _on_chip_devices(self):
return []
def _off_chip_devices(self):
return []
_off_chip_ranges = []
def _attach_device(self, device, bus, dma_ports=None):
if hasattr(device, "pio"):
device.pio = bus.master
if hasattr(device, "dma"):
if dma_ports is None:
device.dma = bus.slave
else:
dma_ports.append(device.dma)
def _attach_io(self, devices, *args, **kwargs):
for d in devices:
self._attach_device(d, *args, **kwargs)
def _attach_clk(self, devices, clkdomain):
for d in devices:
if hasattr(d, "clk_domain"):
d.clk_domain = clkdomain
def attachPciDevices(self):
pass
def enableMSIX(self):
pass
def onChipIOClkDomain(self, clkdomain):
self._attach_clk(self._on_chip_devices(), clkdomain)
def offChipIOClkDomain(self, clkdomain):
self._attach_clk(self._off_chip_devices(), clkdomain)
def attachOnChipIO(self, bus, bridge=None, **kwargs):
self._attach_io(self._on_chip_devices(), bus, **kwargs)
if bridge:
bridge.ranges = self._off_chip_ranges
def attachIO(self, *args, **kwargs):
self._attach_io(self._off_chip_devices(), *args, **kwargs)
def setupBootLoader(self, mem_bus, cur_sys, loc):
self.nvmem = SimpleMemory(range = AddrRange('2GB', size = '64MB'),
conf_table_reported = False)
self.nvmem.port = mem_bus.master
cur_sys.boot_loader = loc('boot.arm')
cur_sys.atags_addr = 0x100
cur_sys.load_addr_mask = 0xfffffff
cur_sys.load_offset = 0
# Reference for memory map and interrupt number
# RealView Platform Baseboard Explore for Cortex-A9 User Guide (ARM DUI 0440A)
# Chapter 4: Programmer's Reference
class RealViewPBX(RealView):
uart = Pl011(pio_addr=0x10009000, int_num=44)
realview_io = RealViewCtrl(pio_addr=0x10000000)
mcc = VExpressMCC()
dcc = CoreTile2A15DCC()
gic = Pl390()
pci_host = GenericPciHost(
conf_base=0x30000000, conf_size='256MB', conf_device_bits=16,
pci_pio_base=0)
timer0 = Sp804(int_num0=36, int_num1=36, pio_addr=0x10011000)
timer1 = Sp804(int_num0=37, int_num1=37, pio_addr=0x10012000)
local_cpu_timer = CpuLocalTimer(int_num_timer=29, int_num_watchdog=30, pio_addr=0x1f000600)
clcd = Pl111(pio_addr=0x10020000, int_num=55)
kmi0 = Pl050(pio_addr=0x10006000, int_num=52)
kmi1 = Pl050(pio_addr=0x10007000, int_num=53, is_mouse=True)
a9scu = A9SCU(pio_addr=0x1f000000)
cf_ctrl = IdeController(disks=[], pci_func=0, pci_dev=7, pci_bus=2,
io_shift = 1, ctrl_offset = 2, Command = 0x1,
BAR0 = 0x18000000, BAR0Size = '16B',
BAR1 = 0x18000100, BAR1Size = '1B',
BAR0LegacyIO = True, BAR1LegacyIO = True)
l2x0_fake = IsaFake(pio_addr=0x1f002000, pio_size=0xfff)
flash_fake = IsaFake(pio_addr=0x40000000, pio_size=0x20000000,
fake_mem=True)
dmac_fake = AmbaFake(pio_addr=0x10030000)
uart1_fake = AmbaFake(pio_addr=0x1000a000)
uart2_fake = AmbaFake(pio_addr=0x1000b000)
uart3_fake = AmbaFake(pio_addr=0x1000c000)
smc_fake = AmbaFake(pio_addr=0x100e1000)
sp810_fake = AmbaFake(pio_addr=0x10001000, ignore_access=True)
watchdog_fake = AmbaFake(pio_addr=0x10010000)
gpio0_fake = AmbaFake(pio_addr=0x10013000)
gpio1_fake = AmbaFake(pio_addr=0x10014000)
gpio2_fake = AmbaFake(pio_addr=0x10015000)
ssp_fake = AmbaFake(pio_addr=0x1000d000)
sci_fake = AmbaFake(pio_addr=0x1000e000)
aaci_fake = AmbaFake(pio_addr=0x10004000)
mmc_fake = AmbaFake(pio_addr=0x10005000)
rtc = PL031(pio_addr=0x10017000, int_num=42)
energy_ctrl = EnergyCtrl(pio_addr=0x1000f000)
# Attach I/O devices that are on chip and also set the appropriate
# ranges for the bridge
def attachOnChipIO(self, bus, bridge=None, dma_ports =[]):
self.gic.pio = bus.master
self.l2x0_fake.pio = bus.master
self.a9scu.pio = bus.master
self.local_cpu_timer.pio = bus.master
if bridge:
# Bridge ranges based on excluding what is part of on-chip I/O
# (gic, l2x0, a9scu, local_cpu_timer)
bridge.ranges = [AddrRange(self.realview_io.pio_addr,
self.a9scu.pio_addr - 1),
AddrRange(self.flash_fake.pio_addr,
self.flash_fake.pio_addr + \
self.flash_fake.pio_size - 1)]
# Set the clock domain for IO objects that are considered
# to be "close" to the cores.
def onChipIOClkDomain(self, clkdomain):
self.gic.clk_domain = clkdomain
self.l2x0_fake.clk_domain = clkdomain
        self.a9scu.clk_domain = clkdomain
self.local_cpu_timer.clk_domain = clkdomain
# Attach I/O devices to specified bus object. Can't do this
# earlier, since the bus object itself is typically defined at the
# System level.
def attachIO(self, bus, dma_ports = []):
self.uart.pio = bus.master
self.realview_io.pio = bus.master
self.pci_host.pio = bus.master
self.timer0.pio = bus.master
self.timer1.pio = bus.master
self.clcd.pio = bus.master
if dma_ports.count(self.clcd.dma) == 0:
self.clcd.dma = bus.slave
self.kmi0.pio = bus.master
self.kmi1.pio = bus.master
self.cf_ctrl.pio = bus.master
if dma_ports.count(self.cf_ctrl.dma) == 0:
self.cf_ctrl.dma = bus.slave
self.dmac_fake.pio = bus.master
self.uart1_fake.pio = bus.master
self.uart2_fake.pio = bus.master
self.uart3_fake.pio = bus.master
self.smc_fake.pio = bus.master
self.sp810_fake.pio = bus.master
self.watchdog_fake.pio = bus.master
self.gpio0_fake.pio = bus.master
self.gpio1_fake.pio = bus.master
self.gpio2_fake.pio = bus.master
self.ssp_fake.pio = bus.master
self.sci_fake.pio = bus.master
self.aaci_fake.pio = bus.master
self.mmc_fake.pio = bus.master
self.rtc.pio = bus.master
self.flash_fake.pio = bus.master
self.energy_ctrl.pio = bus.master
# Set the clock domain for IO objects that are considered
# to be "far" away from the cores.
def offChipIOClkDomain(self, clkdomain):
self.uart.clk_domain = clkdomain
self.realview_io.clk_domain = clkdomain
self.timer0.clk_domain = clkdomain
self.timer1.clk_domain = clkdomain
self.clcd.clk_domain = clkdomain
self.kmi0.clk_domain = clkdomain
self.kmi1.clk_domain = clkdomain
self.cf_ctrl.clk_domain = clkdomain
self.dmac_fake.clk_domain = clkdomain
self.uart1_fake.clk_domain = clkdomain
self.uart2_fake.clk_domain = clkdomain
self.uart3_fake.clk_domain = clkdomain
self.smc_fake.clk_domain = clkdomain
self.sp810_fake.clk_domain = clkdomain
self.watchdog_fake.clk_domain = clkdomain
self.gpio0_fake.clk_domain = clkdomain
self.gpio1_fake.clk_domain = clkdomain
self.gpio2_fake.clk_domain = clkdomain
self.ssp_fake.clk_domain = clkdomain
self.sci_fake.clk_domain = clkdomain
self.aaci_fake.clk_domain = clkdomain
self.mmc_fake.clk_domain = clkdomain
self.rtc.clk_domain = clkdomain
self.flash_fake.clk_domain = clkdomain
self.energy_ctrl.clk_domain = clkdomain
# Reference for memory map and interrupt number
# RealView Emulation Baseboard User Guide (ARM DUI 0143B)
# Chapter 4: Programmer's Reference
class RealViewEB(RealView):
uart = Pl011(pio_addr=0x10009000, int_num=44)
realview_io = RealViewCtrl(pio_addr=0x10000000, idreg=0x01400500)
mcc = VExpressMCC()
dcc = CoreTile2A15DCC()
gic = Pl390(dist_addr=0x10041000, cpu_addr=0x10040000)
timer0 = Sp804(int_num0=36, int_num1=36, pio_addr=0x10011000)
timer1 = Sp804(int_num0=37, int_num1=37, pio_addr=0x10012000)
clcd = Pl111(pio_addr=0x10020000, int_num=23)
kmi0 = Pl050(pio_addr=0x10006000, int_num=20)
kmi1 = Pl050(pio_addr=0x10007000, int_num=21, is_mouse=True)
l2x0_fake = IsaFake(pio_addr=0x1f002000, pio_size=0xfff, warn_access="1")
flash_fake = IsaFake(pio_addr=0x40000000, pio_size=0x20000000-1,
fake_mem=True)
dmac_fake = AmbaFake(pio_addr=0x10030000)
uart1_fake = AmbaFake(pio_addr=0x1000a000)
uart2_fake = AmbaFake(pio_addr=0x1000b000)
uart3_fake = AmbaFake(pio_addr=0x1000c000)
smcreg_fake = IsaFake(pio_addr=0x10080000, pio_size=0x10000-1)
smc_fake = AmbaFake(pio_addr=0x100e1000)
sp810_fake = AmbaFake(pio_addr=0x10001000, ignore_access=True)
watchdog_fake = AmbaFake(pio_addr=0x10010000)
gpio0_fake = AmbaFake(pio_addr=0x10013000)
gpio1_fake = AmbaFake(pio_addr=0x10014000)
gpio2_fake = AmbaFake(pio_addr=0x10015000)
ssp_fake = AmbaFake(pio_addr=0x1000d000)
sci_fake = AmbaFake(pio_addr=0x1000e000)
aaci_fake = AmbaFake(pio_addr=0x10004000)
mmc_fake = AmbaFake(pio_addr=0x10005000)
rtc_fake = AmbaFake(pio_addr=0x10017000, amba_id=0x41031)
energy_ctrl = EnergyCtrl(pio_addr=0x1000f000)
# Attach I/O devices that are on chip and also set the appropriate
# ranges for the bridge
def attachOnChipIO(self, bus, bridge=None, dma_ports =[]):
self.gic.pio = bus.master
self.l2x0_fake.pio = bus.master
if bridge:
# Bridge ranges based on excluding what is part of on-chip I/O
# (gic, l2x0)
bridge.ranges = [AddrRange(self.realview_io.pio_addr,
self.gic.cpu_addr - 1),
AddrRange(self.flash_fake.pio_addr, Addr.max)]
# Set the clock domain for IO objects that are considered
# to be "close" to the cores.
def onChipIOClkDomain(self, clkdomain):
self.gic.clk_domain = clkdomain
self.l2x0_fake.clk_domain = clkdomain
# Attach I/O devices to specified bus object. Can't do this
# earlier, since the bus object itself is typically defined at the
# System level.
def attachIO(self, bus, dma_ports = []):
self.uart.pio = bus.master
self.realview_io.pio = bus.master
self.pci_host.pio = bus.master
self.timer0.pio = bus.master
self.timer1.pio = bus.master
self.clcd.pio = bus.master
if dma_ports.count(self.clcd.dma) == 0:
self.clcd.dma = bus.slave
self.kmi0.pio = bus.master
self.kmi1.pio = bus.master
self.dmac_fake.pio = bus.master
self.uart1_fake.pio = bus.master
self.uart2_fake.pio = bus.master
self.uart3_fake.pio = bus.master
self.smc_fake.pio = bus.master
self.sp810_fake.pio = bus.master
self.watchdog_fake.pio = bus.master
self.gpio0_fake.pio = bus.master
self.gpio1_fake.pio = bus.master
self.gpio2_fake.pio = bus.master
self.ssp_fake.pio = bus.master
self.sci_fake.pio = bus.master
self.aaci_fake.pio = bus.master
self.mmc_fake.pio = bus.master
self.rtc_fake.pio = bus.master
self.flash_fake.pio = bus.master
self.smcreg_fake.pio = bus.master
self.energy_ctrl.pio = bus.master
# Set the clock domain for IO objects that are considered
# to be "far" away from the cores.
def offChipIOClkDomain(self, clkdomain):
self.uart.clk_domain = clkdomain
self.realview_io.clk_domain = clkdomain
self.timer0.clk_domain = clkdomain
self.timer1.clk_domain = clkdomain
self.clcd.clk_domain = clkdomain
self.kmi0.clk_domain = clkdomain
self.kmi1.clk_domain = clkdomain
self.dmac_fake.clk_domain = clkdomain
self.uart1_fake.clk_domain = clkdomain
self.uart2_fake.clk_domain = clkdomain
self.uart3_fake.clk_domain = clkdomain
self.smc_fake.clk_domain = clkdomain
self.sp810_fake.clk_domain = clkdomain
self.watchdog_fake.clk_domain = clkdomain
self.gpio0_fake.clk_domain = clkdomain
self.gpio1_fake.clk_domain = clkdomain
self.gpio2_fake.clk_domain = clkdomain
self.ssp_fake.clk_domain = clkdomain
self.sci_fake.clk_domain = clkdomain
self.aaci_fake.clk_domain = clkdomain
self.mmc_fake.clk_domain = clkdomain
        self.rtc_fake.clk_domain = clkdomain
self.flash_fake.clk_domain = clkdomain
self.smcreg_fake.clk_domain = clkdomain
self.energy_ctrl.clk_domain = clkdomain
class VExpress_EMM(RealView):
_mem_regions = [(Addr('2GB'), Addr('2GB'))]
uart = Pl011(pio_addr=0x1c090000, int_num=37)
realview_io = RealViewCtrl(
proc_id0=0x14000000, proc_id1=0x14000000,
idreg=0x02250000, pio_addr=0x1C010000)
mcc = VExpressMCC()
dcc = CoreTile2A15DCC()
gic = Pl390(dist_addr=0x2C001000, cpu_addr=0x2C002000)
pci_host = GenericPciHost(
conf_base=0x30000000, conf_size='256MB', conf_device_bits=16,
pci_pio_base=0)
local_cpu_timer = CpuLocalTimer(int_num_timer=29, int_num_watchdog=30, pio_addr=0x2C080000)
generic_timer = GenericTimer(int_phys=29, int_virt=27)
timer0 = Sp804(int_num0=34, int_num1=34, pio_addr=0x1C110000, clock0='1MHz', clock1='1MHz')
timer1 = Sp804(int_num0=35, int_num1=35, pio_addr=0x1C120000, clock0='1MHz', clock1='1MHz')
clcd = Pl111(pio_addr=0x1c1f0000, int_num=46)
hdlcd = HDLcd(pxl_clk=dcc.osc_pxl,
pio_addr=0x2b000000, int_num=117,
workaround_swap_rb=True)
kmi0 = Pl050(pio_addr=0x1c060000, int_num=44)
kmi1 = Pl050(pio_addr=0x1c070000, int_num=45, is_mouse=True)
vgic = VGic(vcpu_addr=0x2c006000, hv_addr=0x2c004000, ppint=25)
cf_ctrl = IdeController(disks=[], pci_func=0, pci_dev=0, pci_bus=2,
io_shift = 2, ctrl_offset = 2, Command = 0x1,
BAR0 = 0x1C1A0000, BAR0Size = '256B',
BAR1 = 0x1C1A0100, BAR1Size = '4096B',
BAR0LegacyIO = True, BAR1LegacyIO = True)
vram = SimpleMemory(range = AddrRange(0x18000000, size='32MB'),
conf_table_reported = False)
rtc = PL031(pio_addr=0x1C170000, int_num=36)
l2x0_fake = IsaFake(pio_addr=0x2C100000, pio_size=0xfff)
uart1_fake = AmbaFake(pio_addr=0x1C0A0000)
uart2_fake = AmbaFake(pio_addr=0x1C0B0000)
uart3_fake = AmbaFake(pio_addr=0x1C0C0000)
sp810_fake = AmbaFake(pio_addr=0x1C020000, ignore_access=True)
watchdog_fake = AmbaFake(pio_addr=0x1C0F0000)
aaci_fake = AmbaFake(pio_addr=0x1C040000)
lan_fake = IsaFake(pio_addr=0x1A000000, pio_size=0xffff)
usb_fake = IsaFake(pio_addr=0x1B000000, pio_size=0x1ffff)
mmc_fake = AmbaFake(pio_addr=0x1c050000)
energy_ctrl = EnergyCtrl(pio_addr=0x1c080000)
# Attach any PCI devices that are supported
def attachPciDevices(self):
self.ethernet = IGbE_e1000(pci_bus=0, pci_dev=0, pci_func=0,
InterruptLine=1, InterruptPin=1)
self.ide = IdeController(disks = [], pci_bus=0, pci_dev=1, pci_func=0,
InterruptLine=2, InterruptPin=2)
def enableMSIX(self):
self.gic = Pl390(dist_addr=0x2C001000, cpu_addr=0x2C002000, it_lines=512)
self.gicv2m = Gicv2m()
self.gicv2m.frames = [Gicv2mFrame(spi_base=256, spi_len=64, addr=0x2C1C0000)]
def setupBootLoader(self, mem_bus, cur_sys, loc):
self.nvmem = SimpleMemory(range = AddrRange('64MB'),
conf_table_reported = False)
self.nvmem.port = mem_bus.master
cur_sys.boot_loader = loc('boot_emm.arm')
cur_sys.atags_addr = 0x8000000
cur_sys.load_addr_mask = 0xfffffff
cur_sys.load_offset = 0x80000000
# Attach I/O devices that are on chip and also set the appropriate
# ranges for the bridge
def attachOnChipIO(self, bus, bridge=None, dma_ports =[]):
self.gic.pio = bus.master
self.vgic.pio = bus.master
self.local_cpu_timer.pio = bus.master
if hasattr(self, "gicv2m"):
self.gicv2m.pio = bus.master
if dma_ports.count(self.hdlcd.dma) == 0:
self.hdlcd.dma = bus.slave
if bridge:
# Bridge ranges based on excluding what is part of on-chip I/O
            # (gic, gicv2m, vgic, local_cpu_timer, hdlcd)
bridge.ranges = [AddrRange(0x2F000000, size='16MB'),
AddrRange(0x2B000000, size='4MB'),
AddrRange(0x30000000, size='256MB'),
AddrRange(0x40000000, size='512MB'),
AddrRange(0x18000000, size='64MB'),
AddrRange(0x1C000000, size='64MB')]
# Set the clock domain for IO objects that are considered
# to be "close" to the cores.
def onChipIOClkDomain(self, clkdomain):
self.gic.clk_domain = clkdomain
if hasattr(self, "gicv2m"):
self.gicv2m.clk_domain = clkdomain
self.hdlcd.clk_domain = clkdomain
self.vgic.clk_domain = clkdomain
# Attach I/O devices to specified bus object. Done here
# as the specified bus to connect to may not always be fixed.
def attachIO(self, bus, dma_ports =[]):
self.uart.pio = bus.master
self.realview_io.pio = bus.master
self.pci_host.pio = bus.master
self.timer0.pio = bus.master
self.timer1.pio = bus.master
self.clcd.pio = bus.master
if dma_ports.count(self.clcd.dma) == 0:
self.clcd.dma = bus.slave
self.hdlcd.pio = bus.master
self.kmi0.pio = bus.master
self.kmi1.pio = bus.master
self.cf_ctrl.pio = bus.master
if dma_ports.count(self.cf_ctrl.dma) == 0:
self.cf_ctrl.dma = bus.slave
self.rtc.pio = bus.master
self.vram.port = bus.master
self.l2x0_fake.pio = bus.master
self.uart1_fake.pio = bus.master
self.uart2_fake.pio = bus.master
self.uart3_fake.pio = bus.master
self.sp810_fake.pio = bus.master
self.watchdog_fake.pio = bus.master
self.aaci_fake.pio = bus.master
self.lan_fake.pio = bus.master
self.usb_fake.pio = bus.master
self.mmc_fake.pio = bus.master
self.energy_ctrl.pio = bus.master
# Try to attach the I/O if it exists
try:
self.ide.pio = bus.master
if dma_ports.count(self.ide.dma) == 0:
self.ide.dma = bus.slave
self.ethernet.pio = bus.master
if dma_ports.count(self.ethernet.dma) == 0:
self.ethernet.dma = bus.slave
except:
pass
# Set the clock domain for IO objects that are considered
# to be "far" away from the cores.
def offChipIOClkDomain(self, clkdomain):
self.uart.clk_domain = clkdomain
self.realview_io.clk_domain = clkdomain
self.timer0.clk_domain = clkdomain
self.timer1.clk_domain = clkdomain
self.clcd.clk_domain = clkdomain
self.kmi0.clk_domain = clkdomain
self.kmi1.clk_domain = clkdomain
self.cf_ctrl.clk_domain = clkdomain
self.rtc.clk_domain = clkdomain
self.vram.clk_domain = clkdomain
self.l2x0_fake.clk_domain = clkdomain
self.uart1_fake.clk_domain = clkdomain
self.uart2_fake.clk_domain = clkdomain
self.uart3_fake.clk_domain = clkdomain
self.sp810_fake.clk_domain = clkdomain
self.watchdog_fake.clk_domain = clkdomain
self.aaci_fake.clk_domain = clkdomain
self.lan_fake.clk_domain = clkdomain
self.usb_fake.clk_domain = clkdomain
self.mmc_fake.clk_domain = clkdomain
self.energy_ctrl.clk_domain = clkdomain
class VExpress_EMM64(VExpress_EMM):
# Three memory regions are specified totalling 512GB
_mem_regions = [(Addr('2GB'), Addr('2GB')), (Addr('34GB'), Addr('30GB')),
(Addr('512GB'), Addr('480GB'))]
pci_host = GenericPciHost(
conf_base=0x30000000, conf_size='256MB', conf_device_bits=12,
pci_pio_base=0x2f000000)
def setupBootLoader(self, mem_bus, cur_sys, loc):
self.nvmem = SimpleMemory(range=AddrRange(0, size='64MB'),
conf_table_reported=False)
self.nvmem.port = mem_bus.master
cur_sys.boot_loader = loc('boot_emm.arm64')
cur_sys.atags_addr = 0x8000000
cur_sys.load_addr_mask = 0xfffffff
cur_sys.load_offset = 0x80000000
class VExpress_GEM5_V1(RealView):
"""
The VExpress gem5 memory map is loosely based on a modified
Versatile Express RS1 memory map.
The gem5 platform has been designed to implement a subset of the
original Versatile Express RS1 memory map. Off-chip peripherals should,
when possible, adhere to the Versatile Express memory map. Non-PCI
off-chip devices that are gem5-specific should live in the CS5 memory
space to avoid conflicts with existing devices that we might want to
model in the future. Such devices should normally have interrupts in
the gem5-specific SPI range.
On-chip peripherals are loosely modeled after the ARM CoreTile Express
A15x2 A7x3 memory and interrupt map. In particular, the GIC and
Generic Timer have the same interrupt lines and base addresses. Other
on-chip devices are gem5 specific.
Unlike the original Versatile Express RS2 extended platform, gem5 implements a
    large contiguous DRAM space, without aliases or holes, starting at the
2GiB boundary. This means that PCI memory is limited to 1GiB.
Memory map:
0x00000000-0x03ffffff: Boot memory (CS0)
0x04000000-0x07ffffff: Reserved
0x08000000-0x0bffffff: Reserved (CS0 alias)
0x0c000000-0x0fffffff: Reserved (Off-chip, CS4)
0x10000000-0x13ffffff: gem5-specific peripherals (Off-chip, CS5)
0x10000000-0x1000ffff: gem5 energy controller
0x10010000-0x1001ffff: gem5 pseudo-ops
0x14000000-0x17ffffff: Reserved (Off-chip, PSRAM, CS1)
0x18000000-0x1bffffff: Reserved (Off-chip, Peripherals, CS2)
0x1c000000-0x1fffffff: Peripheral block 1 (Off-chip, CS3):
0x1c010000-0x1c01ffff: realview_io (VE system control regs.)
0x1c060000-0x1c06ffff: KMI0 (keyboard)
0x1c070000-0x1c07ffff: KMI1 (mouse)
0x1c090000-0x1c09ffff: UART0
0x1c0a0000-0x1c0affff: UART1 (reserved)
0x1c0b0000-0x1c0bffff: UART2 (reserved)
0x1c0c0000-0x1c0cffff: UART3 (reserved)
0x1c170000-0x1c17ffff: RTC
0x20000000-0x3fffffff: On-chip peripherals:
0x2b000000-0x2b00ffff: HDLCD
0x2c001000-0x2c001fff: GIC (distributor)
0x2c002000-0x2c0020ff: GIC (CPU interface)
0x2c004000-0x2c005fff: vGIC (HV)
0x2c006000-0x2c007fff: vGIC (VCPU)
0x2c1c0000-0x2c1cffff: GICv2m MSI frame 0
0x2d000000-0x2d00ffff: GPU (reserved)
0x2f000000-0x2fffffff: PCI IO space
0x30000000-0x3fffffff: PCI config space
0x40000000-0x7fffffff: Ext. AXI: Used as PCI memory
0x80000000-X: DRAM
Interrupts:
0- 15: Software generated interrupts (SGIs)
16- 31: On-chip private peripherals (PPIs)
25 : vgic
26 : generic_timer (hyp)
27 : generic_timer (virt)
28 : Reserved (Legacy FIQ)
29 : generic_timer (phys, sec)
30 : generic_timer (phys, non-sec)
31 : Reserved (Legacy IRQ)
32- 95: Mother board peripherals (SPIs)
32 : Reserved (SP805)
33 : Reserved (IOFPGA SW int)
34-35: Reserved (SP804)
36 : RTC
37-40: uart0-uart3
41-42: Reserved (PL180)
43 : Reserved (AACI)
44-45: kmi0-kmi1
46 : Reserved (CLCD)
47 : Reserved (Ethernet)
48 : Reserved (USB)
95-255: On-chip interrupt sources (we use these for
gem5-specific devices, SPIs)
95 : HDLCD
96- 98: GPU (reserved)
100-103: PCI
256-319: MSI frame 0 (gem5-specific, SPIs)
320-511: Unused
"""
# Everything above 2GiB is memory
_mem_regions = [(Addr('2GB'), Addr('510GB'))]
_off_chip_ranges = [
# CS1-CS5
AddrRange(0x0c000000, 0x1fffffff),
# External AXI interface (PCI)
AddrRange(0x2f000000, 0x7fffffff),
]
# Platform control device (off-chip)
realview_io = RealViewCtrl(proc_id0=0x14000000, proc_id1=0x14000000,
idreg=0x02250000, pio_addr=0x1c010000)
mcc = VExpressMCC()
dcc = CoreTile2A15DCC()
### On-chip devices ###
gic = kvm_gicv2_class(dist_addr=0x2c001000, cpu_addr=0x2c002000,
it_lines=512)
vgic = VGic(vcpu_addr=0x2c006000, hv_addr=0x2c004000, ppint=25)
gicv2m = Gicv2m()
gicv2m.frames = [
Gicv2mFrame(spi_base=256, spi_len=64, addr=0x2c1c0000),
]
generic_timer = GenericTimer(int_phys=29, int_virt=27)
hdlcd = HDLcd(pxl_clk=dcc.osc_pxl,
pio_addr=0x2b000000, int_num=95)
def _on_chip_devices(self):
return [
self.gic, self.vgic, self.gicv2m,
self.hdlcd,
self.generic_timer,
]
### Off-chip devices ###
uart0 = Pl011(pio_addr=0x1c090000, int_num=37)
kmi0 = Pl050(pio_addr=0x1c060000, int_num=44)
kmi1 = Pl050(pio_addr=0x1c070000, int_num=45, is_mouse=True)
rtc = PL031(pio_addr=0x1c170000, int_num=36)
### gem5-specific off-chip devices ###
pci_host = GenericArmPciHost(
conf_base=0x30000000, conf_size='256MB', conf_device_bits=12,
pci_pio_base=0x2f000000,
int_policy="ARM_PCI_INT_DEV", int_base=100, int_count=4)
energy_ctrl = EnergyCtrl(pio_addr=0x10000000)
def _off_chip_devices(self):
return [
self.realview_io,
self.uart0,
self.kmi0, self.kmi1,
self.rtc,
self.pci_host,
self.energy_ctrl,
]
def attachPciDevice(self, device, *args, **kwargs):
device.host = self.pci_host
self._attach_device(device, *args, **kwargs)
def setupBootLoader(self, mem_bus, cur_sys, loc):
self.nvmem = SimpleMemory(range=AddrRange(0, size='64MB'),
conf_table_reported=False)
self.nvmem.port = mem_bus.master
cur_sys.boot_loader = [ loc('boot_emm.arm64'), loc('boot_emm.arm') ]
cur_sys.atags_addr = 0x8000000
cur_sys.load_addr_mask = 0xfffffff
cur_sys.load_offset = 0x80000000
# Setup m5ops. It's technically not a part of the boot
# loader, but this is the only place we can configure the
# system.
cur_sys.m5ops_base = 0x10010000
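# Configuration sketch (illustrative, not part of the original file): the
# memory map documented above implies PCI devices hang off pci_host and use
# the 0x40000000-0x7fffffff window. A config script might wire the platform
# roughly like this; the 'system' and 'membus' objects and the IGbE_e1000
# parameters are assumptions for illustration only.
def _example_wire_vexpress_gem5_v1(system, membus):
    # Instantiate the gem5-specific VExpress platform, hang a PCI NIC off its
    # generic PCI host, then attach on-chip and off-chip I/O to the memory bus.
    system.realview = VExpress_GEM5_V1()
    system.ethernet = IGbE_e1000(pci_bus=0, pci_dev=0, pci_func=0)
    system.realview.attachPciDevice(system.ethernet, membus)
    system.realview.attachOnChipIO(membus)
    system.realview.attachIO(membus)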
| 41.76078 | 103 | 0.654309 | [
"BSD-3-Clause"
] | gem5-graphics/gem5-graphics | gem5/src/dev/arm/RealView.py | 40,675 | Python |
#!/usr/bin/env python
# File: plot_icd_vs_colorgrad.py
# Created on: Tue 08 May 2012 11:03:26 AM CDT
# Last Change: Sun 21 Oct 2012 02:43:33 PM CDT
# Purpose of script: <+INSERT+>
# Author: Steven Boada
import pylab as pyl
from mk_galaxy_struc import mk_galaxy_struc
galaxies = mk_galaxy_struc()
f1 = pyl.figure(1,figsize=(8,8))
f1s1 = f1.add_subplot(221)
f1s2 = f1.add_subplot(222)
f1s3 = f1.add_subplot(223)
f1s4 = f1.add_subplot(224)
for galaxy in galaxies:
if galaxy.ston_I >= 30. and galaxy.Color_grad != None and galaxy.sersic !=\
None:
if galaxy.sersic < 1.:
col1 =f1s1.scatter(galaxy.ICD_IH, galaxy.Color_grad, s=50, c='k',
edgecolor='w')
if 1. < galaxy.sersic < 2.:
col2 =f1s2.scatter(galaxy.ICD_IH, galaxy.Color_grad, s=50,c='k',
edgecolor='w')
if 2. < galaxy.sersic < 3.:
col3 =f1s3.scatter(galaxy.ICD_IH, galaxy.Color_grad, s=50, c='k',
edgecolor='w')
if 3. < galaxy.sersic:
col4 =f1s4.scatter(galaxy.ICD_IH, galaxy.Color_grad, s=50, c='k',
edgecolor='w')
#pyl.scatter(galaxy.ICD_IH,galaxy.Color_grad,s=50,edgecolor='w')
#f1s1.vlines(0.04,-3.,1,lw=2,zorder=0)
#f1s1.hlines(0.0,-0.1,0.25,lw=2,zorder=0)
#pyl.text(0.24, 0.7, "Blue Core, Red Edge", size=15, ha="right", va="top",
# bbox = dict(boxstyle="round", ec=(1., 0.5, 0.5),
# fc=(1., 0.8, 0.8)))
#pyl.text(0.24, -2.5, "Red Core, Blue Edge", size=15, ha="right", va="top",
# bbox = dict(boxstyle="round", ec=(1., 0.5, 0.5),
# fc=(1., 0.8, 0.8)))
# Finish Plot
f1s1.set_xlim(-0.05,0.25)
f1s1.set_ylim(-3.,1)
f1s2.set_xlim(-0.05,0.25)
f1s2.set_ylim(-3.,1)
f1s3.set_xlim(-0.05,0.25)
f1s3.set_ylim(-3.,1)
f1s4.set_xlim(-0.05,0.25)
f1s4.set_ylim(-3.,1)
#pyl.subplots_adjust(left=0.15,bottom=0.15)
f1s1.set_xlabel(r'$\xi[I,H]$')
f1s1.set_ylabel('Color Gradient')
pyl.savefig('icd_vs_color_grad_vs_sersic_IH.eps', bbox_inches='tight')
pyl.show()
| 29.537313 | 79 | 0.624558 | [
"MIT"
] | boada/ICD | sandbox/legacy_plot_code/plot_icd_vs_colorgrad_vs_sersic.py | 1,979 | Python |
# -*- coding: utf-8 -*-
"""
babel.messages.mofile
~~~~~~~~~~~~~~~~~~~~~
Writing of files in the ``gettext`` MO (machine object) format.
:copyright: (c) 2013 by the Babel Team.
:license: BSD, see LICENSE for more details.
"""
import array
import struct
from babel.messages.catalog import Catalog, Message
from babel._compat import range_type, array_tobytes
LE_MAGIC = 0x950412de
BE_MAGIC = 0xde120495
def read_mo(fileobj):
"""Read a binary MO file from the given file-like object and return a
corresponding `Catalog` object.
:param fileobj: the file-like object to read the MO file from
:note: The implementation of this function is heavily based on the
``GNUTranslations._parse`` method of the ``gettext`` module in the
standard library.
"""
catalog = Catalog()
headers = {}
filename = getattr(fileobj, 'name', '')
buf = fileobj.read()
buflen = len(buf)
unpack = struct.unpack
# Parse the .mo file header, which consists of 5 little endian 32
# bit words.
magic = unpack('<I', buf[:4])[0] # Are we big endian or little endian?
if magic == LE_MAGIC:
version, msgcount, origidx, transidx = unpack('<4I', buf[4:20])
ii = '<II'
elif magic == BE_MAGIC:
version, msgcount, origidx, transidx = unpack('>4I', buf[4:20])
ii = '>II'
else:
raise IOError(0, 'Bad magic number', filename)
# Now put all messages from the .mo file buffer into the catalog
# dictionary
for i in range_type(0, msgcount):
mlen, moff = unpack(ii, buf[origidx:origidx + 8])
mend = moff + mlen
tlen, toff = unpack(ii, buf[transidx:transidx + 8])
tend = toff + tlen
if mend < buflen and tend < buflen:
msg = buf[moff:mend]
tmsg = buf[toff:tend]
else:
raise IOError(0, 'File is corrupt', filename)
# See if we're looking at GNU .mo conventions for metadata
if mlen == 0:
# Catalog description
lastkey = key = None
for item in tmsg.splitlines():
item = item.strip()
if not item:
continue
if b':' in item:
key, value = item.split(b':', 1)
lastkey = key = key.strip().lower()
headers[key] = value.strip()
elif lastkey:
headers[lastkey] += b'\n' + item
if b'\x04' in msg: # context
ctxt, msg = msg.split(b'\x04')
else:
ctxt = None
if b'\x00' in msg: # plural forms
msg = msg.split(b'\x00')
tmsg = tmsg.split(b'\x00')
if catalog.charset:
msg = [x.decode(catalog.charset) for x in msg]
tmsg = [x.decode(catalog.charset) for x in tmsg]
else:
if catalog.charset:
msg = msg.decode(catalog.charset)
tmsg = tmsg.decode(catalog.charset)
catalog[msg] = Message(msg, tmsg, context=ctxt)
# advance to next entry in the seek tables
origidx += 8
transidx += 8
catalog.mime_headers = headers.items()
return catalog
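# Header layout sketch (illustrative, not from the original module): the five
# 32-bit words unpacked above sit at the very start of the file.
#   bytes  0-3 : magic number (0x950412de little endian, 0xde120495 big endian)
#   bytes  4-7 : file format revision
#   bytes  8-11: number of messages
#   bytes 12-15: offset of the table of original (msgid) strings
#   bytes 16-19: offset of the table of translated (msgstr) strings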
def write_mo(fileobj, catalog, use_fuzzy=False):
"""Write a catalog to the specified file-like object using the GNU MO file
format.
>>> import sys
>>> from babel.messages import Catalog
>>> from gettext import GNUTranslations
>>> from babel._compat import BytesIO
>>> catalog = Catalog(locale='en_US')
>>> catalog.add('foo', 'Voh')
<Message ...>
>>> catalog.add((u'bar', u'baz'), (u'Bahr', u'Batz'))
<Message ...>
>>> catalog.add('fuz', 'Futz', flags=['fuzzy'])
<Message ...>
>>> catalog.add('Fizz', '')
<Message ...>
>>> catalog.add(('Fuzz', 'Fuzzes'), ('', ''))
<Message ...>
>>> buf = BytesIO()
>>> write_mo(buf, catalog)
>>> x = buf.seek(0)
>>> translations = GNUTranslations(fp=buf)
>>> if sys.version_info[0] >= 3:
... translations.ugettext = translations.gettext
... translations.ungettext = translations.ngettext
>>> translations.ugettext('foo')
u'Voh'
>>> translations.ungettext('bar', 'baz', 1)
u'Bahr'
>>> translations.ungettext('bar', 'baz', 2)
u'Batz'
>>> translations.ugettext('fuz')
u'fuz'
>>> translations.ugettext('Fizz')
u'Fizz'
>>> translations.ugettext('Fuzz')
u'Fuzz'
>>> translations.ugettext('Fuzzes')
u'Fuzzes'
:param fileobj: the file-like object to write to
:param catalog: the `Catalog` instance
:param use_fuzzy: whether translations marked as "fuzzy" should be included
in the output
"""
messages = list(catalog)
if not use_fuzzy:
messages[1:] = [m for m in messages[1:] if not m.fuzzy]
messages.sort()
ids = strs = b''
offsets = []
for message in messages:
# For each string, we need size and file offset. Each string is NUL
# terminated; the NUL does not count into the size.
if message.pluralizable:
msgid = b'\x00'.join([
msgid.encode(catalog.charset) for msgid in message.id
])
msgstrs = []
for idx, string in enumerate(message.string):
if not string:
msgstrs.append(message.id[min(int(idx), 1)])
else:
msgstrs.append(string)
msgstr = b'\x00'.join([
msgstr.encode(catalog.charset) for msgstr in msgstrs
])
else:
msgid = message.id.encode(catalog.charset)
if not message.string:
msgstr = message.id.encode(catalog.charset)
else:
msgstr = message.string.encode(catalog.charset)
if message.context:
msgid = b'\x04'.join([message.context.encode(catalog.charset),
msgid])
offsets.append((len(ids), len(msgid), len(strs), len(msgstr)))
ids += msgid + b'\x00'
strs += msgstr + b'\x00'
# The header is 7 32-bit unsigned integers. We don't use hash tables, so
# the keys start right after the index tables.
keystart = 7 * 4 + 16 * len(messages)
valuestart = keystart + len(ids)
# The string table first has the list of keys, then the list of values.
# Each entry has first the size of the string, then the file offset.
koffsets = []
voffsets = []
for o1, l1, o2, l2 in offsets:
koffsets += [l1, o1 + keystart]
voffsets += [l2, o2 + valuestart]
offsets = koffsets + voffsets
fileobj.write(struct.pack('Iiiiiii',
LE_MAGIC, # magic
0, # version
len(messages), # number of entries
7 * 4, # start of key index
7 * 4 + len(messages) * 8, # start of value index
0, 0 # size and offset of hash table
) + array_tobytes(array.array("i", offsets)) + ids + strs)
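# Round-trip sketch (illustrative, not part of the original module): write a
# catalog to an in-memory buffer with write_mo() and load it back with
# read_mo(). Only the public API defined above is assumed.
def _example_roundtrip():
    from babel._compat import BytesIO
    catalog = Catalog(locale='en_US')
    catalog.add('foo', 'Voh')
    buf = BytesIO()
    write_mo(buf, catalog)
    buf.seek(0)
    return read_mo(buf)  # a Catalog containing the 'foo' -> 'Voh' message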
| 34.084112 | 89 | 0.541815 | [
"Apache-2.0"
] | 10088/hue | desktop/core/ext-py/Babel-2.5.1/babel/messages/mofile.py | 7,294 | Python |
import pyaf.Bench.web_traffic.Forecaster as fo
PROJECTS = ['de.wikipedia.org']
data_dir = 'data/web-traffic-time-series-forecasting'
lForecaster = fo.cProjectForecaster()
lForecaster.mDataDirectory = data_dir
lForecaster.mBackendName = 'pyaf_default_clean'
lForecaster.mKeysFileName = 'key_1.csv.zip'
last_date = '2016-12-31'
horizon = 60
lForecaster.mKeysFileName = 'key_1.csv.zip'
lForecaster.forecast(PROJECTS, last_date , horizon)
| 27.4375 | 53 | 0.794989 | [
"BSD-3-Clause"
] | antoinecarme/pyaf | tests/bench/web_traffic_jobs/de.wikipedia.org/test_web_traffic_de.wikipedia.org_pyaf_default_clean.py | 439 | Python |
import sys
import logging
import urlparse
import urllib
import redis
from flask import Flask, current_app
from flask_sslify import SSLify
from werkzeug.contrib.fixers import ProxyFix
from werkzeug.routing import BaseConverter
from statsd import StatsClient
from flask_mail import Mail
from flask_limiter import Limiter
from flask_limiter.util import get_ipaddr
from flask_migrate import Migrate
from redash import settings
from redash.query_runner import import_query_runners
from redash.destinations import import_destinations
__version__ = '7.0.0-beta'
import os
if os.environ.get("REMOTE_DEBUG"):
import ptvsd
ptvsd.enable_attach(address=('0.0.0.0', 5678))
def setup_logging():
handler = logging.StreamHandler(sys.stdout if settings.LOG_STDOUT else sys.stderr)
formatter = logging.Formatter(settings.LOG_FORMAT)
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(settings.LOG_LEVEL)
# Make noisy libraries less noisy
if settings.LOG_LEVEL != "DEBUG":
logging.getLogger("passlib").setLevel("ERROR")
logging.getLogger("requests.packages.urllib3").setLevel("ERROR")
logging.getLogger("snowflake.connector").setLevel("ERROR")
logging.getLogger('apiclient').setLevel("ERROR")
def create_redis_connection():
logging.debug("Creating Redis connection (%s)", settings.REDIS_URL)
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.scheme == 'redis+socket':
qs = urlparse.parse_qs(redis_url.query)
if 'virtual_host' in qs:
db = qs['virtual_host'][0]
else:
db = 0
client = redis.StrictRedis(unix_socket_path=redis_url.path, db=db)
else:
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
# Redis passwords might be quoted with special characters
redis_password = redis_url.password and urllib.unquote(redis_url.password)
client = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_password)
return client
setup_logging()
redis_connection = create_redis_connection()
mail = Mail()
migrate = Migrate()
mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE)
import_query_runners(settings.QUERY_RUNNERS)
import_destinations(settings.DESTINATIONS)
from redash.version_check import reset_new_version_status
reset_new_version_status()
class SlugConverter(BaseConverter):
def to_python(self, value):
        # This is a workaround for when we enable multi-org and some files are being called by the index rule:
# for path in settings.STATIC_ASSETS_PATHS:
# full_path = safe_join(path, value)
# if os.path.isfile(full_path):
# raise ValidationError()
return value
def to_url(self, value):
return value
def create_app():
from redash import authentication, extensions, handlers
from redash.handlers.webpack import configure_webpack
from redash.handlers import chrome_logger
from redash.models import db, users
from redash.metrics.request import provision_app
from redash.utils import sentry
sentry.init()
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
# Make sure we get the right referral address even behind proxies like nginx.
app.wsgi_app = ProxyFix(app.wsgi_app, settings.PROXIES_COUNT)
app.url_map.converters['org_slug'] = SlugConverter
if settings.ENFORCE_HTTPS:
SSLify(app, skips=['ping'])
# configure our database
app.config['SQLALCHEMY_DATABASE_URI'] = settings.SQLALCHEMY_DATABASE_URI
app.config.update(settings.all_settings())
provision_app(app)
db.init_app(app)
migrate.init_app(app, db)
mail.init_app(app)
authentication.init_app(app)
limiter.init_app(app)
handlers.init_app(app)
configure_webpack(app)
extensions.init_extensions(app)
chrome_logger.init_app(app)
users.init_app(app)
return app
| 31.258993 | 118 | 0.729574 | [
"BSD-2-Clause"
] | CodeGerm/redash | redash/__init__.py | 4,345 | Python |
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 5
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class SnapshotScheduleExtended(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'alias': 'str',
'duration': 'int',
'id': 'int',
'name': 'str',
'next_run': 'int',
'next_snapshot': 'str',
'path': 'str',
'pattern': 'str',
'schedule': 'str'
}
attribute_map = {
'alias': 'alias',
'duration': 'duration',
'id': 'id',
'name': 'name',
'next_run': 'next_run',
'next_snapshot': 'next_snapshot',
'path': 'path',
'pattern': 'pattern',
'schedule': 'schedule'
}
def __init__(self, alias=None, duration=None, id=None, name=None, next_run=None, next_snapshot=None, path=None, pattern=None, schedule=None): # noqa: E501
"""SnapshotScheduleExtended - a model defined in Swagger""" # noqa: E501
self._alias = None
self._duration = None
self._id = None
self._name = None
self._next_run = None
self._next_snapshot = None
self._path = None
self._pattern = None
self._schedule = None
self.discriminator = None
if alias is not None:
self.alias = alias
if duration is not None:
self.duration = duration
if id is not None:
self.id = id
if name is not None:
self.name = name
if next_run is not None:
self.next_run = next_run
if next_snapshot is not None:
self.next_snapshot = next_snapshot
if path is not None:
self.path = path
if pattern is not None:
self.pattern = pattern
if schedule is not None:
self.schedule = schedule
@property
def alias(self):
"""Gets the alias of this SnapshotScheduleExtended. # noqa: E501
Alias name to create for each snapshot. # noqa: E501
:return: The alias of this SnapshotScheduleExtended. # noqa: E501
:rtype: str
"""
return self._alias
@alias.setter
def alias(self, alias):
"""Sets the alias of this SnapshotScheduleExtended.
Alias name to create for each snapshot. # noqa: E501
:param alias: The alias of this SnapshotScheduleExtended. # noqa: E501
:type: str
"""
self._alias = alias
@property
def duration(self):
"""Gets the duration of this SnapshotScheduleExtended. # noqa: E501
        Time in seconds added to the creation time to construct the expiration time. # noqa: E501
:return: The duration of this SnapshotScheduleExtended. # noqa: E501
:rtype: int
"""
return self._duration
@duration.setter
def duration(self, duration):
"""Sets the duration of this SnapshotScheduleExtended.
        Time in seconds added to the creation time to construct the expiration time. # noqa: E501
:param duration: The duration of this SnapshotScheduleExtended. # noqa: E501
:type: int
"""
self._duration = duration
@property
def id(self):
"""Gets the id of this SnapshotScheduleExtended. # noqa: E501
The system ID given to the schedule. # noqa: E501
:return: The id of this SnapshotScheduleExtended. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this SnapshotScheduleExtended.
The system ID given to the schedule. # noqa: E501
:param id: The id of this SnapshotScheduleExtended. # noqa: E501
:type: int
"""
self._id = id
@property
def name(self):
"""Gets the name of this SnapshotScheduleExtended. # noqa: E501
The schedule name. # noqa: E501
:return: The name of this SnapshotScheduleExtended. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this SnapshotScheduleExtended.
The schedule name. # noqa: E501
:param name: The name of this SnapshotScheduleExtended. # noqa: E501
:type: str
"""
self._name = name
@property
def next_run(self):
"""Gets the next_run of this SnapshotScheduleExtended. # noqa: E501
Unix Epoch time of next snapshot to be created. # noqa: E501
:return: The next_run of this SnapshotScheduleExtended. # noqa: E501
:rtype: int
"""
return self._next_run
@next_run.setter
def next_run(self, next_run):
"""Sets the next_run of this SnapshotScheduleExtended.
Unix Epoch time of next snapshot to be created. # noqa: E501
:param next_run: The next_run of this SnapshotScheduleExtended. # noqa: E501
:type: int
"""
self._next_run = next_run
@property
def next_snapshot(self):
"""Gets the next_snapshot of this SnapshotScheduleExtended. # noqa: E501
Formatted name (see pattern) of next snapshot to be created. # noqa: E501
:return: The next_snapshot of this SnapshotScheduleExtended. # noqa: E501
:rtype: str
"""
return self._next_snapshot
@next_snapshot.setter
def next_snapshot(self, next_snapshot):
"""Sets the next_snapshot of this SnapshotScheduleExtended.
Formatted name (see pattern) of next snapshot to be created. # noqa: E501
:param next_snapshot: The next_snapshot of this SnapshotScheduleExtended. # noqa: E501
:type: str
"""
self._next_snapshot = next_snapshot
@property
def path(self):
"""Gets the path of this SnapshotScheduleExtended. # noqa: E501
The /ifs path snapshotted. # noqa: E501
:return: The path of this SnapshotScheduleExtended. # noqa: E501
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""Sets the path of this SnapshotScheduleExtended.
The /ifs path snapshotted. # noqa: E501
:param path: The path of this SnapshotScheduleExtended. # noqa: E501
:type: str
"""
self._path = path
@property
def pattern(self):
"""Gets the pattern of this SnapshotScheduleExtended. # noqa: E501
Pattern expanded with strftime to create snapshot name. # noqa: E501
:return: The pattern of this SnapshotScheduleExtended. # noqa: E501
:rtype: str
"""
return self._pattern
@pattern.setter
def pattern(self, pattern):
"""Sets the pattern of this SnapshotScheduleExtended.
Pattern expanded with strftime to create snapshot name. # noqa: E501
:param pattern: The pattern of this SnapshotScheduleExtended. # noqa: E501
:type: str
"""
self._pattern = pattern
@property
def schedule(self):
"""Gets the schedule of this SnapshotScheduleExtended. # noqa: E501
The isidate compatible natural language description of the schedule. # noqa: E501
:return: The schedule of this SnapshotScheduleExtended. # noqa: E501
:rtype: str
"""
return self._schedule
@schedule.setter
def schedule(self, schedule):
"""Sets the schedule of this SnapshotScheduleExtended.
The isidate compatible natural language description of the schedule. # noqa: E501
:param schedule: The schedule of this SnapshotScheduleExtended. # noqa: E501
:type: str
"""
self._schedule = schedule
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, SnapshotScheduleExtended):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
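# Usage sketch (illustrative, not part of the generated module): construct a
# schedule object and serialize it with to_dict(); all field values here are
# made-up examples.
def _example_snapshot_schedule():
    schedule = SnapshotScheduleExtended(
        name='daily', path='/ifs/data', schedule='every day at 00:00',
        pattern='daily_%Y-%m-%d', duration=86400)
    return schedule.to_dict()  # plain dict suitable for JSON encoding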
| 28.498525 | 159 | 0.589898 | [
"Unlicense"
] | Isilon/isilon_sdk_python | isi_sdk_8_1_0/isi_sdk_8_1_0/models/snapshot_schedule_extended.py | 9,661 | Python |
from decimal import Decimal as D
from django.utils.translation import ugettext_lazy as _
from oscar.core.loading import get_class, get_classes, get_model
from oscar.templatetags.currency_filters import currency
Benefit = get_model('offer', 'Benefit')
BasketDiscount, SHIPPING_DISCOUNT, ZERO_DISCOUNT = get_classes('offer.results', [
'BasketDiscount', 'SHIPPING_DISCOUNT', 'ZERO_DISCOUNT'])
CoverageCondition, ValueCondition = get_classes('offer.conditions', ['CoverageCondition', 'ValueCondition'])
range_anchor = get_class('offer.utils', 'range_anchor')
__all__ = [
'PercentageDiscountBenefit', 'AbsoluteDiscountBenefit', 'FixedPriceBenefit',
'ShippingBenefit', 'MultibuyDiscountBenefit',
'ShippingAbsoluteDiscountBenefit', 'ShippingFixedPriceBenefit',
'ShippingPercentageDiscountBenefit',
]
def apply_discount(line, discount, quantity, offer=None):
"""
    Apply the given discount to the passed basket line
"""
line.discount(discount, quantity, incl_tax=False, offer=offer)
class PercentageDiscountBenefit(Benefit):
"""
An offer benefit that gives a percentage discount
"""
_description = _("%(value)s%% discount on %(range)s")
@property
def name(self):
return self._description % {
'value': self.value,
'range': self.range.name}
@property
def description(self):
return self._description % {
'value': self.value,
'range': range_anchor(self.range)}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Percentage discount benefit")
verbose_name_plural = _("Percentage discount benefits")
def apply(self, basket, condition, offer, discount_percent=None,
max_total_discount=None):
if discount_percent is None:
discount_percent = self.value
discount_amount_available = max_total_discount
line_tuples = self.get_applicable_lines(offer, basket)
discount_percent = min(discount_percent, D('100.0'))
discount = D('0.00')
affected_items = 0
max_affected_items = self._effective_max_affected_items()
affected_lines = []
for price, line in line_tuples:
if affected_items >= max_affected_items:
break
if discount_amount_available == 0:
break
quantity_affected = min(
line.quantity_without_offer_discount(offer),
max_affected_items - affected_items)
line_discount = self.round(discount_percent / D('100.0') * price
* int(quantity_affected))
if discount_amount_available is not None:
line_discount = min(line_discount, discount_amount_available)
discount_amount_available -= line_discount
apply_discount(line, line_discount, quantity_affected, offer)
affected_lines.append((line, line_discount, quantity_affected))
affected_items += quantity_affected
discount += line_discount
if discount > 0:
condition.consume_items(offer, basket, affected_lines)
return BasketDiscount(discount)
class AbsoluteDiscountBenefit(Benefit):
"""
An offer benefit that gives an absolute discount
"""
_description = _("%(value)s discount on %(range)s")
@property
def name(self):
return self._description % {
'value': currency(self.value),
'range': self.range.name.lower()}
@property
def description(self):
return self._description % {
'value': currency(self.value),
'range': range_anchor(self.range)}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Absolute discount benefit")
verbose_name_plural = _("Absolute discount benefits")
def apply(self, basket, condition, offer, discount_amount=None,
max_total_discount=None):
if discount_amount is None:
discount_amount = self.value
# Fetch basket lines that are in the range and available to be used in
# an offer.
line_tuples = self.get_applicable_lines(offer, basket)
# Determine which lines can have the discount applied to them
max_affected_items = self._effective_max_affected_items()
num_affected_items = 0
affected_items_total = D('0.00')
lines_to_discount = []
for price, line in line_tuples:
if num_affected_items >= max_affected_items:
break
qty = min(
line.quantity_without_offer_discount(offer),
max_affected_items - num_affected_items)
lines_to_discount.append((line, price, qty))
num_affected_items += qty
affected_items_total += qty * price
# Ensure we don't try to apply a discount larger than the total of the
# matching items.
discount = min(discount_amount, affected_items_total)
if max_total_discount is not None:
discount = min(discount, max_total_discount)
if discount == 0:
return ZERO_DISCOUNT
# Apply discount equally amongst them
affected_lines = []
applied_discount = D('0.00')
for i, (line, price, qty) in enumerate(lines_to_discount):
if i == len(lines_to_discount) - 1:
# If last line, then take the delta as the discount to ensure
# the total discount is correct and doesn't mismatch due to
# rounding.
line_discount = discount - applied_discount
else:
# Calculate a weighted discount for the line
line_discount = self.round(
((price * qty) / affected_items_total) * discount)
apply_discount(line, line_discount, qty, offer)
affected_lines.append((line, line_discount, qty))
applied_discount += line_discount
condition.consume_items(offer, basket, affected_lines)
return BasketDiscount(discount)
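# Worked sketch (illustrative, not part of the original module): why the last
# line takes "discount - applied_discount" instead of its own weighted share.
# Splitting a 1.00 discount over three 1.00 lines would give 0.33 + 0.33 +
# 0.33 = 0.99 if every share were rounded independently; handing the last
# line the remaining delta (0.34) keeps the basket total exactly 1.00.
def _example_weighted_split():
    discount = D('1.00')
    prices = [D('1.00'), D('1.00'), D('1.00')]
    total = sum(prices)
    applied = D('0.00')
    shares = []
    for i, price in enumerate(prices):
        if i == len(prices) - 1:
            share = discount - applied  # absorb rounding drift on the last line
        else:
            share = (price / total * discount).quantize(D('0.01'))
        shares.append(share)
        applied += share
    return shares  # [Decimal('0.33'), Decimal('0.33'), Decimal('0.34')]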
class FixedPriceBenefit(Benefit):
"""
An offer benefit that gives the items in the condition for a
fixed price. This is useful for "bundle" offers.
Note that we ignore the benefit range here and only give a fixed price
for the products in the condition range. The condition cannot be a value
condition.
We also ignore the max_affected_items setting.
"""
_description = _("The products that meet the condition are sold "
"for %(amount)s")
@property
def name(self):
return self._description % {
'amount': currency(self.value)}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Fixed price benefit")
verbose_name_plural = _("Fixed price benefits")
def apply(self, basket, condition, offer): # noqa (too complex (10))
if isinstance(condition, ValueCondition):
return ZERO_DISCOUNT
# Fetch basket lines that are in the range and available to be used in
# an offer.
line_tuples = self.get_applicable_lines(offer, basket,
range=condition.range)
if not line_tuples:
return ZERO_DISCOUNT
# Determine the lines to consume
num_permitted = int(condition.value)
num_affected = 0
value_affected = D('0.00')
covered_lines = []
for price, line in line_tuples:
if isinstance(condition, CoverageCondition):
quantity_affected = 1
else:
quantity_affected = min(
line.quantity_without_offer_discount(offer),
num_permitted - num_affected)
num_affected += quantity_affected
value_affected += quantity_affected * price
covered_lines.append((price, line, quantity_affected))
if num_affected >= num_permitted:
break
discount = max(value_affected - self.value, D('0.00'))
if not discount:
return ZERO_DISCOUNT
# Apply discount to the affected lines
discount_applied = D('0.00')
last_line = covered_lines[-1][1]
for price, line, quantity in covered_lines:
if line == last_line:
# If last line, we just take the difference to ensure that
# rounding doesn't lead to an off-by-one error
line_discount = discount - discount_applied
else:
line_discount = self.round(
discount * (price * quantity) / value_affected)
apply_discount(line, line_discount, quantity, offer)
discount_applied += line_discount
return BasketDiscount(discount)
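# Worked example (illustrative, not from the original module): with a coverage
# condition over three products priced 12.00, 10.00 and 8.00 (value_affected =
# 30.00) and a fixed bundle price of 20.00, the benefit computes discount =
# 30.00 - 20.00 = 10.00 and spreads it over the covered lines in proportion to
# their value: 4.00, 3.33 and the remaining 2.67 delta on the last line.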
class MultibuyDiscountBenefit(Benefit):
_description = _("Cheapest product from %(range)s is free")
@property
def name(self):
return self._description % {
'range': self.range.name.lower()}
@property
def description(self):
return self._description % {
'range': range_anchor(self.range)}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Multibuy discount benefit")
verbose_name_plural = _("Multibuy discount benefits")
def apply(self, basket, condition, offer):
line_tuples = self.get_applicable_lines(offer, basket)
if not line_tuples:
return ZERO_DISCOUNT
# Cheapest line gives free product
discount, line = line_tuples[0]
apply_discount(line, discount, 1, offer)
affected_lines = [(line, discount, 1)]
condition.consume_items(offer, basket, affected_lines)
return BasketDiscount(discount)
# =================
# Shipping benefits
# =================
class ShippingBenefit(Benefit):
def apply(self, basket, condition, offer):
condition.consume_items(offer, basket, affected_lines=())
return SHIPPING_DISCOUNT
class Meta:
app_label = 'offer'
proxy = True
class ShippingAbsoluteDiscountBenefit(ShippingBenefit):
_description = _("%(amount)s off shipping cost")
@property
def name(self):
return self._description % {
'amount': currency(self.value)}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Shipping absolute discount benefit")
verbose_name_plural = _("Shipping absolute discount benefits")
def shipping_discount(self, charge):
return min(charge, self.value)
class ShippingFixedPriceBenefit(ShippingBenefit):
_description = _("Get shipping for %(amount)s")
@property
def name(self):
return self._description % {
'amount': currency(self.value)}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Fixed price shipping benefit")
verbose_name_plural = _("Fixed price shipping benefits")
def shipping_discount(self, charge):
if charge < self.value:
return D('0.00')
return charge - self.value
class ShippingPercentageDiscountBenefit(ShippingBenefit):
_description = _("%(value)s%% off of shipping cost")
@property
def name(self):
return self._description % {
'value': self.value}
class Meta:
app_label = 'offer'
proxy = True
verbose_name = _("Shipping percentage discount benefit")
verbose_name_plural = _("Shipping percentage discount benefits")
def shipping_discount(self, charge):
discount = charge * self.value / D('100.0')
return discount.quantize(D('0.01'))
| 33.862857 | 108 | 0.626645 | [
"BSD-3-Clause"
] | AMuratTuran/mkn | src/oscar/apps/offer/benefits.py | 11,852 | Python |
import os
import shutil
import tempfile
import time
class TemporaryDirectory():
def __init__(self, remove: bool=True, prefix: str='tmp'):
self._remove = remove
self._prefix = prefix
def __enter__(self) -> str:
if 'KACHERY_STORAGE_DIR' in os.environ:
storage_dir = os.getenv('KACHERY_STORAGE_DIR')
else:
storage_dir = None
if storage_dir is not None:
dirpath = os.path.join(storage_dir, 'tmp')
if not os.path.exists(dirpath):
try:
os.mkdir(dirpath)
except:
# maybe somebody else created this directory
                    if not os.path.exists(dirpath):
raise Exception(f'Unexpected problem creating temporary directory: {dirpath}')
else:
dirpath = None
self._path = str(tempfile.mkdtemp(prefix=self._prefix, dir=dirpath))
return self._path
def __exit__(self, exc_type, exc_val, exc_tb):
if self._remove:
_rmdir_with_retries(self._path, num_retries=5)
def path(self):
return self._path
def _rmdir_with_retries(dirname: str, num_retries: int, delay_between_tries: float=1):
for retry_num in range(1, num_retries + 1):
if not os.path.exists(dirname):
return
try:
shutil.rmtree(dirname)
break
except: # pragma: no cover
if retry_num < num_retries:
print('Retrying to remove directory: {}'.format(dirname))
time.sleep(delay_between_tries)
else:
raise Exception('Unable to remove directory after {} tries: {}'.format(num_retries, dirname))
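# Usage sketch (illustrative, not part of the original module): the class above
# is a context manager, so callers typically use it like this.
if __name__ == '__main__':
    with TemporaryDirectory(remove=True, prefix='demo-') as tmpdir:
        # tmpdir is created under $KACHERY_STORAGE_DIR/tmp when that variable
        # is set, otherwise under the system default temporary directory.
        print(tmpdir)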
| 33.307692 | 109 | 0.586028 | [
"Apache-2.0"
] | flatironinstitute/kachery | kachery/_temporarydirectory.py | 1,732 | Python |
from pypy.objspace.std.multimethod import *
from rpython.annotator.annrpython import RPythonAnnotator
class W_Root(object):
pass
class W_Int(W_Root):
pass
class W_Str(W_Root):
pass
str_w = MultiMethodTable(1, root_class=W_Root, argnames_before=['space'])
int_w = MultiMethodTable(1, root_class=W_Root, argnames_before=['space'])
def int_w__Int(space, w_x):
assert space == 'space'
assert isinstance(w_x, W_Int)
return 1
def str_w__Str(space, w_x):
assert space == 'space'
assert isinstance(w_x, W_Str)
return "string"
int_w.register(int_w__Int, W_Int)
str_w.register(str_w__Str, W_Str)
def setup_module(mod):
typeorder = {
W_Int: [(W_Int, None)],
W_Str: [(W_Str, None)],
}
mod.typeorder = typeorder
mod.str_w1 = str_w.install('__str_w', [typeorder])
mod.int_w1 = int_w.install('__int_w', [typeorder])
def test_str_w_ann():
a = RPythonAnnotator()
s1 = a.build_types(str_w1,[str, W_Str])
s2 = a.build_types(str_w1,[str, W_Root])
assert s1.knowntype == str
assert s2.knowntype == str
def test_int_w_ann():
a = RPythonAnnotator()
s1 = a.build_types(int_w1,[str, W_Int])
s2 = a.build_types(int_w1,[str, W_Str])
assert s1.knowntype == int
assert s2.knowntype == int
| 22.877193 | 73 | 0.669479 | [
"MIT"
] | kantai/passe-pypy-taint-tracking | pypy/objspace/std/test/test_annmm.py | 1,304 | Python |
# --------------------------------------------------------
# Deformable Convolutional Networks
# Copyright (c) 2017 Microsoft
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Modified by Haozhi Qi, from py-faster-rcnn (https://github.com/rbgirshick/py-faster-rcnn)
# --------------------------------------------------------
"""
Pascal VOC database
This class loads ground-truth annotations from the standard Pascal VOC XML data format
and transforms them into IMDB format. Selective search is used for proposals; see the roidb
function. Results are written in the Pascal VOC format. Evaluation is based on the mAP
criterion.
"""
import cPickle
import cv2
import os
import numpy as np
import PIL
from imdb import IMDB
from pascal_voc_eval import voc_eval #voc_eval_sds
from ds_utils import unique_boxes, filter_small_boxes
class PascalVOC(IMDB):
def __init__(self, image_set, root_path, devkit_path, result_path=None, mask_size=-1, binary_thresh=None):
"""
fill basic information to initialize imdb
:param image_set: 2007_trainval, 2007_test, etc
:param root_path: 'selective_search_data' and 'cache'
:param devkit_path: data and results
:return: imdb object
"""
year = image_set.split('_')[0]
image_set = image_set[len(year) + 1 : len(image_set)]
super(PascalVOC, self).__init__('voc_' + year, image_set, root_path, devkit_path, result_path) # set self.name
self.year = year
self.root_path = root_path
self.devkit_path = devkit_path
self.data_path = os.path.join(devkit_path, 'VOC' + year)
self.classes = ['__background__', # always index 0
'aeroplane', 'bicycle', 'bird', 'boat',
'bottle', 'bus', 'car', 'cat', 'chair',
'cow', 'diningtable', 'dog', 'horse',
'motorbike', 'person', 'pottedplant',
'sheep', 'sofa', 'train', 'tvmonitor']
self.num_classes = len(self.classes)
self.image_set_index = self.load_image_set_index()
self.num_images = len(self.image_set_index)
print 'num_images', self.num_images
self.mask_size = mask_size
self.binary_thresh = binary_thresh
self.config = {'comp_id': 'comp4',
'use_diff': False,
'min_size': 2}
def load_image_set_index(self):
"""
find out which indexes correspond to given image set (train or val)
:return:
"""
image_set_index_file = os.path.join(self.data_path, 'ImageSets', 'Main', self.image_set + '.txt')
assert os.path.exists(image_set_index_file), 'Path does not exist: {}'.format(image_set_index_file)
with open(image_set_index_file) as f:
image_set_index = [x.strip() for x in f.readlines()]
return image_set_index
def image_path_from_index(self, index):
"""
given image index, find out full path
:param index: index of a specific image
:return: full path of this image
"""
image_file = os.path.join(self.data_path, 'JPEGImages', index + '.jpg')
assert os.path.exists(image_file), 'Path does not exist: {}'.format(image_file)
return image_file
def segmentation_path_from_index(self, index):
"""
given image index, find out the full path of segmentation class
:param index: index of a specific image
:return: full path of segmentation class
"""
seg_class_file = os.path.join(self.data_path, 'SegmentationClass', index + '.png')
assert os.path.exists(seg_class_file), 'Path does not exist: {}'.format(seg_class_file)
return seg_class_file
def gt_roidb(self):
"""
return ground truth image regions database
:return: imdb[image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print '{} gt roidb loaded from {}'.format(self.name, cache_file)
return roidb
gt_roidb = [self.load_pascal_annotation(index) for index in self.image_set_index]
with open(cache_file, 'wb') as fid:
cPickle.dump(gt_roidb, fid, cPickle.HIGHEST_PROTOCOL)
print 'wrote gt roidb to {}'.format(cache_file)
return gt_roidb
def gt_segdb(self):
"""
return ground truth image regions database
:return: imdb[image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
cache_file = os.path.join(self.cache_path, self.name + '_gt_segdb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
segdb = cPickle.load(fid)
print '{} gt segdb loaded from {}'.format(self.name, cache_file)
return segdb
gt_segdb = [self.load_pascal_segmentation_annotation(index) for index in self.image_set_index]
with open(cache_file, 'wb') as fid:
cPickle.dump(gt_segdb, fid, cPickle.HIGHEST_PROTOCOL)
print 'wrote gt segdb to {}'.format(cache_file)
return gt_segdb
def load_pascal_annotation(self, index):
"""
for a given index, load image and bounding boxes info from XML file
:param index: index of a specific image
:return: record['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
import xml.etree.ElementTree as ET
roi_rec = dict()
roi_rec['image'] = self.image_path_from_index(index)
filename = os.path.join(self.data_path, 'Annotations', index + '.xml')
tree = ET.parse(filename)
size = tree.find('size')
roi_rec['height'] = float(size.find('height').text)
roi_rec['width'] = float(size.find('width').text)
#im_size = cv2.imread(roi_rec['image'], cv2.IMREAD_COLOR|cv2.IMREAD_IGNORE_ORIENTATION).shape
#assert im_size[0] == roi_rec['height'] and im_size[1] == roi_rec['width']
objs = tree.findall('object')
if not self.config['use_diff']:
non_diff_objs = [obj for obj in objs if int(obj.find('difficult').text) == 0]
objs = non_diff_objs
num_objs = len(objs)
boxes = np.zeros((num_objs, 4), dtype=np.uint16)
gt_classes = np.zeros((num_objs), dtype=np.int32)
overlaps = np.zeros((num_objs, self.num_classes), dtype=np.float32)
class_to_index = dict(zip(self.classes, range(self.num_classes)))
# Load object bounding boxes into a data frame.
for ix, obj in enumerate(objs):
bbox = obj.find('bndbox')
# Make pixel indexes 0-based
x1 = float(bbox.find('xmin').text) - 1
y1 = float(bbox.find('ymin').text) - 1
x2 = float(bbox.find('xmax').text) - 1
y2 = float(bbox.find('ymax').text) - 1
cls = class_to_index[obj.find('name').text.lower().strip()]
boxes[ix, :] = [x1, y1, x2, y2]
gt_classes[ix] = cls
overlaps[ix, cls] = 1.0
roi_rec.update({'boxes': boxes,
'gt_classes': gt_classes,
'gt_overlaps': overlaps,
'max_classes': overlaps.argmax(axis=1),
'max_overlaps': overlaps.max(axis=1),
'flipped': False})
return roi_rec
def load_selective_search_roidb(self, gt_roidb):
"""
turn selective search proposals into selective search roidb
:param gt_roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
:return: roidb: [image_index]['boxes', 'gt_classes', 'gt_overlaps', 'flipped']
"""
import scipy.io
matfile = os.path.join(self.root_path, 'selective_search_data', self.name + '.mat')
assert os.path.exists(matfile), 'selective search data does not exist: {}'.format(matfile)
raw_data = scipy.io.loadmat(matfile)['boxes'].ravel() # original was dict ['images', 'boxes']
box_list = []
for i in range(raw_data.shape[0]):
boxes = raw_data[i][:, (1, 0, 3, 2)] - 1 # pascal voc dataset starts from 1.
keep = unique_boxes(boxes)
boxes = boxes[keep, :]
keep = filter_small_boxes(boxes, self.config['min_size'])
boxes = boxes[keep, :]
box_list.append(boxes)
return self.create_roidb_from_box_list(box_list, gt_roidb)
def selective_search_roidb(self, gt_roidb, append_gt=False):
"""
get selective search roidb and ground truth roidb
:param gt_roidb: ground truth roidb
:param append_gt: append ground truth
:return: roidb of selective search
"""
cache_file = os.path.join(self.cache_path, self.name + '_ss_roidb.pkl')
if os.path.exists(cache_file):
with open(cache_file, 'rb') as fid:
roidb = cPickle.load(fid)
print '{} ss roidb loaded from {}'.format(self.name, cache_file)
return roidb
if append_gt:
print 'appending ground truth annotations'
ss_roidb = self.load_selective_search_roidb(gt_roidb)
roidb = IMDB.merge_roidbs(gt_roidb, ss_roidb)
else:
roidb = self.load_selective_search_roidb(gt_roidb)
with open(cache_file, 'wb') as fid:
cPickle.dump(roidb, fid, cPickle.HIGHEST_PROTOCOL)
print 'wrote ss roidb to {}'.format(cache_file)
return roidb
def load_pascal_segmentation_annotation(self, index):
"""
        for a given index, load the image info and segmentation class annotation path
:param index: index of a specific image
:return: record['seg_cls_path', 'flipped']
"""
import xml.etree.ElementTree as ET
seg_rec = dict()
seg_rec['image'] = self.image_path_from_index(index)
size = cv2.imread(seg_rec['image']).shape
seg_rec['height'] = size[0]
seg_rec['width'] = size[1]
seg_rec['seg_cls_path'] = self.segmentation_path_from_index(index)
seg_rec['flipped'] = False
return seg_rec
def evaluate_detections(self, detections):
"""
top level evaluations
:param detections: result matrix, [bbox, confidence]
:return: None
"""
# make all these folders for results
result_dir = os.path.join(self.result_path, 'results')
if not os.path.exists(result_dir):
os.mkdir(result_dir)
year_folder = os.path.join(self.result_path, 'results', 'VOC' + self.year)
if not os.path.exists(year_folder):
os.mkdir(year_folder)
res_file_folder = os.path.join(self.result_path, 'results', 'VOC' + self.year, 'Main')
if not os.path.exists(res_file_folder):
os.mkdir(res_file_folder)
self.write_pascal_results(detections)
info = self.do_python_eval()
return info
def evaluate_segmentations(self, pred_segmentations=None):
"""
top level evaluations
:param pred_segmentations: the pred segmentation result
:return: the evaluation results
"""
# make all these folders for results
if not (pred_segmentations is None):
self.write_pascal_segmentation_result(pred_segmentations)
info = self._py_evaluate_segmentation()
return info
def write_pascal_segmentation_result(self, pred_segmentations):
"""
Write pred segmentation to res_file_folder
:param pred_segmentations: the pred segmentation results
:param res_file_folder: the saving folder
:return: [None]
"""
result_dir = os.path.join(self.result_path, 'results')
if not os.path.exists(result_dir):
os.mkdir(result_dir)
year_folder = os.path.join(self.result_path, 'results', 'VOC' + self.year)
if not os.path.exists(year_folder):
os.mkdir(year_folder)
res_file_folder = os.path.join(self.result_path, 'results', 'VOC' + self.year, 'Segmentation')
if not os.path.exists(res_file_folder):
os.mkdir(res_file_folder)
result_dir = os.path.join(self.result_path, 'results', 'VOC' + self.year, 'Segmentation')
if not os.path.exists(result_dir):
os.mkdir(result_dir)
pallete = self.get_pallete(256)
for i, index in enumerate(self.image_set_index):
segmentation_result = np.uint8(np.squeeze(np.copy(pred_segmentations[i])))
segmentation_result = PIL.Image.fromarray(segmentation_result)
segmentation_result.putpalette(pallete)
segmentation_result.save(os.path.join(result_dir, '%s.png'%(index)))
def get_pallete(self, num_cls):
"""
this function is to get the colormap for visualizing the segmentation mask
        :param num_cls: the number of visualized classes
:return: the pallete
"""
n = num_cls
pallete = [0]*(n*3)
for j in xrange(0,n):
lab = j
pallete[j*3+0] = 0
pallete[j*3+1] = 0
pallete[j*3+2] = 0
i = 0
while (lab > 0):
pallete[j*3+0] |= (((lab >> 0) & 1) << (7-i))
pallete[j*3+1] |= (((lab >> 1) & 1) << (7-i))
pallete[j*3+2] |= (((lab >> 2) & 1) << (7-i))
i = i + 1
lab >>= 3
return pallete
def get_confusion_matrix(self, gt_label, pred_label, class_num):
"""
        Calculate the confusion matrix from the given ground truth and predicted labels
:param gt_label: the ground truth label
:param pred_label: the pred label
        :param class_num: the number of classes
:return: the confusion matrix
"""
index = (gt_label * class_num + pred_label).astype('int32')
label_count = np.bincount(index)
confusion_matrix = np.zeros((class_num, class_num))
for i_label in range(class_num):
for i_pred_label in range(class_num):
cur_index = i_label * class_num + i_pred_label
if cur_index < len(label_count):
confusion_matrix[i_label, i_pred_label] = label_count[cur_index]
return confusion_matrix
def _py_evaluate_segmentation(self):
"""
        This function is a wrapper to calculate the metrics for the given pred_segmentation results
:param pred_segmentations: the pred segmentation result
:return: the evaluation metrics
"""
confusion_matrix = np.zeros((self.num_classes,self.num_classes))
result_dir = os.path.join(self.result_path, 'results', 'VOC' + self.year, 'Segmentation')
for i, index in enumerate(self.image_set_index):
seg_gt_info = self.load_pascal_segmentation_annotation(index)
seg_gt_path = seg_gt_info['seg_cls_path']
seg_gt = np.array(PIL.Image.open(seg_gt_path)).astype('float32')
seg_pred_path = os.path.join(result_dir, '%s.png'%(index))
seg_pred = np.array(PIL.Image.open(seg_pred_path)).astype('float32')
seg_gt = cv2.resize(seg_gt, (seg_pred.shape[1], seg_pred.shape[0]), interpolation=cv2.INTER_NEAREST)
ignore_index = seg_gt != 255
seg_gt = seg_gt[ignore_index]
seg_pred = seg_pred[ignore_index]
confusion_matrix += self.get_confusion_matrix(seg_gt, seg_pred, self.num_classes)
pos = confusion_matrix.sum(1)
res = confusion_matrix.sum(0)
tp = np.diag(confusion_matrix)
IU_array = (tp / np.maximum(1.0, pos + res - tp))
mean_IU = IU_array.mean()
return {'meanIU':mean_IU, 'IU_array':IU_array}
def get_result_file_template(self):
"""
this is a template
VOCdevkit/results/VOC2007/Main/<comp_id>_det_test_aeroplane.txt
:return: a string template
"""
res_file_folder = os.path.join(self.result_path, 'results', 'VOC' + self.year, 'Main')
comp_id = self.config['comp_id']
filename = comp_id + '_det_' + self.image_set + '_{:s}.txt'
path = os.path.join(res_file_folder, filename)
return path
def write_pascal_results(self, all_boxes):
"""
write results files in pascal devkit path
:param all_boxes: boxes to be processed [bbox, confidence]
:return: None
"""
for cls_ind, cls in enumerate(self.classes):
if cls == '__background__':
continue
print 'Writing {} VOC results file'.format(cls)
filename = self.get_result_file_template().format(cls)
with open(filename, 'wt') as f:
for im_ind, index in enumerate(self.image_set_index):
dets = all_boxes[cls_ind][im_ind]
if len(dets) == 0:
continue
# the VOCdevkit expects 1-based indices
for k in range(dets.shape[0]):
f.write('{:s} {:.3f} {:.1f} {:.1f} {:.1f} {:.1f}\n'.
format(index, dets[k, -1],
dets[k, 0] + 1, dets[k, 1] + 1, dets[k, 2] + 1, dets[k, 3] + 1))
def do_python_eval(self):
"""
python evaluation wrapper
:return: info_str
"""
info_str = ''
annopath = os.path.join(self.data_path, 'Annotations', '{0!s}.xml')
imageset_file = os.path.join(self.data_path, 'ImageSets', 'Main', self.image_set + '.txt')
annocache = os.path.join(self.cache_path, self.name + '_annotations.pkl')
aps = []
# The PASCAL VOC metric changed in 2010
use_07_metric = True if self.year == 'SDS' or int(self.year) < 2010 else False
        print 'VOC07 metric? ' + ('Yes' if use_07_metric else 'No')
        info_str += 'VOC07 metric? ' + ('Yes' if use_07_metric else 'No')
info_str += '\n'
for cls_ind, cls in enumerate(self.classes):
if cls == '__background__':
continue
filename = self.get_result_file_template().format(cls)
rec, prec, ap = voc_eval(filename, annopath, imageset_file, cls, annocache,
ovthresh=0.5, use_07_metric=use_07_metric)
aps += [ap]
print('AP for {} = {:.4f}'.format(cls, ap))
info_str += 'AP for {} = {:.4f}\n'.format(cls, ap)
print('Mean [email protected] = {:.4f}'.format(np.mean(aps)))
info_str += 'Mean [email protected] = {:.4f}\n\n'.format(np.mean(aps))
# @0.7
aps = []
for cls_ind, cls in enumerate(self.classes):
if cls == '__background__':
continue
filename = self.get_result_file_template().format(cls)
rec, prec, ap = voc_eval(filename, annopath, imageset_file, cls, annocache,
ovthresh=0.7, use_07_metric=use_07_metric)
aps += [ap]
print('AP for {} = {:.4f}'.format(cls, ap))
info_str += 'AP for {} = {:.4f}\n'.format(cls, ap)
print('Mean [email protected] = {:.4f}'.format(np.mean(aps)))
info_str += 'Mean [email protected] = {:.4f}'.format(np.mean(aps))
return info_str
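# Illustrative usage (comment only; the paths below are placeholders for a local
# VOCdevkit checkout, not values taken from this repository):
#
#   imdb = PascalVOC('2007_trainval', 'data', 'data/VOCdevkit')
#   roidb = imdb.gt_roidb()
#   print 'loaded %d images' % imdb.num_images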
| 42.781182 | 119 | 0.591428 | [
"MIT"
] | alphadadajuju/Deep-Feature-Flow-mod | lib/dataset/pascal_voc.py | 19,551 | Python |
from typing import List
from yandex_market_language import models, exceptions
from yandex_market_language.models import fields
from yandex_market_language.models.abstract import XMLElement, XMLSubElement
from yandex_market_language.exceptions import ValidationError
class Shop(
fields.EnableAutoDiscountField,
fields.DeliveryOptionsField,
fields.PickupOptionsField,
models.AbstractModel
):
"""
Shop model.
Docs:
https://yandex.ru/support/partnermarket/elements/shop.html
"""
__slots__ = [
'_url',
'name',
'company',
'currencies',
'categories',
'offers',
'platform',
'version',
'agency',
'email',
'_delivery_options',
'_pickup_options',
'_enable_auto_discounts',
'gifts',
'promos'
]
def __init__(
self,
name: str,
company: str,
url: str,
currencies: List["models.Currency"],
categories: List["models.Category"],
offers: List["models.offers.AbstractOffer"],
platform: str = None,
version: str = None,
agency: str = None,
email: str = None,
delivery_options: List["models.Option"] = None,
pickup_options: List["models.Option"] = None,
enable_auto_discounts=None,
gifts: List["models.Gift"] = None,
promos: List["models.Promo"] = None,
):
self.name = name
self.company = company
self.url = url
self.platform = platform
self.version = version
self.agency = agency
self.email = email
self.currencies = currencies
self.categories = categories
self.delivery_options = delivery_options
self.pickup_options = pickup_options
self.enable_auto_discounts = enable_auto_discounts
self.offers = offers
self.gifts = gifts
self.promos = promos
@property
def url(self):
return self._url
@url.setter
def url(self, value: str):
if len(value) > 512:
raise ValidationError("The maximum url length is 512 characters.")
self._url = value
def create_dict(self, **kwargs) -> dict:
return dict(
name=self.name,
company=self.company,
url=self.url,
platform=self.platform,
version=self.version,
agency=self.agency,
email=self.email,
currencies=[c.to_dict() for c in self.currencies],
categories=[c.to_dict() for c in self.categories],
delivery_options=[o.to_dict() for o in self.delivery_options],
pickup_options=[o.to_dict() for o in self.pickup_options],
enable_auto_discounts=self.enable_auto_discounts,
offers=[o.to_dict() for o in self.offers],
gifts=[g.to_dict() for g in self.gifts] if self.gifts else [],
promos=[p.to_dict() for p in self.promos] if self.promos else [],
)
def create_xml(self, **kwargs) -> XMLElement:
shop_el = XMLElement("shop")
# Add simple elements
for tag in (
"name",
"company",
"url",
"platform",
"version",
"agency",
"email",
):
value = getattr(self, tag)
if value:
el = XMLSubElement(shop_el, tag)
el.text = value
# Add currencies
currencies_el = XMLSubElement(shop_el, "currencies")
for c in self.currencies:
c.to_xml(currencies_el)
# Add categories
categories_el = XMLSubElement(shop_el, "categories")
for c in self.categories:
c.to_xml(categories_el)
# Add delivery options
if self.delivery_options:
delivery_options_el = XMLSubElement(shop_el, "delivery-options")
for o in self.delivery_options:
o.to_xml(delivery_options_el)
# Add pickup options
if self.pickup_options:
pickup_options_el = XMLSubElement(shop_el, "pickup-options")
for o in self.pickup_options:
o.to_xml(pickup_options_el)
# Add enable_auto_discounts
if self._enable_auto_discounts:
enable_auto_discounts_el = XMLSubElement(
shop_el, "enable_auto_discounts"
)
enable_auto_discounts_el.text = self._enable_auto_discounts
# Add offers
offers_el = XMLSubElement(shop_el, "offers")
for o in self.offers:
o.to_xml(offers_el)
# Add gifts
if self.gifts:
gifts_el = XMLSubElement(shop_el, "gifts")
for g in self.gifts:
g.to_xml(gifts_el)
# Add promos
if self.promos:
promos_el = XMLSubElement(shop_el, "promos")
for p in self.promos:
p.to_xml(promos_el)
return shop_el
@staticmethod
def from_xml(shop_el: XMLElement) -> "Shop":
kwargs = {}
for el in shop_el:
if el.tag == "currencies":
currencies = []
for currency_el in el:
currencies.append(models.Currency.from_xml(currency_el))
kwargs["currencies"] = currencies
elif el.tag == "categories":
categories = []
for category_el in el:
categories.append(models.Category.from_xml(category_el))
kwargs["categories"] = categories
elif el.tag == "delivery-options":
delivery_options = []
for option_el in el:
delivery_options.append(models.Option.from_xml(option_el))
kwargs["delivery_options"] = delivery_options
elif el.tag == "pickup-options":
pickup_options = []
for option_el in el:
pickup_options.append(models.Option.from_xml(option_el))
kwargs["pickup_options"] = pickup_options
elif el.tag == "offers":
offers = []
for offer_el in el:
offer_type = offer_el.attrib.get("type")
if offer_type is None:
offer = models.SimplifiedOffer.from_xml(offer_el)
elif offer_type == "vendor.model":
offer = models.ArbitraryOffer.from_xml(offer_el)
elif offer_type == "book":
offer = models.BookOffer.from_xml(offer_el)
elif offer_type == "audiobook":
offer = models.AudioBookOffer.from_xml(offer_el)
elif offer_type == "artist.title":
offer = models.MusicVideoOffer.from_xml(offer_el)
elif offer_type == "medicine":
offer = models.MedicineOffer.from_xml(offer_el)
elif offer_type == "event-ticket":
offer = models.EventTicketOffer.from_xml(offer_el)
elif offer_type == "alco":
offer = models.AlcoholOffer.from_xml(offer_el)
else:
raise exceptions.ParseError(
"Got unexpected offer type: {0}".format(offer_type)
)
offers.append(offer)
kwargs["offers"] = offers
elif el.tag == "gifts":
gifts = []
for gift_el in el:
gifts.append(models.Gift.from_xml(gift_el))
if gifts:
kwargs["gifts"] = gifts
elif el.tag == "promos":
promos = []
for promo_el in el:
promos.append(models.Promo.from_xml(promo_el))
if promos:
kwargs["promos"] = promos
else:
kwargs[el.tag] = el.text
return Shop(**kwargs)
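# Illustrative round-trip sketch (not part of the library itself); the inline XML
# below is a minimal assumed feed with empty currency/category/offer lists.
if __name__ == "__main__":
    import xml.etree.ElementTree as ET

    example_el = ET.fromstring(
        "<shop>"
        "<name>Demo shop</name>"
        "<company>Demo company</company>"
        "<url>https://example.com</url>"
        "<currencies/><categories/><offers/>"
        "</shop>"
    )
    demo_shop = Shop.from_xml(example_el)
    print(demo_shop.name, demo_shop.url)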
| 34.172996 | 79 | 0.540684 | [
"MIT"
] | stefanitsky/yandex_market_language | yandex_market_language/models/shop.py | 8,099 | Python |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2017-10-02 09:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hkm', '0025_userprofile_printer_presets'),
]
operations = [
migrations.AddField(
model_name='userprofile',
name='printer_password',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Printer password'),
),
migrations.AddField(
model_name='userprofile',
name='printer_username',
field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Printer username'),
),
]
| 28.807692 | 107 | 0.636849 | [
"MIT"
] | City-of-Helsinki/kuvaselaamo | hkm/migrations/0026_hkm_museum_printer_credentials.py | 749 | Python |
# =============================================================================
# IMPORTS
# =============================================================================
import torch
from espaloma.nn.readout.base_readout import BaseReadout
# =============================================================================
# MODULE CLASSES
# =============================================================================
class NodeTyping(BaseReadout):
"""Simple typing on homograph."""
def __init__(self, in_features, n_classes=100):
super(NodeTyping, self).__init__()
self.c = torch.nn.Linear(in_features, n_classes)
def forward(self, g):
g.apply_nodes(
ntype="n1",
func=lambda node: {"nn_typing": self.c(node.data["h"])},
)
return g
| 32.48 | 79 | 0.383005 | [
"MIT"
] | choderalab/espaloma | espaloma/nn/readout/node_typing.py | 812 | Python |
from __future__ import print_function, division
import json
import torch
from torch.utils.data import Dataset
import numpy as np
import os
import sys
import collections
import torch.utils.data as data
import shutil
from PIL import Image
from torchvision.datasets.utils import download_url, check_integrity
class ADE20KDataset(Dataset):
def __init__(self,ROOT_DIR, period, transform=None):
self.root_dir = ROOT_DIR
self.rst_dir = os.path.join(self.root_dir,'ADEChallengeData2016','result')
self.period = period
self.num_categories = 150
self.transform = transform
self.odgt = None
if self.period == 'train':
self.odgt = os.path.join(self.root_dir,'ADEChallengeData2016','train.odgt')
else:
self.odgt = os.path.join(self.root_dir,'ADEChallengeData2016','validation.odgt')
self.list_sample = [json.loads(x.rstrip()) for x in open(self.odgt, 'r')]
def __len__(self):
return len(self.list_sample)
def __getitem__(self, idx):
image_path = os.path.join(self.root_dir, self.list_sample[idx]['fpath_img'])
img = Image.open(image_path).convert('RGB')
r = self.list_sample[idx]['height']
c = self.list_sample[idx]['width']
name = self.list_sample[idx]['fpath_img'].replace('ADEChallengeData2016/images/','')
if self.period == 'train':
name = name.replace('train/','')
if 'val' in self.period:
name = name.replace('validation/','')
assert(self.period != 'test')
name = name.replace('.jpg','')
sample = {'image': img, 'name': name, 'row': r, 'col': c}
if self.period == 'train' or self.period == 'val':
seg_path = os.path.join(self.root_dir, self.list_sample[idx]['fpath_segm'])
seg = Image.open(seg_path)
sample['segmentation'] = seg
#assert(seg.ndim == 2)
assert(img.size[0] == seg.size[0])
assert(img.size[1] == seg.size[1])
        if self.transform is not None:
            img, seg = self.transform(img, seg)
        return img, seg
def decode_target(self, label):
m = label.astype(np.uint16)
r,c = m.shape
cmap = np.zeros((r,c,3), dtype=np.uint8)
cmap[:,:,0] = (m&1)<<7 | (m&8)<<3 | (m&64)>>1
cmap[:,:,1] = (m&2)<<6 | (m&16)<<2 | (m&128)>>2
cmap[:,:,2] = (m&4)<<5 | (m&32)<<1
return cmap | 36.411765 | 92 | 0.5937 | [
"MIT"
] | hsfzxjy/ESSNet | datasets/ade.py | 2,476 | Python |
# coding: utf-8
#
# Copyright 2018 Moriyoshi Koizumi
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import six
import pandas
from .column import _DataFrameColumn, _Function, _Literal, eval_column, infer_data_type, compile_to_raf, resolve_alias
from .functions import SimpleAggregationFunctionSpec
from .group import GroupedData
from .types import StructType, StructField
class _Raw(object):
def __init__(self, pdf):
self.pdf = pdf
def __call__(self, df):
return self.pdf
class _Filter(object):
def __init__(self, df, expr):
self.df = df
self.expr = expr
def __call__(self, df):
raf = compile_to_raf(df, self.expr)
pdf = self.df._yield_pdf()
return pdf.loc[raf]
class _Aggregation(object):
def __init__(self, grouped_data, agg_cols):
self.grouped_data = grouped_data
self.agg_cols = agg_cols
def __call__(self, df):
pdf = self.grouped_data.df._yield_pdf()
agg_fn_cols = []
agg_variations = set()
const_cols = []
resolved_cols = []
for col in self.agg_cols:
col = resolve_alias(col)
resolved_cols.append(col)
if isinstance(col, _Function):
agg_fn_cols.append(col)
if isinstance(col.spec, SimpleAggregationFunctionSpec):
agg_variations.add(col.spec.fn)
else:
raise TypeError()
elif isinstance(col, _Literal):
const_cols.append(col)
else:
raise TypeError(col.__class__)
if len(self.grouped_data.cols) > 0:
pg = pdf.groupby(
by=[pdf.iloc[:,col.index] for col in self.grouped_data.cols]
)
agg_result = pg.aggregate(list(agg_variations))
agg_result = pandas.concat([agg_result.index.to_frame(), agg_result], axis=1)
# convert columns to a set of series
agg_result_index = agg_result.index.to_frame()
series_set = [
agg_result_index[col].rename(i)
for i, col in enumerate(agg_result_index.columns)
]
for col in resolved_cols:
if isinstance(col, _Function):
if isinstance(col.spec, SimpleAggregationFunctionSpec):
series = agg_result[col.operands[0].index, col.spec.fn].rename(len(series_set))
else:
# should never get here; already validated in the above loop
assert False
elif isinstance(col, _Literal):
series = pandas.Series([col.value], name=len(series_set))
else:
# should never get here; already validated in the above loop
assert False
series_set.append(series)
else:
agg_result = pdf.aggregate(list(agg_variations))
# convert columns to a set of series
series_set = []
for col in self.agg_cols:
if isinstance(col, _Function):
if isinstance(col.spec, SimpleAggregationFunctionSpec):
series = pandas.Series([agg_result[col.operands[0].index][col.spec.fn]], name=len(series_set))
else:
# should never get here; already validated in the above loop
assert False
elif isinstance(col, _Literal):
series = pandas.Series([col.value], name=len(series_set))
else:
# should never get here; already validated in the above loop
assert False
series_set.append(series)
return pandas.concat(series_set, axis=1)
class _WithColumns(object):
def __init__(self, df, name_col_pairs):
self.df = df
self.name_col_pairs = name_col_pairs
def __call__(self, df):
extra_fields = df.schema.fields[len(self.df.schema.fields):]
lhs = self.df._yield_pdf()
return pandas.concat(
[lhs] + [
eval_column(df, lhs, col).rename(i)
for i, (_, col) in enumerate(self.name_col_pairs, len(self.df.columns))
],
axis=1
)
class _Union(object):
def __init__(self, df, following):
self.df = df
self.following = following
def __call__(self, df):
return pandas.concat([self.df._yield_pdf(), self.following._yield_pdf()], axis=0)
class _OrderBy(object):
def __init__(self, df, cols, ascending=None):
self.df = df
self.cols = cols
self.ascending = ascending
def __call__(self, df):
assert all(isinstance(col, _DataFrameColumn) for col in self.cols)
return self.df._yield_pdf().sort_values(by=[col.index for col in self.cols], ascending=self.ascending)
class Row(object):
def __init__(self, pdf, schema, i, name_to_column_map):
self.pdf = pdf
self.schema = schema
self.i = i
self.name_to_column_map = name_to_column_map
def __str__(self):
return str(self.pdf.iloc[self.i])
def __getitem__(self, i):
if isinstance(i, six.string_types):
return self.pdf.iloc[self.i][self.name_to_column_map[i].index]
else:
return self.pdf.iloc[self.i][i]
class DataFrame(object):
def __init__(self, sql_ctx, schema, modifier=None):
self.sql_ctx = sql_ctx
self.schema = schema
self.modifier = modifier
self._columns = [
_DataFrameColumn(self, f, i)
for i, f in enumerate(schema.fields)
]
self._name_to_column_map = {
f.name: c
for f, c in zip(schema.fields, self._columns)
}
def __getitem__(self, i):
if isinstance(i, six.string_types):
return self._name_to_column_map[i]
        elif isinstance(i, six.integer_types):
return self._columns[i]
else:
raise TypeError()
def filter(self, cond):
return DataFrame(
self.sql_ctx,
self.schema,
_Filter(self, cond)
)
def groupBy(self, *cols):
return GroupedData(self, cols)
def agg(self, *exprs):
return self.groupBy().agg(*exprs)
def withColumn(self, name, col):
return self._with_columns([(name, col)])
def unionAll(self, following):
return DataFrame(
self.sql_ctx,
self.schema,
_Union(self, following)
)
def orderBy(self, *cols, **kwargs):
ascending = kwargs.pop('ascending', None)
return DataFrame(
self.sql_ctx,
self.schema,
_OrderBy(self, cols, ascending)
)
@property
def columns(self):
return [col.field.name for col in self._columns]
def _with_columns(self, name_col_pairs):
return DataFrame(
self.sql_ctx,
StructType(
fields=self.schema.fields + [
StructField(
name,
infer_data_type(col)
)
for name, col in name_col_pairs
]
),
_WithColumns(self, name_col_pairs)
)
def _yield_pdf(self):
return self.modifier(self)
def collect(self):
pdf = self._yield_pdf()
return [
Row(pdf, self.schema, i, self._name_to_column_map)
for i in range(0, len(pdf))
]
| 33.796875 | 118 | 0.589112 | [
"MIT"
] | moriyoshi/dummydf | dummydf/sql/dataframe.py | 8,652 | Python |
from kivy.uix.screenmanager import Screen
# Declares the game's menu screen
class TelaMenu(Screen):
pass
# Declares the Pong game screen
class TelaJogo(Screen):
pass
# Declares the winner 1 screen
class TelaVencedor1(Screen):
pass
# Declares the winner 2 screen
class TelaVencedor2(Screen):
pass
| 13.818182 | 41 | 0.723684 | [
"MIT"
] | LivioAlvarenga/Tutoriais_Kivy_KivyMD | Jogo_pong/telas/Telas.py | 304 | Python |
def printBigHeadline(text):
print("")
print("#######################################################################")
print(text)
print("#######################################################################")
print("")
def printSmallHeadline(text):
print("")
print("-----------------------------------------------------------------------")
print(text)
print("-----------------------------------------------------------------------")
print("")
| 30.5625 | 84 | 0.216769 | [
"MIT"
] | recursinging/daisyHat | python/daisyHat/Tools.py | 489 | Python |
import re
from social_core.backends.oauth import OAuthAuth
NAME_RE = re.compile(r'([^O])Auth')
LEGACY_NAMES = ['username', 'email']
def backend_name(backend):
name = backend.__name__
name = name.replace('OAuth', ' OAuth')
name = name.replace('OpenId', ' OpenId')
name = name.replace('Sandbox', '')
name = NAME_RE.sub(r'\1 Auth', name)
return name
def backend_class(backend):
return backend.name.replace('-', ' ')
def icon_name(name):
return {
'stackoverflow': 'stack-overflow',
'google-oauth': 'google',
'google-oauth2': 'google',
'google-openidconnect': 'google',
'yahoo-oauth': 'yahoo',
'facebook-app': 'facebook',
'email': 'envelope',
'vimeo': 'vimeo-square',
'linkedin-oauth2': 'linkedin',
'vk-oauth2': 'vk',
'live': 'windows',
'username': 'user',
}.get(name, name)
def slice_by(value, items):
return [value[n:n + items] for n in range(0, len(value), items)]
def social_backends(backends):
return filter_backends(
backends,
lambda name, backend: name not in LEGACY_NAMES
)
def legacy_backends(backends):
return filter_backends(
backends,
lambda name, backend: name in LEGACY_NAMES
)
def oauth_backends(backends):
return filter_backends(
backends,
lambda name, backend: issubclass(backend, OAuthAuth)
)
def filter_backends(backends, filter_func):
backends = [item for item in backends.items() if filter_func(*item)]
backends.sort(key=lambda backend: backend[0])
return backends
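# Small illustrative checks of the helpers above (not used by the templates themselves):
if __name__ == "__main__":
    assert slice_by([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4], [5]]
    assert icon_name("stackoverflow") == "stack-overflow"
    assert icon_name("github") == "github"  # unknown names fall through unchanged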
| 23.157143 | 72 | 0.624923 | [
"MIT"
] | 776166/yggdrasil-django | site/social_auth/filters.py | 1,621 | Python |
""" Get a list of Packages in CI, Deployed, or all Images """
from __future__ import print_function
from pc_lib import pc_api, pc_utility
# --Configuration-- #
parser = pc_utility.get_arg_parser()
parser.add_argument(
'--mode',
type=str,
choices=['ci', 'deployed', 'all'],
default='all',
help='(Optional) - Report on CI, Deployed, or all Images.')
parser.add_argument(
'--package_type',
type=str,
choices=['binary', 'gem', 'go', 'jar', 'nodejs', 'nuget', 'package', 'python', 'windows', 'all'],
default='all',
help='(Optional) - Report on one or all Package Types.')
parser.add_argument(
'--image_id',
type=str,
help='(Optional) - ID of the Image (sha256:...).')
parser.add_argument(
'--package_id',
type=str,
help='(Optional) - ID of the Package (name:version).')
args = parser.parse_args()
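# Example invocations (illustrative; the package name/version values are made up,
# while the flags are the ones defined above):
#
#   python pcs_images_packages_read.py --mode deployed --package_type jar
#   python pcs_images_packages_read.py --package_id openssl:1.1.1k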
search_package_name = None
search_package_version = None
if args.package_id:
print_all_packages = False
if ':' in args.package_id:
[search_package_name, search_package_version] = args.package_id.split(':')
else:
search_package_name = args.package_id
else:
print_all_packages = True
# --Helpers-- #
def optional_print(txt='', mode=True):
if mode:
print(txt)
# --Initialize-- #
settings = pc_utility.get_settings(args)
pc_api.configure(settings)
pc_api.validate_api_compute()
# --Main-- #
get_deployed_images = True
get_ci_images = True
deployed_images_with_package = []
ci_images_with_package = []
"""
"instances": [{
"image": "k8s.gcr.io/etcd:3.4.3-0",
"host": "demo",
"registry": "k8s.gcr.io"
"repo": "etcd",
"tag": "3.4.3-0",
}],
"packages": [{
"pkgsType": "package",
"pkgs": [{
"version": "2.27-2",
"name": "grep",
"cveCount": 12,
"license": "GPL-3+",
"layerTime": 1557275612
}],
"pkgsType": [
"binary",
"gem",
"go",
"jar",
"nodejs",
"nuget",
"package",
"python",
"windows",
]
"""
print('Testing Compute API Access ...', end='')
intelligence = pc_api.statuses_intelligence()
print(' done.')
print()
if search_package_name:
print('Searching for Package: (%s) Version: (%s)' % (search_package_name, search_package_version))
print()
# Monitor > Vulnerabilities/Compliance > Images > Deployed
deployed_images = {}
if args.mode in ['deployed', 'all']:
print('Getting Deployed Images ...')
images = pc_api.images_list_read(args.image_id)
for image in images:
image_id = image['_id']
# TODO: Verify instances array length.
image_ii = '%s %s' % (image['instances'][0]['image'], image['instances'][0]['host'])
deployed_images[image_id] = {
'id': image['_id'],
'instance': image_ii,
'instances': image['instances'],
'packages': image['packages']}
optional_print(mode=print_all_packages)
for image in deployed_images:
optional_print('Deployed Image', mode=print_all_packages)
optional_print('ID: %s' % image, mode=print_all_packages)
optional_print('Instance: %s' % deployed_images[image]['instance'], mode=print_all_packages)
optional_print(mode=print_all_packages)
if not deployed_images[image]['packages']:
continue
for package_type in deployed_images[image]['packages']:
for package in package_type['pkgs']:
optional_print('\tType: %s' % package_type['pkgsType'], mode=print_all_packages)
optional_print('\tName: %s' % package['name'], mode=print_all_packages)
optional_print('\tVers: %s' % package['version'], mode=print_all_packages)
optional_print('\tCVEs: %s' % package['cveCount'], mode=print_all_packages)
optional_print(mode=print_all_packages)
if args.package_type in [package_type['pkgsType'], 'all']:
if search_package_name and (search_package_name == package['name']):
if search_package_version:
if search_package_version == package['version']:
deployed_images_with_package.append(deployed_images[image]['instance'])
else:
deployed_images_with_package.append(deployed_images[image]['instance'])
print('Done.')
print()
# Monitor > Vulnerabilities/Compliance > Images > CI
ci_images = {}
if args.mode in ['ci', 'all']:
print('Getting CI Images ...')
images = pc_api.scans_list_read(args.image_id)
for image in images:
image_id = image['entityInfo']['id']
if image['entityInfo']['instances']:
image_ii = '%s %s' % (image['entityInfo']['instances'][0]['image'], image['entityInfo']['instances'][0]['host'])
else:
image_ii = None
ci_images[image_id] = {
'id': image['entityInfo']['id'],
'instance': image_ii,
'instances': image['entityInfo']['instances'],
'packages': image['entityInfo']['packages']}
optional_print(mode=print_all_packages)
for image in ci_images:
optional_print('CI Image', mode=print_all_packages)
optional_print('ID: %s' % image, mode=print_all_packages)
optional_print('Instance: %s' % ci_images[image]['instance'], mode=print_all_packages)
optional_print(mode=print_all_packages)
if not ci_images[image]['packages']:
continue
for package_type in ci_images[image]['packages']:
for package in package_type['pkgs']:
optional_print('\tType: %s' % package_type['pkgsType'], mode=print_all_packages)
optional_print('\tName: %s' % package['name'], mode=print_all_packages)
optional_print('\tVers: %s' % package['version'], mode=print_all_packages)
optional_print('\tCVEs: %s' % package['cveCount'], mode=print_all_packages)
optional_print(mode=print_all_packages)
if args.package_type in [package_type['pkgsType'], 'all']:
if search_package_name and (search_package_name == package['name']):
if search_package_version:
if search_package_version == package['version']:
ci_images_with_package.append(deployed_images[image]['instance'])
else:
ci_images_with_package.append(deployed_images[image]['instance'])
print('Done.')
print()
if args.package_id:
if args.mode in ['deployed', 'all']:
print()
if deployed_images_with_package:
print('Package: (%s) Version: (%s) found in these Deployed Images:' % (search_package_name, search_package_version))
print()
for image in deployed_images_with_package:
print('\t%s' % image)
else:
print('Package: (%s) Version: (%s) not found in any Deployed Images' % (search_package_name, search_package_version))
if args.mode in ['ci', 'all']:
print()
if ci_images_with_package:
print('Package: (%s) Version: (%s) found in these CI Images:' % (search_package_name, search_package_version))
print()
for image in ci_images_with_package:
print('\t%s' % image)
else:
print('Package: (%s) Version: (%s) not found in any CI Images' % (search_package_name, search_package_version))
| 37.351485 | 129 | 0.605302 | [
"MIT"
] | moonman81/pc-toolbox | pcs_images_packages_read.py | 7,545 | Python |
import numpy as np
import theano
def intX(X):
return np.asarray(X, dtype=np.int32)
def floatX(X):
return np.asarray(X, dtype=theano.config.floatX)
def sharedX(X, dtype=theano.config.floatX, name=None):
return theano.shared(np.asarray(X, dtype=dtype), name=name)
def shared0s(shape, dtype=theano.config.floatX, name=None):
return sharedX(np.zeros(shape), dtype=dtype, name=name)
def sharedNs(shape, n, dtype=theano.config.floatX, name=None):
return sharedX(np.ones(shape)*n, dtype=dtype, name=name)
def downcast_float(X):
return np.asarray(X, dtype=np.float32)
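# Minimal illustrative usage of the helpers above (requires a working Theano install):
if __name__ == "__main__":
    W = sharedX(np.random.randn(3, 3), name="W")
    b = shared0s((3,), name="b")
    print(W.get_value().shape, b.get_value().shape)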
| 28.095238 | 63 | 0.727119 | [
"MIT"
] | IndicoDataSolutions/Passage | passage/theano_utils.py | 590 | Python |
import sys
from PyQt5.QtGui import QIcon
from PyQt5.QtWidgets import QMainWindow, QAction, QMessageBox, QStatusBar
from PyMailConfigWindow import ConfigWindow
from PyMailReceiverModel import ReceiverModel
from PyMailReceiverView import ReceiverView
from PyMailSenderModel import SenderModel
from PyMailSenderWindow import SenderWindow
from PyMailSplitWidget import SplitWidget
from PyMailStartUpWindow import StartUpWindow
from PyMailToolBar import ToolBar
class PyMailMainWindow(QMainWindow):
def __init__(self, delegate):
super().__init__()
self.setWindowTitle("PyMail")
self.setWindowIcon(QIcon(r"res\logo.png"))
self.setCentralWidget(SplitWidget(self))
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.setupUI()
self.show()
self.addToolBar(ToolBar(self))
self.delegate = delegate
self.delegate.registerView(self)
self.setStatusBar(QStatusBar())
self.statusBar()
self.setStatusTip("Ready")
self.startUpWindow = StartUpWindow(self, self.delegate)
def setupUI(self):
self.setupMenuBar()
def setupMenuBar(self):
menuBar = self.menuBar()
self.setupFileMenu(menuBar)
self.setupEditMenu(menuBar)
self.setupOptionsMenu(menuBar)
self.setupHelpMenu(menuBar)
def setupFileMenu(self, menuBar):
fileMenu = menuBar.addMenu("File")
self.setFileMenuActions(fileMenu)
def setupEditMenu(self, menuBar):
editMenu = menuBar.addMenu("Edit")
def setupOptionsMenu(self, menuBar):
optionsMenu = menuBar.addMenu("Options")
settingsAction = QAction(QIcon(r"res\settings.png"), "Settings", optionsMenu)
settingsAction.setStatusTip("Settings")
settingsAction.triggered.connect(self.showSettings)
optionsMenu.addAction(settingsAction)
def setupHelpMenu(self, menuBar):
helpMenu = menuBar.addMenu("Help")
def setFileMenuActions(self, fileMenu):
exitAction = QAction(QIcon(r"res\exit.png"), "Exit", fileMenu)
exitAction.setShortcut("Ctrl+Q")
exitAction.triggered.connect(self.close)
fileMenu.addAction(exitAction)
def showSettings(self):
settingsView = ConfigWindow(self)
self.delegate.reset()
self.delegate.configView = settingsView
self.centralWidget().changeRightWidget(settingsView)
def showHelp(self):
pass
def receiveMail(self):
self.delegate.reset()
receiverView = ReceiverView()
self.delegate.receiverView = receiverView
receiverModel = ReceiverModel()
receiverModel.delegate = self.delegate
self.delegate.receiverModel = receiverModel
receiverView.delegate = self.delegate
self.centralWidget().changeLeftWidget(receiverView)
def showNewMail(self):
newMailView = SenderWindow()
newMailModel = SenderModel()
self.delegate.reset()
self.delegate.senderView = newMailView
self.delegate.senderModel = newMailModel
newMailView.delegate = self.delegate
newMailModel.delegate = self.delegate
newMailView.set_actions()
self.centralWidget().changeRightWidget(newMailView)
def closeEvent(self, event):
event.ignore()
self.exit()
def resizeEvent(self, event):
self.centralWidget().resizeWidget()
def exit(self):
msg = QMessageBox.question(None, "Exit PyMail", "Do You want to quit")
if msg == QMessageBox.Yes:
self.destroy()
sys.exit() | 33.663551 | 85 | 0.68573 | [
"MIT"
] | LolsonX/PyMail | PyMailMainWindow.py | 3,602 | Python |
from abc import ABC
from types import NoneType
import logictest
import http_connector
from log import log
class TestHttp(logictest.SuiteRunner, ABC):
def __init__(self, kind, pattern):
super().__init__(kind, pattern)
self._http = None
def get_connection(self):
if self._http is None:
self._http = http_connector.HttpConnector()
self._http.connect(**self.driver)
return self._http
def reset_connection(self):
self._http.reset_session()
def batch_execute(self, statement_list):
for statement in statement_list:
self.execute_statement(statement)
self.reset_connection()
def execute_ok(self, statement):
self.get_connection().query_with_session(statement)
return None
def execute_error(self, statement):
resp = self.get_connection().query_with_session(statement)
return http_connector.get_error(resp)
def execute_query(self, statement):
results = self.get_connection().fetch_all(statement.text)
query_type = statement.s_type.query_type
vals = []
for (ri, row) in enumerate(results):
for (i, v) in enumerate(row):
if isinstance(v, NoneType):
vals.append("NULL")
continue
if query_type[i] == 'I':
if not isinstance(v, int):
log.error(
"Expected int, got type {} in query {} row {} col {} value {}"
.format(type(v), statement.text, ri, i, v))
elif query_type[i] == 'F' or query_type[i] == 'R':
if not isinstance(v, float):
log.error(
"Expected float, got type {} in query {} row {} col {} value {}"
.format(type(v), statement.text, ri, i, v))
elif query_type[i] == 'T':
# include data, timestamp, dict, list ...
if not (isinstance(v, str) or isinstance(v, dict) or
isinstance(v, list)):
log.error(
"Expected string, got type {} in query {} row {} col {} value {}"
.format(type(v), statement.text, ri, i, v))
elif query_type[i] == 'B':
if not isinstance(v, bool):
log.error(
"Expected bool, got type {} in query {} row {} col {} value {}"
.format(type(v), statement.text, ri, i, v))
else:
log.error(
"Unknown type {} in query {} row {} col {} value {}".
format(query_type[i], statement.text, ri, i, v))
if isinstance(v, bool):
v = str(v).lower(
) # bool to string in python will be True/False
vals.append(str(v))
return vals
| 39.192308 | 93 | 0.499836 | [
"Apache-2.0"
] | LiuYuHui/databend | tests/logictest/http_runner.py | 3,057 | Python |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-10-14 18:05
from __future__ import unicode_literals
import common.blocks.columns
import common.blocks.tabs
from django.db import migrations, models
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailembeds.blocks
import wagtail.wagtailimages.blocks
class Migration(migrations.Migration):
dependencies = [
('common', '0017_upimagepath'),
]
operations = [
migrations.AlterField(
model_name='custompage',
name='content',
field=wagtail.wagtailcore.fields.StreamField((('appeal', wagtail.wagtailcore.blocks.StructBlock((('icon', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('none', 'none'), ('flask', 'flask'), ('group', 'group'), ('laptop', 'laptop'), ('sitemap', 'sitemap'), ('user', 'user'), ('book', 'book'), ('download', 'download')])), ('topic', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('content', wagtail.wagtailcore.blocks.TextBlock(max_length=255, required=True))), classname='appeal', icon='tick', template='common/blocks/appeal.html')), ('heading', wagtail.wagtailcore.blocks.CharBlock(classname='full title')), ('statement', wagtail.wagtailcore.blocks.CharBlock()), ('paragraph', wagtail.wagtailcore.blocks.RichTextBlock()), ('imagechooser', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('column', common.blocks.columns.RowBlock()), ('tabbed_block', common.blocks.tabs.TabListBlock()), ('image', wagtail.wagtailcore.blocks.StructBlock((('main_image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('style', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('max-width:225px;max-height:145px', 'small display'), ('max_width:250px;max-height:250px', 'middle display'), ('max_width:250px;max-height:250px;padding-top:20px', 'middle + padding display'), ('height:auto', 'auto display')], default='height:auto')), ('url', wagtail.wagtailcore.blocks.CharBlock(max_length=250, required=False))))), ('rich_text', wagtail.wagtailcore.blocks.RichTextBlock()), ('raw_html', wagtail.wagtailcore.blocks.RawHTMLBlock(help_text='With great power comes great responsibility. This HTML is unescaped. Be careful!')), ('people_block', wagtail.wagtailcore.blocks.StructBlock((('displayStyle', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('concise-team', 'concise-team'), ('concise-ambassador', 'concise-ambassador'), ('detailed', 'detailed')], default='concise')), ('tag', wagtail.wagtailcore.blocks.CharBlock(max_length=20))))), ('centered_text', wagtail.wagtailcore.blocks.StructBlock((('text', wagtail.wagtailcore.blocks.RichTextBlock()),))), ('hero_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock(required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))), ('spotlight_block', wagtail.wagtailcore.blocks.StructBlock((('bubbles', wagtail.wagtailcore.blocks.StreamBlock((('bubble_block', wagtail.wagtailcore.blocks.StructBlock((('image', wagtail.wagtailimages.blocks.ImageChooserBlock()), ('title', wagtail.wagtailcore.blocks.CharBlock(max_length=35, required=True)), ('description', wagtail.wagtailcore.blocks.RichTextBlock(required=True))))),))),))), ('job_whole_block', wagtail.wagtailcore.blocks.StructBlock(())), ('embed_block', wagtail.wagtailembeds.blocks.EmbedBlock()), ('whitespaceblock', wagtail.wagtailcore.blocks.StructBlock((('height', wagtail.wagtailcore.blocks.IntegerBlock()),))), ('clear_fixblock', wagtail.wagtailcore.blocks.StructBlock(())), ('code_block', wagtail.wagtailcore.blocks.StructBlock((('language', wagtail.wagtailcore.blocks.ChoiceBlock(choices=[('python', 'python'), ('css', 'css'), ('sql', 'sql'), ('javascript', 'javascript'), ('clike', 'clike'), ('markup', 'markup'), ('java', 'java')], default='python')), ('codes', wagtail.wagtailcore.blocks.TextBlock())))), ('calender_blog', wagtail.wagtailcore.blocks.StructBlock((('source', wagtail.wagtailcore.blocks.CharBlock(help_text='Such as: [email protected]', max_length=255, required=True)),)))), blank=True, null=True),
),
migrations.AlterField(
model_name='upimagepath',
name='upImagePath',
field=models.CharField(default='https://cosio.s3.amazonaws.com/images/up.original.png', help_text='Up image path', max_length=255),
),
]
| 136.59375 | 3,522 | 0.732098 | [
"Apache-2.0"
] | baylee-d/cos.io | common/migrations/0018_auto_20161014_1805.py | 4,371 | Python |
# Copyright 2013 IBM Corp
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# yardstick comment: this is a modified copy of
# ceilometer/ceilometer/dispatcher/http.py
from __future__ import absolute_import
import logging
import os
from oslo_serialization import jsonutils
import requests
from oslo_config import cfg
from yardstick.dispatcher.base import Base as DispatchBase
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
http_dispatcher_opts = [
cfg.StrOpt('target',
default='http://127.0.0.1:8000/results',
help='The target where the http request will be sent. '
'If this is not set, no data will be posted. For '
'example: target = http://hostname:1234/path'),
cfg.IntOpt('timeout',
default=5,
help='The max time in seconds to wait for a request to '
'timeout.'),
]
CONF.register_opts(http_dispatcher_opts, group="dispatcher_http")
class HttpDispatcher(DispatchBase):
"""Dispatcher class for posting data into a http target.
"""
__dispatcher_type__ = "Http"
def __init__(self, conf):
super(HttpDispatcher, self).__init__(conf)
self.headers = {'Content-type': 'application/json'}
self.timeout = CONF.dispatcher_http.timeout
self.target = CONF.dispatcher_http.target
self.raw_result = []
self.result = {
"project_name": "yardstick",
"description": "yardstick test cases result",
"pod_name": os.environ.get('NODE_NAME', 'unknown'),
"installer": os.environ.get('INSTALLER_TYPE', 'unknown'),
"version": os.environ.get('YARDSTICK_VERSION', 'unknown')
}
def record_result_data(self, data):
self.raw_result.append(data)
def flush_result_data(self):
if self.target == '':
# if the target was not set, do not do anything
            LOG.error('Dispatcher target was not set, no data will '
                      'be posted.')
return
self.result["details"] = self.raw_result
case_name = ""
for v in self.raw_result:
if isinstance(v, dict) and "scenario_cfg" in v:
case_name = v["scenario_cfg"]["tc"]
break
if case_name == "":
LOG.error('Test result : %s',
jsonutils.dump_as_bytes(self.result))
LOG.error('The case_name cannot be found, no data will be posted.')
return
self.result["case_name"] = case_name
try:
LOG.debug('Test result : %s',
jsonutils.dump_as_bytes(self.result))
res = requests.post(self.target,
data=jsonutils.dump_as_bytes(self.result),
headers=self.headers,
timeout=self.timeout)
LOG.debug('Test result posting finished with status code'
' %d.' % res.status_code)
except Exception as err:
LOG.exception('Failed to record result data: %s',
err)
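# Illustrative flow (comment only; in practice a Yardstick runner drives this
# dispatcher and `conf` comes from the parsed oslo configuration):
#
#   dispatcher = HttpDispatcher(conf)
#   dispatcher.record_result_data(result)   # each result dict carries a "scenario_cfg"
#   dispatcher.flush_result_data()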
| 35.257143 | 79 | 0.603998 | [
"Apache-2.0"
] | kkltcjk/kklt | yardstick/dispatcher/http.py | 3,702 | Python |
#created by Angus Clark on 8/01/2017
# TODO: incorporate the saving program into this dir
import socket
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
host = '130.56.253.43'
print host # remove when done debugging
port = 5201 # edit when port for comm is decided
s.bind((host,port))
f = open('temp.json','wb')
s.listen(5)
while True:
    c, addr = s.accept()
    l = c.recv(1024)
    while l:
        f.write(l)
        l = c.recv(1024)
    f.close()
    c.close() | 19.956522 | 53 | 0.651416 | [
"BSD-2-Clause"
] | wmizzi/tn2capstone | ServerScript/recievejson(legacy).py | 459 | Python |
import json
import requests
HUE_NUPNP_URL = "https://www.meethue.com/api/nupnp"
class APIException(Exception):
pass
class HueAPI(object):
def __init__(self, username):
self.username = username
self.ip = self.discover_hub_ip()
@property
def base_url(self):
return "http://{}/api/{}".format(self.ip, self.username)
def get_groups(self):
url = "{}/groups".format(self.base_url)
try:
r = requests.get(url)
except:
raise APIException("Failed to send group get GET")
try:
return list(r.json().keys())
except:
raise APIException("Failed to decode group get json response")
def set_group(self, group_id, state):
url = "{}/groups/{}/action".format(self.base_url, group_id)
try:
r = requests.put(url, data=json.dumps({"on": state}))
except:
raise APIException("Failed to send group set PUT")
def set_groups(self, state):
for group in self.get_groups():
self.set_group(group, state)
def discover_hub_ip(self):
try:
r = requests.get(HUE_NUPNP_URL)
except:
raise APIException("Failed to send hub ip GET")
try:
json_resp = r.json()
except:
raise APIException("Failed to decode hub ip json response")
if len(json_resp) > 0:
            return json_resp[0]['internalipaddress']
else:
raise APIException("Failed to find hub ip")
def _main():
pass
if __name__ == '__main__':
_main()
| 23.910448 | 74 | 0.576779 | [
"MIT"
] | BenDoan/playground | hue/hue_api.py | 1,602 | Python |
# Generated by Django 2.2.1 on 2019-05-21 10:29
from django.db import migrations, models
import uni_ticket.models
class Migration(migrations.Migration):
dependencies = [
('uni_ticket', '0030_auto_20190520_1532'),
]
operations = [
migrations.AlterField(
model_name='ticketcategoryinputlist',
name='input_type',
field=models.CharField(choices=[('CustomSignedP7MField', 'Allegato P7M firmato'), ('CustomFileField', 'Allegato PDF'), ('CustomSignedFileField', 'Allegato PDF'), ('CustomSignedPdfField', 'Allegato PDF firmato'), ('CustomHiddenField', 'Campo nascosto'), ('CheckBoxField', 'Checkbox'), ('BaseDateField', 'Data'), ('BaseDateTimeField', 'Data e Ora'), ('DateStartEndComplexField', 'Data inizio e Data fine'), ('DurataComeInteroField', 'Durata come numero intero (anni,mesi,ore)'), ('CustomComplexTableField', 'Inserimenti multipli'), ('CustomRadioBoxField', 'Lista di opzioni (checkbox)'), ('CustomSelectBoxField', 'Lista di opzioni (tendina)'), ('PositiveFloatField', 'Numero con virgola positivo'), ('PositiveIntegerField', 'Numero intero positivo'), ('ProtocolloField', 'Protocollo (tipo/numero/data)'), ('CustomCharField', 'Testo'), ('TextAreaField', 'Testo lungo')], max_length=33),
),
migrations.AlterField(
model_name='ticketreply',
name='attachment',
field=models.FileField(blank=True, default=None, null=True, upload_to=uni_ticket.models._reply_attachment_upload),
),
]
| 60.72 | 895 | 0.690382 | [
"Apache-2.0"
] | mspasiano/uniTicket | uni_ticket/migrations/0031_auto_20190521_1229.py | 1,518 | Python |
from typing import List, Literal, Union, Callable, Tuple
from dataclasses import dataclass, replace
from .config_types import (
TWInterface,
TWSettingStorage, TWSettingBool, TWSetting,
WrapType, Fields, AnkiModel, LabelText, WhichField, Tags, Falsifiable,
)
ScriptKeys = Literal[
'enabled',
'name',
'version',
'description',
'conditions',
'code',
]
def __list_to_tw_bool(prototype, vals: List[ScriptKeys]):
return replace(
prototype,
**dict([(key, True) for key in vals])
)
def make_interface(
# name for the type of the interface
tag: str,
prototype: WrapType,
getter: Callable[[str, TWSettingStorage], TWSetting],
    # the setter's result is used for storing
setter: Callable[[str, TWSetting], Union[bool, TWSetting]],
wrapper: Callable[[str, TWSettingStorage, AnkiModel, Fields, WhichField, slice, Tags], Tuple[Fields, Tags]],
label: Falsifiable(Callable[[str, TWSettingStorage], LabelText]),
reset: Falsifiable(Callable[[str, TWSettingStorage], TWSetting]),
deletable: Falsifiable(Callable[[str, TWSettingStorage], bool]),
    # list of values that are read-only
readonly: TWSettingBool,
    # list of values that are stored in the `storage` field
store: TWSettingBool,
) -> TWInterface:
return TWInterface(
tag,
prototype,
getter,
setter,
wrapper,
label,
reset,
deletable,
readonly,
store,
)
| 26.854545 | 112 | 0.654705 | [
"MIT"
] | hgiesel/anki_text_wrapper | src/lib/interface.py | 1,477 | Python |
import pymysql.cursors
import ldap
def get_domain_name():
"""
Returns the domain name of the current configuration from a config file
Returns
-------
string
the domain name
"""
with open("/var/www/logic_webapp/webapp_config") as file:
line = file.readline()
domain = line.split("=")[1].rstrip() # Take right hand side of = and remove \n
return domain
def get_db_password():
with open("/var/www/logic_webapp/webapp_config") as file:
line = file.readlines()[1]
password = line.split("=")[
1
].rstrip() # Take right hand side of = and remove \n
return password
def create_slurm_db_connection(host, port, user, password, db):
"""
Creates the connection to the database (MySQL) so it can be queried
Parameters
----------
host : string
hostname on which is located the DB
port : integer
port on which the connection is to be established
user : string
user name with which the connection is to be established
password : string
password of the user on the database (of the user `user`)
db : string
name of the database which will be queried
Returns
-------
PyMySQL Connection object
"""
connection = pymysql.connect(
host=host, port=port, user=user, password=password, db=db,
)
print("[+] Slurm accounting DB connection is up! [+]")
return connection
def create_ldap_connection(host):
"""
Creates an LDAP connection object with a given hostname
Parameters
----------
host : hostname with the LDAP database in the form of (ldap://host)
Returns
-------
LDAP connection object
"""
connection = ldap.initialize(host)
connection.set_option(ldap.OPT_REFERRALS, 0)
connection.simple_bind_s()
return connection
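

# Usage sketch (hedged): the host names, port, user and database below are
# placeholders, not values from this project's configuration; get_db_password()
# requires /var/www/logic_webapp/webapp_config to exist on the running machine.
if __name__ == "__main__":
    slurm_db = create_slurm_db_connection(
        host="localhost",
        port=3306,
        user="slurm",
        password=get_db_password(),
        db="slurm_acct_db",
    )
    with slurm_db.cursor() as cursor:
        cursor.execute("SELECT 1")
        print(cursor.fetchone())

    # Anonymous bind against a local LDAP server (placeholder URI)
    ldap_conn = create_ldap_connection("ldap://localhost")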
| 25.608108 | 87 | 0.626385 | [
"MIT"
] | Quoding/petricore | webapp/external_access.py | 1,895 | Python |
"""Form definitions, allow easy validation of input and rendering of forms
"""
# future imports
from __future__ import absolute_import
# local imports
from app.forms.pages.base import PageForm
class AboutPageForm(PageForm):
pass
| 18.230769 | 74 | 0.780591 | [
"Apache-2.0"
] | mjmcconnell/sra | src-server/app/forms/pages/about.py | 237 | Python |
from typing import Union, Iterator, Tuple
from ...symbols import NOUN, PROPN, PRON
from ...errors import Errors
from ...tokens import Doc, Span
def noun_chunks(doclike: Union[Doc, Span]) -> Iterator[Tuple[int, int, int]]:
"""
Detect base noun phrases from a dependency parse. Works on both Doc and Span.
"""
labels = [
"oprd",
"nsubj",
"dobj",
"nsubjpass",
"pcomp",
"pobj",
"dative",
"appos",
"attr",
"ROOT",
]
doc = doclike.doc # Ensure works on both Doc and Span.
if not doc.has_annotation("DEP"):
raise ValueError(Errors.E029)
np_deps = [doc.vocab.strings.add(label) for label in labels]
conj = doc.vocab.strings.add("conj")
np_label = doc.vocab.strings.add("NP")
prev_end = -1
for i, word in enumerate(doclike):
if word.pos not in (NOUN, PROPN, PRON):
continue
# Prevent nested chunks from being produced
if word.left_edge.i <= prev_end:
continue
if word.dep in np_deps:
prev_end = word.i
yield word.left_edge.i, word.i + 1, np_label
elif word.dep == conj:
head = word.head
while head.dep == conj and head.head.i < head.i:
head = head.head
# If the head is an NP, and we're coordinated to it, we're an NP
if head.dep in np_deps:
prev_end = word.i
yield word.left_edge.i, word.i + 1, np_label
SYNTAX_ITERATORS = {"noun_chunks": noun_chunks}
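

# Usage sketch (hedged): Doc.noun_chunks dispatches to the iterator above through
# SYNTAX_ITERATORS. The pipeline name below is an assumption and must be installed
# separately (python -m spacy download en_core_web_sm).
if __name__ == "__main__":
    import spacy

    nlp = spacy.load("en_core_web_sm")
    doc = nlp("Autonomous cars shift insurance liability toward manufacturers.")
    print([chunk.text for chunk in doc.noun_chunks])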
| 30.784314 | 81 | 0.56879 | [
"Apache-2.0",
"MIT"
] | Alan-love/spaCy | spacy/lang/en/syntax_iterators.py | 1,570 | Python |
"""Permissions for the client app"""
from rest_framework import permissions
class ClientPermissions(permissions.BasePermission):
"""Handles authorization of requests to the client app."""
def has_permission(self, request, view):
if view.action == 'create' \
and request.user.has_perm('client.add_client'):
return True
if view.action in ['update', 'partial_update'] \
and request.user.has_perm('client.change_client'):
return True
if view.action in ['list', 'retrieve'] \
and request.user.has_perm('client.view_client'):
return True
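        # Explicitly deny anything not matched above (DRF also treats a
        # None return value as a denial).
        return False


# Usage sketch (hedged): wiring the permission class into a DRF ViewSet.
# "ClientViewSet", "Client" and "ClientSerializer" are illustrative names, not
# classes defined in this app.
#
#   from rest_framework import viewsets
#
#   class ClientViewSet(viewsets.ModelViewSet):
#       queryset = Client.objects.all()
#       serializer_class = ClientSerializer
#       permission_classes = (ClientPermissions,)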
| 34.315789 | 66 | 0.628834 | [
"MIT"
] | My-Garage/resourceideaapi | client/permissions.py | 652 | Python |
import os
import discord
import youtube_dl as ytdl
class MusicPlayer:
'''
    This class is responsible for connecting and disconnecting the bot from a voice channel, downloading songs from
    YouTube and adding them to the queue. It offers basic music functions like pause, resume, stop and play, in order
    to give users a simple music bot based on the new Discord API.
'''
def __init__(self):
self.queue = []
self.voiceChannel = None
self.ydl_opts = {
'format': 'bestaudio/best',
# 'quiet' : True,
'outtmpl': 'songs/%(title)s-%(id)s.%(ext)s',
'postprocessors': [{
'key': 'FFmpegExtractAudio',
'preferredcodec': 'mp3',
'preferredquality': '192',
}],
}
async def connect(self, channel):
'''
        Connects the bot to the given voice channel, if it is not already connected.
        :param channel: The channel from which the user sent the command
'''
if self.voiceChannel is None or not self.voiceChannel.is_connected():
self.voiceChannel = await channel.connect()
async def disconnect(self):
'''
        Disconnects from the channel the bot is currently connected to. If there is no such channel,
        this function simply does nothing.
'''
if self.voiceChannel is not None and self.voiceChannel.is_connected():
await self.voiceChannel.disconnect()
def getNextSong(self):
'''
If the queue is not empty this function will remove the first song from the queue and return it
:return: the next song of the queue, or None if the queue is empty
'''
if self.queue:
return self.queue.pop(0)
else:
return None
def clear_folder(self):
'''
        Because the songs are downloaded to disk, it is important to delete them once they are no longer needed.
        This function deletes the songs that are not in the queue (i.e. not one of the upcoming songs).
'''
for song in os.listdir("songs/"):
if "songs/" + song not in self.queue:
os.remove("songs/" + song)
async def add_song(self, url, ctx):
'''
        Adds a new song from YouTube to the queue. It will not be downloaded again if it is already in the songs folder.
:param url: The url of the youtube song
:param ctx: The channel from which the user send the command
'''
with ytdl.YoutubeDL(self.ydl_opts) as ydl:
info_dict = ydl.extract_info(url, download=False)
title = "songs/" + info_dict['title'] + "-" + info_dict['id'] + ".mp3"
if title not in self.queue:
await ctx.send("Your song is downloading now!")
ydl.extract_info(url, download=True)
self.queue.append(title)
if self.voiceChannel is None or not self.voiceChannel.is_connected() or not self.voiceChannel.is_playing():
await ctx.send("Your song has added to the queue, use $play to start the party!!")
else:
await ctx.send("Your song has added to the queue")
def load_next_song(self):
'''
        This will create an FFmpeg audio source and start playing it in the voice channel.
'''
if not self.voiceChannel.is_playing() and self.queue:
audio_source = discord.FFmpegPCMAudio(self.getNextSong())
# TODO: make the bot play the next song after the previous one has ended
self.voiceChannel.play(audio_source, after=None)
async def pause_song(self, ctx):
'''
        Pauses a song that is currently being played, or sends a message if there is no such song.
:param ctx: The channel from which the user gave the command.
'''
if self.voiceChannel is not None and self.voiceChannel.is_connected() and self.voiceChannel.is_playing():
self.voiceChannel.pause()
else:
await ctx.send("There is no song playing in order to pause it")
async def resume_song(self, ctx):
'''
        Resumes a song if there is one that has been paused, or sends a message if there is no such song.
:param ctx: The channel from which the user gave the command.
'''
if self.voiceChannel is not None and self.voiceChannel.is_connected() and self.voiceChannel.is_paused():
self.voiceChannel.resume()
else:
await ctx.send("There is no song paused in order to resume it")
async def stop(self, ctx):
'''
        Stops the music if any is playing, or sends a message if there is not. At the end, clears the folder of
        unnecessary songs.
:param ctx: The channel from which the user gave the command.
'''
if self.voiceChannel is not None and self.voiceChannel.is_connected() and self.voiceChannel.is_playing():
self.voiceChannel.stop()
else:
await ctx.send("There is no song playing in order to stop it")
self.clear_folder()
async def next(self, ctx):
'''
        Stops the current song and starts the next one. The user will be informed with a message if there is no other
        song or if there is no song playing at the moment.
:param ctx: The channel from which the user gave the command.
'''
if self.voiceChannel is not None and self.voiceChannel.is_connected() and self.voiceChannel.is_playing() \
and self.queue:
await self.stop(ctx)
self.load_next_song()
elif not self.queue:
await ctx.send("There is no other song in the queue")
else:
await ctx.send("There is no song playing, maybe use $play to start playing songs from the queue")
async def play(self, ctx, channel):
'''
        Starts playing the first song in the queue. If there are no songs in the queue or there is some music playing
        at this moment, the user will be informed with messages.
        :param ctx: The channel from which the user gave the command.
        :param channel: The voice channel the bot should connect to.
'''
await self.connect(channel)
if self.voiceChannel is not None and self.voiceChannel.is_connected() and not self.voiceChannel.is_playing()\
and self.queue:
self.load_next_song()
elif not self.queue:
await ctx.send("There is no song in the list")
elif self.voiceChannel.is_playing():
await ctx.send("THere is already some music playing. Increase the volume and join the party!")
| 43.117647 | 120 | 0.621949 | [
"MIT"
] | mavroudo/jarvis-discord | modules/youtube_music.py | 6,597 | Python |
from typing import Any, Dict, Iterable, Optional, Tuple
from datastore.shared.util import DeletedModelsBehaviour
from ....models.checker import Checker, CheckException
from ....models.models import Organization
from ....shared.exceptions import ActionException
from ....shared.filters import FilterOperator
from ....shared.interfaces.event import EventType
from ....shared.interfaces.write_request import WriteRequest
from ....shared.patterns import Collection, FullQualifiedId
from ....shared.util import INITIAL_DATA_FILE, get_initial_data_file
from ...action import Action
from ...mixins.singular_action_mixin import SingularActionMixin
from ...util.action_type import ActionType
from ...util.default_schema import DefaultSchema
from ...util.register import register_action
from ...util.typing import ActionData, ActionResults
@register_action("organization.initial_import", action_type=ActionType.STACK_INTERNAL)
class OrganizationInitialImport(SingularActionMixin, Action):
"""
Action to import an initial-data.json in an empty datastore.
Should be callable from the management service.
"""
model = Organization()
schema = DefaultSchema(Organization()).get_default_schema(
additional_required_fields={"data": {"type": "object"}},
title="Import initial data.",
description="Import an initial data json in an empty datastore.",
)
def perform(
self, action_data: ActionData, user_id: int, internal: bool = False
) -> Tuple[Optional[WriteRequest], Optional[ActionResults]]:
"""
Simplified entrypoint to perform the action.
"""
self.user_id = user_id
self.index = 0
instance = next(iter(action_data))
self.validate_instance(instance)
instance = self.update_instance(instance)
self.write_requests.extend(self.create_write_requests(instance))
final_write_request = self.process_write_requests()
return (final_write_request, [None])
def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
data = instance["data"]
self.check_empty_datastore()
if not data:
data = get_initial_data_file(INITIAL_DATA_FILE)
instance["data"] = data
        # check data validation
checker = Checker(data=data, mode="all")
try:
checker.run_check()
except CheckException as ce:
raise ActionException(str(ce))
return instance
def check_empty_datastore(self) -> None:
filter_ = FilterOperator("id", ">=", 1)
if self.datastore.exists(
Collection("organization"),
filter_,
DeletedModelsBehaviour.ALL_MODELS,
False,
):
raise ActionException("Datastore is not empty.")
def create_write_requests(self, instance: Dict[str, Any]) -> Iterable[WriteRequest]:
json_data = instance["data"]
write_requests = []
for collection in json_data:
for entry in json_data[collection].values():
fqid = FullQualifiedId(Collection(collection), entry["id"])
write_requests.append(
self.build_write_request(
EventType.Create,
fqid,
"initial import",
entry,
)
)
return write_requests
| 36.978495 | 88 | 0.655714 | [
"MIT"
] | JLkp/openslides-backend | openslides_backend/action/actions/organization/initial_import.py | 3,439 | Python |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkecs.endpoint import endpoint_data
class ModifyLaunchTemplateDefaultVersionRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ecs', '2014-05-26', 'ModifyLaunchTemplateDefaultVersion')
self.set_method('POST')
if hasattr(self, "endpoint_map"):
setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
if hasattr(self, "endpoint_regional"):
setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
def get_LaunchTemplateName(self):
return self.get_query_params().get('LaunchTemplateName')
def set_LaunchTemplateName(self,LaunchTemplateName):
self.add_query_param('LaunchTemplateName',LaunchTemplateName)
def get_ResourceOwnerId(self):
return self.get_query_params().get('ResourceOwnerId')
def set_ResourceOwnerId(self,ResourceOwnerId):
self.add_query_param('ResourceOwnerId',ResourceOwnerId)
def get_LaunchTemplateId(self):
return self.get_query_params().get('LaunchTemplateId')
def set_LaunchTemplateId(self,LaunchTemplateId):
self.add_query_param('LaunchTemplateId',LaunchTemplateId)
def get_ResourceOwnerAccount(self):
return self.get_query_params().get('ResourceOwnerAccount')
def set_ResourceOwnerAccount(self,ResourceOwnerAccount):
self.add_query_param('ResourceOwnerAccount',ResourceOwnerAccount)
def get_OwnerAccount(self):
return self.get_query_params().get('OwnerAccount')
def set_OwnerAccount(self,OwnerAccount):
self.add_query_param('OwnerAccount',OwnerAccount)
def get_OwnerId(self):
return self.get_query_params().get('OwnerId')
def set_OwnerId(self,OwnerId):
self.add_query_param('OwnerId',OwnerId)
def get_DefaultVersionNumber(self):
return self.get_query_params().get('DefaultVersionNumber')
def set_DefaultVersionNumber(self,DefaultVersionNumber):
self.add_query_param('DefaultVersionNumber',DefaultVersionNumber) | 36.918919 | 87 | 0.786603 | [
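

# Usage sketch (hedged): sending this request with an AcsClient. The credentials,
# region and identifiers below are placeholders, not values from this SDK.
if __name__ == "__main__":
    from aliyunsdkcore.client import AcsClient

    client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")
    request = ModifyLaunchTemplateDefaultVersionRequest()
    request.set_LaunchTemplateId("lt-xxxxxxxxxxxx")
    request.set_DefaultVersionNumber(2)
    # do_action_with_exception returns the raw JSON response body
    print(client.do_action_with_exception(request))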
"Apache-2.0"
] | hetw/aliyun-openapi-python-sdk | aliyun-python-sdk-ecs/aliyunsdkecs/request/v20140526/ModifyLaunchTemplateDefaultVersionRequest.py | 2,732 | Python |
"""
@brief test log(time=20s)
"""
import sys
import os
import unittest
from pyquickhelper.loghelper import fLOG, run_cmd
from pyquickhelper.pycode import get_temp_folder, fix_tkinter_issues_virtualenv, skipif_appveyor, skipif_travis
from pyquickhelper.pycode import add_missing_development_version
class TestPyData2016Animation(unittest.TestCase):
@skipif_appveyor("no ffmpeg installed")
@skipif_travis("issue with datashader.bokeh_ext, skipping")
@skipif_appveyor("issue with pyproj")
def test_matplotlib_example(self):
fLOG(
__file__,
self._testMethodName,
OutputPrint=__name__ == "__main__")
progs = ["ffmpeg"]
if not sys.platform.startswith("win"):
progs.append("avconv")
errs = []
prog = None
for prog in progs:
out, err = run_cmd(prog, wait=True, fLOG=fLOG)
exps = "usage:"
if (exps not in out and exps not in err) or err is None or len(err) == 0:
errs.append((prog, err))
else:
break
if len(errs) >= len(progs):
if sys.platform.startswith("win"):
fLOG("download ffmpeg")
add_missing_development_version(
["pyensae"], __file__, hide=True)
from pyensae.datasource import download_data
download_data("ffmpeg.zip", website="xd")
else:
raise FileNotFoundError(
"Unable to find '{1}'.\nPATH='{0}'\n--------\n[OUT]\n{2}\n[ERR]\n{3}".format(
os.environ["PATH"], prog, out,
"\n----\n".join("{0}:\n{1}".format(*_) for _ in errs)))
temp = get_temp_folder(__file__, "temp_example_example")
fix_tkinter_issues_virtualenv()
# update a distribution based on new data.
import numpy as np
import matplotlib.pyplot as plt
import scipy.stats as ss
from matplotlib.animation import FuncAnimation, writers
# To get the list of available writers
if not writers.is_available(prog):
writers.register(prog)
fLOG(writers.list())
class UpdateDist:
def __init__(self, ax, prob=0.5):
self.success = 0
self.prob = prob
self.line, = ax.plot([], [], 'k-')
self.x = np.linspace(0, 1, 200)
self.ax = ax
# Set up plot parameters
self.ax.set_xlim(0, 1)
self.ax.set_ylim(0, 15)
self.ax.grid(True)
# This vertical line represents the theoretical value, to
# which the plotted distribution should converge.
self.ax.axvline(prob, linestyle='--', color='black')
def init(self):
self.success = 0
self.line.set_data([], [])
return self.line,
def __call__(self, i):
# This way the plot can continuously run and we just keep
# watching new realizations of the process
if i == 0:
return self.init()
# Choose success based on exceed a threshold with a uniform
# pick
if np.random.rand(1,) < self.prob: # pylint: disable=W0143
self.success += 1
y = ss.beta.pdf(self.x, self.success + 1,
(i - self.success) + 1)
self.line.set_data(self.x, y)
return self.line,
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
ud = UpdateDist(ax, prob=0.7)
anim = FuncAnimation(fig, ud, frames=np.arange(100), init_func=ud.init,
interval=100, blit=True)
try:
Writer = writers[prog]
except KeyError as e:
if prog == "avconv":
from matplotlib.animation import AVConvWriter
Writer = AVConvWriter
else:
raise e
writer = Writer(fps=15, metadata=dict(artist='Me'), bitrate=1800)
anim.save(os.path.join(temp, 'lines2.mp4'), writer=writer)
plt.close('all')
fLOG("end")
if __name__ == "__main__":
unittest.main()
| 34.816 | 111 | 0.535156 | [
"MIT"
] | sdpython/jupytalk | _unittests/ut_talk_examples/test_pydata2016_animation.py | 4,352 | Python |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.login_root, name='login_root'),
url(r'^success/$', views.login_success, name='login_success'),
url(r'^fail/$', views.login_fail, name='login_fail'),
]
| 25.1 | 66 | 0.673307 | [
"MIT"
] | omiguelperez/DjangoLoginBDD | bdd_example/login/urls.py | 251 | Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Scaleway IP management module
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: scaleway_ip
short_description: Scaleway IP management module
author: Remy Leone (@remyleone)
description:
- This module manages IP on Scaleway account
U(https://developer.scaleway.com)
extends_documentation_fragment:
- community.general.scaleway
options:
state:
type: str
description:
- Indicate desired state of the IP.
default: present
choices:
- present
- absent
organization:
type: str
description:
- Scaleway organization identifier
required: true
region:
type: str
description:
- Scaleway region to use (for example par1).
required: true
choices:
- ams1
- EMEA-NL-EVS
- par1
- EMEA-FR-PAR1
- par2
- EMEA-FR-PAR2
- waw1
- EMEA-PL-WAW1
id:
type: str
description:
- id of the Scaleway IP (UUID)
server:
type: str
description:
- id of the server you want to attach an IP to.
- To unattach an IP don't specify this option
reverse:
type: str
description:
- Reverse to assign to the IP
'''
EXAMPLES = '''
- name: Create an IP
community.general.scaleway_ip:
organization: '{{ scw_org }}'
state: present
region: par1
register: ip_creation_task
- name: Make sure IP deleted
community.general.scaleway_ip:
id: '{{ ip_creation_task.scaleway_ip.id }}'
state: absent
region: par1
'''
RETURN = '''
data:
description: This is only present when C(state=present)
returned: when C(state=present)
type: dict
sample: {
"ips": [
{
"organization": "951df375-e094-4d26-97c1-ba548eeb9c42",
"reverse": null,
"id": "dd9e8df6-6775-4863-b517-e0b0ee3d7477",
"server": {
"id": "3f1568ca-b1a2-4e98-b6f7-31a0588157f1",
"name": "ansible_tuto-1"
},
"address": "212.47.232.136"
}
]
}
'''
from ansible_collections.community.general.plugins.module_utils.scaleway import SCALEWAY_LOCATION, scaleway_argument_spec, Scaleway
from ansible.module_utils.basic import AnsibleModule
def ip_attributes_should_be_changed(api, target_ip, wished_ip):
patch_payload = {}
if target_ip["reverse"] != wished_ip["reverse"]:
patch_payload["reverse"] = wished_ip["reverse"]
# IP is assigned to a server
if target_ip["server"] is None and wished_ip["server"]:
patch_payload["server"] = wished_ip["server"]
# IP is unassigned to a server
try:
if target_ip["server"]["id"] and wished_ip["server"] is None:
patch_payload["server"] = wished_ip["server"]
except (TypeError, KeyError):
pass
# IP is migrated between 2 different servers
try:
if target_ip["server"]["id"] != wished_ip["server"]:
patch_payload["server"] = wished_ip["server"]
except (TypeError, KeyError):
pass
return patch_payload
def payload_from_wished_ip(wished_ip):
return dict(
(k, v)
for k, v in wished_ip.items()
if k != 'id' and v is not None
)
def present_strategy(api, wished_ip):
changed = False
response = api.get('ips')
if not response.ok:
api.module.fail_json(msg='Error getting IPs [{0}: {1}]'.format(
response.status_code, response.json['message']))
ips_list = response.json["ips"]
ip_lookup = dict((ip["id"], ip)
for ip in ips_list)
if wished_ip["id"] not in ip_lookup.keys():
changed = True
if api.module.check_mode:
return changed, {"status": "An IP would be created."}
# Create IP
creation_response = api.post('/ips',
data=payload_from_wished_ip(wished_ip))
if not creation_response.ok:
msg = "Error during ip creation: %s: '%s' (%s)" % (creation_response.info['msg'],
creation_response.json['message'],
creation_response.json)
api.module.fail_json(msg=msg)
return changed, creation_response.json["ip"]
target_ip = ip_lookup[wished_ip["id"]]
patch_payload = ip_attributes_should_be_changed(api=api, target_ip=target_ip, wished_ip=wished_ip)
if not patch_payload:
return changed, target_ip
changed = True
if api.module.check_mode:
return changed, {"status": "IP attributes would be changed."}
ip_patch_response = api.patch(path="ips/%s" % target_ip["id"],
data=patch_payload)
if not ip_patch_response.ok:
api.module.fail_json(msg='Error during IP attributes update: [{0}: {1}]'.format(
ip_patch_response.status_code, ip_patch_response.json['message']))
return changed, ip_patch_response.json["ip"]
def absent_strategy(api, wished_ip):
response = api.get('ips')
changed = False
status_code = response.status_code
ips_json = response.json
ips_list = ips_json["ips"]
if not response.ok:
api.module.fail_json(msg='Error getting IPs [{0}: {1}]'.format(
status_code, response.json['message']))
ip_lookup = dict((ip["id"], ip)
for ip in ips_list)
if wished_ip["id"] not in ip_lookup.keys():
return changed, {}
changed = True
if api.module.check_mode:
return changed, {"status": "IP would be destroyed"}
response = api.delete('/ips/' + wished_ip["id"])
if not response.ok:
api.module.fail_json(msg='Error deleting IP [{0}: {1}]'.format(
response.status_code, response.json))
return changed, response.json
def core(module):
wished_ip = {
"organization": module.params['organization'],
"reverse": module.params["reverse"],
"id": module.params["id"],
"server": module.params["server"]
}
region = module.params["region"]
module.params['api_url'] = SCALEWAY_LOCATION[region]["api_endpoint"]
api = Scaleway(module=module)
if module.params["state"] == "absent":
changed, summary = absent_strategy(api=api, wished_ip=wished_ip)
else:
changed, summary = present_strategy(api=api, wished_ip=wished_ip)
module.exit_json(changed=changed, scaleway_ip=summary)
def main():
argument_spec = scaleway_argument_spec()
argument_spec.update(dict(
state=dict(default='present', choices=['absent', 'present']),
organization=dict(required=True),
server=dict(),
reverse=dict(),
region=dict(required=True, choices=list(SCALEWAY_LOCATION.keys())),
id=dict()
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
core(module)
if __name__ == '__main__':
main()
| 27.178707 | 131 | 0.615837 | [
"MIT"
] | elixir-no-nels/usegalaxy | venv/lib/python3.6/site-packages/ansible_collections/community/general/plugins/modules/cloud/scaleway/scaleway_ip.py | 7,148 | Python |
"""
# Interaction Tracker
# @license http://www.apache.org/licenses/LICENSE-2.0
# Author @ Jamil Hussain, Zaki
"""
from django.conf import settings
from django.contrib.auth.models import (
BaseUserManager, AbstractBaseUser
)
from django.core.validators import RegexValidator
from django.db import models
from django.db.models.signals import post_save
# Create your models here.
from .utils import code_generator
USERNAME_REGEX = '^[a-zA-Z0-9.+-]*$'
class MyUserManager(BaseUserManager):
def create_user(self, username, email, password=None):
"""
Creates and saves a User with the given email, date of
birth and password.
"""
if not email:
raise ValueError('Users must have an email address')
user = self.model(
username = username,
email=self.normalize_email(email),
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, username, email, password):
"""
Creates and saves a superuser with the given email, date of
birth and password.
"""
user = self.create_user(
username,
email,
password=password,
)
user.is_admin = True
user.is_staff = True
user.save(using=self._db)
return user
# The data should be collected on the basis of the user ID. This model stores information about the user.
class MyUser(AbstractBaseUser):
username = models.CharField(
max_length=255,
validators=[
RegexValidator(
regex = USERNAME_REGEX,
                message = 'Username must be alphanumeric or contain any of the following: ". @ + -" ',
code='invalid_username'
)],
unique=True,
)
email = models.EmailField(
verbose_name='email address',
max_length=255,
unique=True,
)
GENDER_CHOICES = (
('male', 'Male'),
('female', 'Female'),
)
password = models.CharField(max_length=255)
date_of_birth= models.DateField(blank=True,null=True)
gender= models.CharField(max_length=50, null=True, choices=GENDER_CHOICES)
height= models.IntegerField(blank=True,null=True)
weight=models.IntegerField(blank=True,null=True)
user_sight = models.CharField(max_length=50, null=True)
user_hearing = models.CharField(max_length=50, null=True)
user_touch = models.CharField(max_length=50, null=True)
is_active = models.BooleanField(default=True)
is_staff = models.BooleanField(default=False)
is_admin = models.BooleanField(default=False)
objects = MyUserManager()
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = ['email']
def get_full_name(self):
# The user is identified by their email address
return self.email
def get_short_name(self):
# The user is identified by their email address
return self.email
def __str__(self): # __unicode__ on Python 2
return self.email
def has_perm(self, perm, obj=None):
"Does the user have a specific permission?"
# Simplest possible answer: Yes, always
return True
def has_module_perms(self, app_label):
"Does the user have permissions to view the app `app_label`?"
# Simplest possible answer: Yes, always
return True
class ActivationProfile(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL)
key = models.CharField(max_length=120)
expired = models.BooleanField(default=False)
def save(self, *args, **kwargs):
self.key = code_generator()
super(ActivationProfile, self).save(*args, **kwargs)
def post_save_activation_receiver(sender, instance, created, *args, **kwargs):
if created:
#send email
print('activation created')
post_save.connect(post_save_activation_receiver, sender=ActivationProfile)
class Profile(models.Model):
user = models.OneToOneField(settings.AUTH_USER_MODEL)
city = models.CharField(max_length=120, null=True, blank=True)
def __str__(self):
return str(self.user.username)
def __unicode__(self):
return str(self.user.username)
def post_save_user_model_receiver(sender, instance, created, *args, **kwargs):
if created:
try:
Profile.objects.create(user=instance)
ActivationProfile.objects.create(user=instance)
except:
pass
post_save.connect(post_save_user_model_receiver, sender=settings.AUTH_USER_MODEL)
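

# Usage sketch (hedged): creating accounts through the custom manager, e.g. from
# a Django shell. The usernames, emails and passwords below are placeholders.
#
#   user = MyUser.objects.create_user("jamil", "[email protected]", password="s3cret")
#   admin = MyUser.objects.create_superuser("admin", "[email protected]", "s3cret")
#   Profile.objects.get(user=user)   # created automatically by the post_save signal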
| 29.099379 | 110 | 0.646105 | [
"Apache-2.0"
] | Ahmar123/Lean-UX-Platform | InteractionTracker/accounts/models.py | 4,685 | Python |
from __future__ import unicode_literals
import os
import inspect
from webob import Request, Response
from parse import parse
from jinja2 import FileSystemLoader, Environment
from requests import session as RequestsSession
from wsgiadapter import WSGIAdapter as RequestWSGIAdapter
class API:
"""
    This is just an example
    to get the response "Hello World!" in a web browser.
"""
def __call__(self, environ, start_response):
response_body = b"Hello World!"
status = "200 OK"
start_response(status, headers=[])
return iter([response_body])
class RequestAPI:
"""
    This is just an example
    to get the response "Hello World!" in a web browser.
"""
def __call__(self, environ, start_response):
request = Request(environ)
response = Response()
response.text = "Hello, World!"
return response(environ, start_response)
class UserRequest:
def __call__(self, environ, start_response):
request = Request(environ)
response = self.handle_request(request)
return response(environ, start_response)
def handle_request(self, request):
"""
based on mozilla documentation
"""
user = request.environ.get("HTTP_USER_AGENT", "No User Agent Found")
response = Response()
response.text = f"This is {user}"
return response
class UserRequestHandler:
def __init__(self):
self.routes = {}
def __call__(self, environ, start_response):
request = Request(environ)
response = self.handle_request(request)
return response(environ, start_response)
def route_url(self, path, handler):
assert path not in self.routes, "Routes Already Exists"
self.routes[path] = handler
def route(self, path):
def wrapper(handler):
self.route_url(path, handler)
return handler
return wrapper
def default_response(self, response):
response.status_code = 404
response.text = "Page Not Found."
def handle_request(self, request):
response = Response()
handler, kwargs = self.find_handler(request_path=request.path)
if handler is not None:
handler(request, response, **kwargs)
else:
self.default_response(response)
return response
def find_handler(self, request_path):
for path, handler in self.routes.items():
parse_result = parse(path, request_path)
if parse_result is not None:
return handler, parse_result.named
return None, None
class UserRequestBasedHandler:
"""
    Class implementing an alternative
    routing approach using class-based handlers
"""
def __init__(self, templates_dirs="templates"):
self.routes = {}
self.templates_env = Environment(
loader=FileSystemLoader(os.path.abspath(templates_dirs)))
def __call__(self, environ, start_response):
request = Request(environ)
response = self.class_based_request(request)
return response(environ, start_response)
def url(self, path, handler):
assert path not in self.routes, "Routes Already Exists"
self.routes[path] = handler
def route(self, path):
def wrapper(handler):
self.url(path, handler)
return handler
return wrapper
def default_response(self, response):
response.status_code = 404
response.text = "Page Not Found"
def class_based_request(self, request):
"""
        Dispatches to class-based views, similar to what
        frameworks such as Django already implement
"""
response = Response()
handler, kwargs = self.find_handler_request(request_path=request.path)
if handler is not None:
if inspect.isclass(handler):
handler = getattr(handler(), request.method.lower(), None)
if handler is None:
raise AttributeError("Method now allowed", request.method)
handler(request, response, **kwargs)
else:
self.default_response(response)
return response
def find_handler_request(self, request_path):
for path, handler in self.routes.items():
parse_result = parse(path, request_path)
if parse_result is not None:
return handler, parse_result.named
return None, None
def template(self, template_name, context=None):
if context is None:
context = {}
return self.templates_env.get_template(template_name).render(**context)
def session(self, base_url="http://baseserver"):
"""
        Mounts this app onto a requests session object;
        any request whose URL starts with the given
        base_url prefix will be handled by this app
"""
session = RequestsSession()
session.mount(prefix=base_url, adapter=RequestWSGIAdapter(self))
return session
| 25.218274 | 79 | 0.633655 | [
"BSD-3-Clause"
] | sodrooome/diy | diy/api.py | 4,968 | Python |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ConnectionMonitorQueryResult(Model):
"""List of connection states snaphots.
:param source_status: Status of connection monitor source. Possible values
include: 'Uknown', 'Active', 'Inactive'
:type source_status: str or
~azure.mgmt.network.v2018_10_01.models.ConnectionMonitorSourceStatus
:param states: Information about connection states.
:type states:
list[~azure.mgmt.network.v2018_10_01.models.ConnectionStateSnapshot]
"""
_attribute_map = {
'source_status': {'key': 'sourceStatus', 'type': 'str'},
'states': {'key': 'states', 'type': '[ConnectionStateSnapshot]'},
}
def __init__(self, **kwargs):
super(ConnectionMonitorQueryResult, self).__init__(**kwargs)
self.source_status = kwargs.get('source_status', None)
self.states = kwargs.get('states', None)
| 38.138889 | 78 | 0.633649 | [
"MIT"
] | acured/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2018_10_01/models/connection_monitor_query_result.py | 1,373 | Python |
#!/usr/bin/env python
import unittest
from tests.base import PyangBindTestCase
class EnumerationTests(PyangBindTestCase):
yang_files = ["enumeration.yang"]
def setUp(self):
self.enum_obj = self.bindings.enumeration()
def test_container_has_all_leafs(self):
for leaf in ["e", "f"]:
with self.subTest(leaf=leaf):
self.assertTrue(
hasattr(self.enum_obj.container, leaf), "Container does not contain enumeration %s" % leaf
)
def test_assign_to_enum(self):
self.enum_obj.container.e = "one"
self.assertEqual(
self.enum_obj.container.e,
"one",
"Enumeration value was not correctly set (%s)" % self.enum_obj.container.e,
)
def test_enum_does_not_allow_invalid_value(self):
allowed = True
try:
self.enum_obj.container.e = "twentyseven"
except ValueError:
allowed = False
self.assertFalse(
allowed, "Erroneous value was not caught by restriction handler (%s)" % self.enum_obj.container.e
)
def test_enum_default_value(self):
self.assertEqual(
self.enum_obj.container.f._default,
"c",
"Erroneous default value for 'f' (%s)" % self.enum_obj.container.f._default,
)
def test_static_enum_value(self):
self.enum_obj.container.e = "two"
self.assertEqual(
self.enum_obj.container.e.getValue(mapped=True),
42,
"Erroneously statically defined value returned (%s)" % self.enum_obj.container.e.getValue(mapped=True),
)
if __name__ == "__main__":
unittest.main()
| 30.070175 | 115 | 0.608518 | [
"Apache-2.0"
] | JoseIgnacioTamayo/pyangbind | tests/enumeration/run.py | 1,714 | Python |
import os
from flask_script import Manager
from flask_migrate import MigrateCommand
from App import create_app
env = os.environ.get("flask_env", "develop")
app = create_app(env)
manager = Manager(app)
manager.add_command("db", MigrateCommand)
if __name__ == '__main__':
manager.run()
| 16.388889 | 44 | 0.755932 | [
"MIT"
] | jonathan-hxj/FlaskTpp | manager.py | 295 | Python |
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import unittest
import numpy as np
from enum import Enum
import paddle
import paddle.static
map_np_dtype_to_fluid_dtype = {
'bool': "bool",
'int8': "int8",
'uint8': "uint8",
"int32": "int32",
"int64": "int64",
"float16": "float16",
"float32": "float32",
"float64": "float64",
}
class ExecutionMode(Enum):
CPU_FP32 = 1
IPU_FP32 = 2
# enable_fp16 through ipu_strategy.enable_fp16
IPU_POPART_FP16 = 3
def __lt__(self, other):
return self.value < other.value
def __gt__(self, other):
return self.value > other.value
def np_dtype_to_fluid_str(dtype: np.dtype) -> str:
return map_np_dtype_to_fluid_dtype[dtype.name]
class IPUOpTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Get random seeds
cls._np_rand_state = np.random.get_state()
cls._py_rand_state = random.getstate()
cls.SEED = 2021
np.random.seed(cls.SEED)
random.seed(cls.SEED)
# Enable paddle static graph mode
paddle.enable_static()
@classmethod
def tearDownClass(cls):
"""Restore random seeds"""
np.random.set_state(cls._np_rand_state)
random.setstate(cls._py_rand_state)
@classmethod
def use_ipumodel(cls):
if 'POPLAR_IPUMODEL' not in os.environ:
return False
else:
flag = os.environ['POPLAR_IPUMODEL']
            if flag.upper() in ['1', "TRUE"]:
                return True
            return False
def set_atol(self):
self.atol = 1e-10
self.rtol = 1e-6
self.atol_fp16 = 1e-3
self.rtol_fp16 = 1e-3
def set_training(self):
self.is_training = False
self.epoch = 1
def check(self, outputs, check_shape=False):
cpu_fp32 = outputs[ExecutionMode.CPU_FP32]
ipu_fp32 = outputs[ExecutionMode.IPU_FP32]
max_diff = np.abs(cpu_fp32 - ipu_fp32).max()
fp32_flag = np.allclose(
cpu_fp32, ipu_fp32, rtol=self.rtol, atol=self.atol)
self.assertTrue(fp32_flag, "max diff is %f" % (max_diff))
if check_shape:
self.assertTrue(cpu_fp32.shape == ipu_fp32.shape)
ipu_popart_fp16 = None
if ExecutionMode.IPU_POPART_FP16 in outputs.keys():
ipu_popart_fp16 = outputs[ExecutionMode.IPU_POPART_FP16]
max_diff = np.abs(ipu_popart_fp16.astype(np.float32) -
cpu_fp32).max()
fp16_flag = np.allclose(
ipu_popart_fp16.astype(np.float32),
cpu_fp32,
rtol=self.rtol_fp16,
atol=self.atol_fp16)
self.assertTrue(fp16_flag, "max diff is %f" % (max_diff))
if check_shape:
self.assertTrue(ipu_popart_fp16.shape == cpu_fp32.shape)
| 29.376068 | 74 | 0.635438 | [
"Apache-2.0"
] | Abraham-Xu/Paddle | python/paddle/fluid/tests/unittests/ipu/op_test_ipu.py | 3,437 | Python |
# Copyright 2021 Canonical Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for zaza.events.plugins.conncheck.py."""
import mock
import subprocess
import unit_tests.utils as tests_utils
import zaza.events.plugins.conncheck as conncheck
class TestAutoConfigureFunction(tests_utils.BaseTestCase):
def setUp(self):
super().setUp()
self.patch_object(conncheck, 'logger', name='mock_logger')
self.patch_object(
conncheck, 'get_plugin_manager', name='mock_get_plugin_manager')
self.mock_collection = mock.Mock()
self.mock_conncheck_manager = mock.Mock()
def test_autoconfigure_no_config(self):
self.mock_get_plugin_manager.return_value = self.mock_conncheck_manager
conncheck.auto_configure_with_collection(self.mock_collection)
self.mock_get_plugin_manager.assert_called_once_with('DEFAULT')
self.mock_collection.add_logging_manager.assert_called_once_with(
self.mock_conncheck_manager)
def test_autoconfigure_with_config(self):
self.mock_get_plugin_manager.return_value = self.mock_conncheck_manager
config = {
'manager-name': 'a-manager',
'source': 'a-source',
}
conncheck.auto_configure_with_collection(self.mock_collection,
config=config)
self.mock_get_plugin_manager.assert_called_once_with('a-manager')
self.mock_collection.add_logging_manager.assert_called_once_with(
self.mock_conncheck_manager)
self.mock_conncheck_manager.configure.assert_called_once_with(
module_source='a-source')
class TestGetConncheckManager(tests_utils.BaseTestCase):
def test_get_conncheck_manager(self):
self.patch_object(conncheck, 'get_option', name='mock_get_option')
self.mock_get_option.return_value = 'a-name'
self.patch_object(conncheck, 'get_plugin_manager',
name='mock_get_plugin_manager')
self.mock_get_plugin_manager.return_value = 'a-manager'
self.assertEqual(conncheck.get_conncheck_manager(), 'a-manager')
self.mock_get_option.assert_called_once_with(
'zaza-events.modules.conncheck.manager-name', 'DEFAULT')
self.mock_get_plugin_manager.assert_called_once_with('a-name')
class TestGetPluginManager(tests_utils.BaseTestCase):
def test_get_plugin_manager(self):
self.patch_object(conncheck, '_conncheck_plugin_managers', new={})
self.patch_object(conncheck, 'ConnCheckPluginManager',
name='mock_ConnCheckPluginManager')
self.mock_ConnCheckPluginManager.return_value = 'a-manager'
self.assertEqual(conncheck.get_plugin_manager(), 'a-manager')
self.mock_ConnCheckPluginManager.assert_called_once_with(
managed_name='DEFAULT')
def test_get_plugin_manager_non_default(self):
self.patch_object(conncheck, '_conncheck_plugin_managers', new={})
self.patch_object(conncheck, 'ConnCheckPluginManager',
name='mock_ConnCheckPluginManager')
self.mock_ConnCheckPluginManager.return_value = 'a-manager'
self.assertEqual(conncheck.get_plugin_manager('a-name'), 'a-manager')
self.mock_ConnCheckPluginManager.assert_called_once_with(
managed_name='a-name')
def test_get_plugin_manager_check_caches(self):
self.patch_object(conncheck, '_conncheck_plugin_managers', new={},
name='mock__conncheck_plugin_managers')
self.mock__conncheck_plugin_managers['a-name'] = 'a-manager'
self.patch_object(conncheck, 'ConnCheckPluginManager',
name='mock_ConnCheckPluginManager')
self.mock_ConnCheckPluginManager.return_value = 'the-manager'
self.assertEqual(conncheck.get_plugin_manager('a-name'), 'a-manager')
self.mock_ConnCheckPluginManager.assert_not_called()
class TestConnCheckPluginManager(tests_utils.BaseTestCase):
def setUp(self):
super().setUp()
self.patch_object(conncheck, 'ConnCheckManager',
name='mock_ConnCheckManager')
self.mock_conncheck_manager = mock.Mock()
self.mock_ConnCheckManager.return_value = self.mock_conncheck_manager
self.mock_collection_object = mock.Mock()
self.mock_collection_object.logs_dir = "a-logs-dir"
self.mock_collection_object.log_format = conncheck.LogFormats.InfluxDB
self.mock_collection_object.collection = 'a-collection'
def test_init(self):
cpm = conncheck.ConnCheckPluginManager()
self.assertEqual(cpm.managed_name, 'DEFAULT')
self.assertEqual(cpm._conncheck_manager, self.mock_conncheck_manager)
cpm = conncheck.ConnCheckPluginManager(managed_name='a-manager')
self.assertEqual(cpm.managed_name, 'a-manager')
def test_configure(self):
cpm = conncheck.ConnCheckPluginManager()
self.patch_object(
cpm, 'configure_plugin', name='mock_cpm_configure_plugin')
cpm.configure(collection_object=self.mock_collection_object)
self.mock_cpm_configure_plugin.assert_called_once_with()
def test_configure_plugin(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.configure(collection_object=self.mock_collection_object)
self.mock_conncheck_manager.configure.assert_called_once_with(
collection='a-collection',
logs_dir='a-logs-dir',
module_source='a-source',
tags='abc')
def test_manager_property(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
self.assertEqual(cpm.manager, self.mock_conncheck_manager)
cpm._conncheck_manager = None
with self.assertRaises(AssertionError):
cpm.manager
def test_add_instance(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.add_instance('a-spec', this='that')
self.mock_conncheck_manager.add_instance.assert_called_once_with(
'a-spec', this='that')
def test_get_instance(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
self.mock_conncheck_manager.get_instance.return_value = 'an-instance'
self.assertEqual(cpm.get_instance('a-spec'), 'an-instance')
self.mock_conncheck_manager.get_instance.assert_called_once_with(
'a-spec')
def test_start(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.start('a-spec')
self.mock_conncheck_manager.start.assert_called_once_with('a-spec')
def test_stop(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.stop('a-spec')
self.mock_conncheck_manager.stop.assert_called_once_with('a-spec')
def test_finalise(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.finalise()
self.mock_conncheck_manager.finalise.assert_called_once_with()
def test_log_files(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.log_files()
self.mock_conncheck_manager.log_files.assert_called_once_with()
def test_clean_up(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.clean_up()
self.mock_conncheck_manager.clean_up.assert_called_once_with()
def test_reset(self):
cpm = conncheck.ConnCheckPluginManager(
module_source='a-source', tags='abc')
cpm.reset()
self.mock_conncheck_manager.clean_up.assert_called_once_with()
self.assertIsNone(cpm._conncheck_manager)
class TestConnCheckManager(tests_utils.BaseTestCase):
def setUp(self):
super().setUp()
self.c = conncheck.ConnCheckManager(
collection='a-collection',
logs_dir='/some/dir',
tags=['tag1'])
def test_init(self):
self.assertEqual(self.c.collection, 'a-collection')
self.assertEqual(self.c.logs_dir, '/some/dir')
self.assertEqual(self.c.tags, ['tag1'])
def test_add_instance(self):
self.patch_object(self.c, 'make_instance_with',
name='mock_make_instance_with')
self.mock_make_instance_with.return_value = 'an-instance'
self.c.add_instance('juju:0', this='that', some='thing')
self.mock_make_instance_with.assert_called_once_with(
'juju:0', this='that', some='thing', module_source='conncheck',
collection='a-collection')
self.assertIn('juju:0', self.c._instances)
self.assertEqual(self.c._instances['juju:0'], 'an-instance')
# add again to check for error
with self.assertRaises(RuntimeError):
self.c.add_instance('juju:0', this='that', some='thing')
def test_get_instance(self):
self.c._instances['juju:0'] = 'an-instance'
self.assertEqual(self.c.get_instance('juju:0'), 'an-instance')
def test_start(self):
mock_instance1 = mock.Mock()
mock_instance2 = mock.Mock()
self.c._instances = {'i1': mock_instance1,
'i2': mock_instance2}
self.c.start('i1')
mock_instance1.start.assert_called_once_with()
mock_instance2.start.assert_not_called()
mock_instance1.reset_mock()
self.c.start()
mock_instance1.start.assert_called_once_with()
mock_instance2.start.assert_called_once_with()
def test_stop(self):
mock_instance1 = mock.Mock()
mock_instance2 = mock.Mock()
self.c._instances = {'i1': mock_instance1,
'i2': mock_instance2}
self.c.stop('i1')
mock_instance1.stop.assert_called_once_with()
mock_instance2.stop.assert_not_called()
mock_instance1.reset_mock()
self.c.stop()
mock_instance1.stop.assert_called_once_with()
mock_instance2.stop.assert_called_once_with()
def test_finalise(self):
mock_instance1 = mock.Mock()
mock_instance2 = mock.Mock()
self.c._instances = {'i1': mock_instance1,
'i2': mock_instance2}
self.c.finalise()
mock_instance1.finalise.assert_called_once_with()
mock_instance2.finalise.assert_called_once_with()
mock_instance1.stop.assert_called_once_with()
mock_instance2.stop.assert_called_once_with()
mock_instance1.reset_mock()
mock_instance2.reset_mock()
self.c.finalise()
mock_instance1.stop.assert_not_called()
mock_instance2.stop.assert_not_called()
mock_instance1.finalise.assert_not_called()
mock_instance2.finalise.assert_not_called()
def test_log_files(self):
mock_instance1 = mock.Mock()
mock_instance2 = mock.Mock()
self.c._instances = {'i1': mock_instance1,
'i2': mock_instance2}
mock_instance1.get_logfile_to_local.return_value = 'i1.log'
mock_instance1.log_format = 'f'
mock_instance2.get_logfile_to_local.return_value = 'i2.log'
mock_instance2.log_format = 'f'
log_specs = list(self.c.log_files())
mock_instance1.finalise.assert_called_once_with()
mock_instance2.finalise.assert_called_once_with()
mock_instance1.get_logfile_to_local.assert_called_once_with(
'/some/dir')
mock_instance2.get_logfile_to_local.assert_called_once_with(
'/some/dir')
self.assertEqual(
log_specs,
[('i1', 'f', 'i1.log'),
('i2', 'f', 'i2.log')])
mock_instance1.get_logfile_to_local.reset_mock()
mock_instance2.get_logfile_to_local.reset_mock()
log_specs = list(self.c.log_files())
mock_instance1.get_logfile_to_local.assert_not_called()
mock_instance2.get_logfile_to_local.assert_not_called()
self.assertEqual(
log_specs,
[('i1', 'f', 'i1.log'),
('i2', 'f', 'i2.log')])
def test_clean_up(self):
self.patch_object(self.c, 'finalise', name='mock_finalise')
self.c.clean_up()
self.mock_finalise.assert_called_once_with()
def test_register_spec_handler(self):
self.patch_object(conncheck.ConnCheckManager,
'_spec_handlers',
name='mock_cls__spec_handlers',
new={})
def handler():
pass
conncheck.ConnCheckManager.register_spec_handler('juju', handler)
self.assertIn('juju', conncheck.ConnCheckManager._spec_handlers)
self.assertEqual(conncheck.ConnCheckManager._spec_handlers['juju'],
handler)
# verify can't be added twice.
with self.assertRaises(RuntimeError):
conncheck.ConnCheckManager.register_spec_handler('juju', handler)
def test_make_instance_with(self):
mock_handler = mock.Mock()
mock_handler.return_value = 'an-instance'
self.patch_object(conncheck.ConnCheckManager,
'_spec_handlers',
name='mock_cls__spec_handlers',
new={})
conncheck.ConnCheckManager.register_spec_handler('juju', mock_handler)
# first check for ':' in spec
with self.assertRaises(ValueError):
self.c.make_instance_with('i')
# Now check for unhandled spec
with self.assertRaises(KeyError):
self.c.make_instance_with('some:thing')
# finally make one with juju
self.assertEqual(
self.c.make_instance_with('juju:0', this='that', some='thing'),
'an-instance')
mock_handler.assert_called_once_with('0', this='that', some='thing')
class TestConnCheckInstanceBase(tests_utils.BaseTestCase):
def setUp(self):
super().setUp()
self.c = conncheck.ConnCheckInstanceBase(
name='base',
module_source='/some/source',
collection='a-collection')
def test_init(self):
c = conncheck.ConnCheckInstanceBase(
name='a-name',
log_format=conncheck.LogFormats.CSV,
config_file='thing.yaml',
install_dir='/opt',
module_source='/some/other/source',
install_user='a-user')
self.assertEqual(c.name, 'a-name')
self.assertEqual(c.log_format, conncheck.LogFormats.CSV)
self.assertEqual(c.config_file, 'thing.yaml')
self.assertEqual(c.install_dir, '/opt')
self.assertEqual(c.module_source, '/some/other/source')
self.assertEqual(c.install_user, 'a-user')
self.assertEqual(self.c.name, 'base')
self.assertEqual(self.c.log_format, conncheck.LogFormats.InfluxDB)
self.assertEqual(self.c.config_file, 'config.yaml')
self.assertEqual(self.c.install_dir, '.')
self.assertEqual(self.c.module_source, '/some/source')
self.assertEqual(self.c.install_user, 'conncheck')
def test__validate_not_existing_listener(self):
with self.assertRaises(AssertionError):
self.c._validate_not_existing_listener('thing', 1024)
self.c._validate_not_existing_listener('udp', 1024)
self.c._listeners = {('udp', 1024): None}
with self.assertRaises(RuntimeError):
self.c._validate_not_existing_listener('udp', 1024)
self.c._validate_not_existing_listener('udp', 1023)
def test_add_listener(self):
with self.assertRaises(NotImplementedError):
self.c.add_listener()
def test_add_listener_spec(self):
self.patch_object(self.c, 'write_configuration',
name='mock_c_write_configuration')
self.c.add_listener_spec('udp', 1024, '0.0.0.0', reply_size=50)
self.assertIn(('udp', 1024), self.c._listeners)
self.assertEqual(self.c._listeners[('udp', 1024)],
{'name': 'base:listen:udp:0.0.0.0:1024',
'ipv4': '0.0.0.0',
'port': 1024,
'protocol': 'udp',
'reply-size': 50})
self.mock_c_write_configuration.assert_called_once_with()
def test_add_speaker(self):
self.patch_object(self.c, '_get_remote_address',
name='mock__get_remote_address')
self.mock__get_remote_address.return_value = '1.2.3.4'
self.patch_object(self.c, 'add_speaker_spec',
name='mock_add_speaker_spec')
self.c.add_speaker('udp', 1024, instance='an-instance', address=None,
wait=10, interval=20, send_size=5)
self.mock__get_remote_address.assert_called_once_with(
'an-instance', 'udp', 1024)
self.mock_add_speaker_spec.assert_called_once_with(
'udp', 1024, '1.2.3.4', wait=10, interval=20, send_size=5)
def test__validate_not_existing_speaker(self):
with self.assertRaises(AssertionError):
self.c._validate_not_existing_speaker('thing', '1.2.3.4', 1024)
self.c._validate_not_existing_speaker('udp', '1.2.3.4', 1024)
self.c._speakers = {('udp', '1.2.3.4', 1024): None}
with self.assertRaises(RuntimeError):
self.c._validate_not_existing_speaker('udp', '1.2.3.4', 1024)
self.c._validate_not_existing_speaker('udp', '1.2.3.4', 1023)
def test_add_speaker_spec(self):
self.patch_object(self.c, 'write_configuration',
name='mock_c_write_configuration')
self.c.add_speaker_spec('udp', 1024, '1.2.3.4', send_size=50)
self.assertIn(('udp', '1.2.3.4', 1024), self.c._speakers)
self.assertEqual(self.c._speakers[('udp', '1.2.3.4', 1024)],
{'name': 'base:send:udp:1.2.3.4:1024',
'ipv4': '1.2.3.4',
'port': 1024,
'protocol': 'udp',
'send-size': 50,
'wait': 5,
'interval': 10})
self.mock_c_write_configuration.assert_called_once_with()
self.mock_c_write_configuration.reset_mock()
self.c.add_speaker_spec('http', 1024, '1.2.3.4', send_size=50)
self.assertIn(('http', '1.2.3.4', 1024), self.c._speakers)
self.assertEqual(self.c._speakers[('http', '1.2.3.4', 1024)],
{'name': 'base:request:http:1.2.3.4:1024',
'url': 'http://1.2.3.4:1024/{uuid}',
'protocol': 'http',
'wait': 5,
'interval': 10})
self.mock_c_write_configuration.assert_called_once_with()
self.mock_c_write_configuration.reset_mock()
with self.assertRaises(AssertionError):
self.c.add_speaker_spec('thing', 1024, '1.2.3.4', send_size=50)
def test__get_remote_address(self):
mock_instance = mock.Mock()
mock_instance._listeners = {('udp', 1024): {'ipv4': '1.2.3.4'}}
self.assertEqual(
self.c._get_remote_address(mock_instance, 'udp', 1024), '1.2.3.4')
def test__conncheck_home_dir(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
self.assertEqual(self.c._conncheck_home_dir, '/some/dir')
self.mock_user_directory.assert_called_once_with(
None, 'conncheck')
self.mock_user_directory.reset_mock()
# check property caches
self.assertEqual(self.c._conncheck_home_dir, '/some/dir')
self.mock_user_directory.assert_not_called()
def test_install_no_user_relative_homedir(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
self.patch('zaza.utilities.installers.user_exists',
name='mock_user_exists')
self.patch('zaza.utilities.installers.create_user',
name='mock_create_user')
self.mock_create_user.return_value = '/home/conncheck'
self.patch('zaza.utilities.installers.install_module_in_venv',
name='mock_install_module_in_venv')
self.patch('zaza.utilities.installers.SystemdControl',
name='mock_SystemdControl')
mock__systemd = mock.Mock()
self.mock_SystemdControl.return_value = mock__systemd
self.c._ssh_fn = 'ssh-fn'
self.c._scp_fn = 'scp-fn'
self.mock_user_exists.return_value = False
self.c.install()
self.mock_user_exists.assert_called_once_with('ssh-fn', 'conncheck')
self.mock_create_user.assert_called_once_with('ssh-fn', 'conncheck')
self.mock_install_module_in_venv.assert_called_once_with(
'/some/source', '/home/conncheck/.', 'scp-fn', 'ssh-fn',
run_user='conncheck')
mock__systemd.install.assert_called_once_with()
self.assertTrue(self.c._installed)
def test_install_user_exists_absolute_homedir(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
self.patch('zaza.utilities.installers.user_exists',
name='mock_user_exists')
self.patch('zaza.utilities.installers.create_user',
name='mock_create_user')
self.mock_create_user.return_value = '/home/conncheck'
self.patch('zaza.utilities.installers.install_module_in_venv',
name='mock_install_module_in_venv')
self.patch('zaza.utilities.installers.SystemdControl',
name='mock_SystemdControl')
mock__systemd = mock.Mock()
self.mock_SystemdControl.return_value = mock__systemd
self.c._ssh_fn = 'ssh-fn'
self.c._scp_fn = 'scp-fn'
self.mock_user_exists.return_value = True
self.c.install_dir = '/fixed'
self.c.install()
self.mock_user_exists.assert_called_once_with('ssh-fn', 'conncheck')
self.mock_create_user.assert_not_called()
self.mock_install_module_in_venv.assert_called_once_with(
'/some/source', '/fixed', 'scp-fn', 'ssh-fn',
run_user='conncheck')
mock__systemd.install.assert_called_once_with()
self.assertTrue(self.c._installed)
def test__verify_systemd_not_none(self):
self.c._systemd = 'thing'
self.c._verify_systemd_not_none()
self.c._systemd = None
with self.assertRaises(AssertionError):
self.c._verify_systemd_not_none()
def test_remote_log_filename_property(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
self.assertEqual(self.c.remote_log_filename, '/some/dir/conncheck.log')
def test_local_log_filename_property(self):
with self.assertRaises(NotImplementedError):
self.c.local_log_filename
def test_get_logfile_to_local(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
mock_scp_fn = mock.Mock()
self.c._scp_fn = mock_scp_fn
with mock.patch.object(
conncheck.ConnCheckInstanceBase, 'local_log_filename',
new_callable=mock.PropertyMock) as mock_local_log_filename:
mock_local_log_filename.return_value = 'some-filename'
self.assertEqual(self.c.get_logfile_to_local('/a/dir'),
'/a/dir/some-filename')
mock_scp_fn.assert_called_once_with('/some/dir/conncheck.log',
'/a/dir/some-filename',
copy_from=True)
def test_write_configuration_not_installed_not_running(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
self.patch_object(self.c, 'install', name='mock_c_install')
self.patch_object(self.c, 'is_running', name='mock_c_is_running')
self.mock_c_is_running.return_value = False
self.patch_object(self.c, 'restart', name='mock_c_restart')
mock_scp_fn = mock.Mock()
self.c._scp_fn = mock_scp_fn
mock_ssh_fn = mock.Mock()
self.c._ssh_fn = mock_ssh_fn
self.patch('yaml.dump', name='mock_yaml_dump')
self.patch('tempfile.TemporaryDirectory',
name='mock_TemporaryDirectory')
mock_td = mock.MagicMock()
mock_td.__enter__.return_value = '/target'
self.mock_TemporaryDirectory.return_value = mock_td
with tests_utils.patch_open() as (mock_open, mock_file):
self.c.write_configuration()
self.mock_c_install.assert_called_once_with()
mock_open.assert_called_once_with('/target/config.yaml', 'wt')
expected_config = {
'name': 'base',
'file-log-path': '/some/dir/conncheck.log',
'collection': 'a-collection',
'log-format': 'InfluxDB',
'listeners': [],
'speakers': []
}
self.mock_yaml_dump.assert_called_once_with(expected_config, mock_file)
mock_scp_fn.assert_called_once_with('/target/config.yaml',
'config.yaml')
mock_ssh_fn.assert_called_once_with(
['sudo', 'mv', 'config.yaml', '/some/dir/config.yaml'])
self.mock_c_is_running.assert_called_once_with()
self.mock_c_restart.assert_not_called()
def test_write_configuration_installed_and_running(self):
self.patch('zaza.utilities.installers.user_directory',
name='mock_user_directory')
self.mock_user_directory.return_value = '/some/dir'
self.patch_object(self.c, 'install', name='mock_c_install')
self.patch_object(self.c, 'is_running', name='mock_c_is_running')
self.mock_c_is_running.return_value = True
self.patch_object(self.c, 'restart', name='mock_c_restart')
mock_scp_fn = mock.Mock()
self.c._scp_fn = mock_scp_fn
mock_ssh_fn = mock.Mock()
self.c._ssh_fn = mock_ssh_fn
self.patch('yaml.dump', name='mock_yaml_dump')
self.patch('tempfile.TemporaryDirectory',
name='mock_TemporaryDirectory')
mock_td = mock.MagicMock()
mock_td.__enter__.return_value = '/target'
self.mock_TemporaryDirectory.return_value = mock_td
self.c._installed = True
with tests_utils.patch_open() as (mock_open, mock_file):
self.c.write_configuration()
self.mock_c_install.assert_not_called()
mock_open.assert_called_once_with('/target/config.yaml', 'wt')
expected_config = {
'name': 'base',
'file-log-path': '/some/dir/conncheck.log',
'collection': 'a-collection',
'log-format': 'InfluxDB',
'listeners': [],
'speakers': []
}
self.mock_yaml_dump.assert_called_once_with(expected_config, mock_file)
mock_scp_fn.assert_called_once_with('/target/config.yaml',
'config.yaml')
mock_ssh_fn.assert_called_once_with(
['sudo', 'mv', 'config.yaml', '/some/dir/config.yaml'])
self.mock_c_is_running.assert_called_once_with()
self.mock_c_restart.assert_called_once_with()
def test_is_running(self):
self.patch_object(self.c, '_verify_systemd_not_none',
name='mock__verify_systemd_not_none')
mock__systemd = mock.Mock()
mock__systemd.is_running.return_value = False
self.c._systemd = mock__systemd
self.assertFalse(self.c.is_running())
self.mock__verify_systemd_not_none.assert_called_once_with()
mock__systemd.is_running.assert_called_once_with()
def test_start(self):
self.patch_object(self.c, '_verify_systemd_not_none',
name='mock__verify_systemd_not_none')
mock__systemd = mock.Mock()
self.c._systemd = mock__systemd
self.c.start()
self.mock__verify_systemd_not_none.assert_called_once_with()
mock__systemd.start.assert_called_once_with()
def test_stop(self):
self.patch_object(conncheck, 'logger', name='mock_logger')
self.c._systemd = None
self.c.stop()
self.mock_logger.debug.assert_called_once_with(mock.ANY, self.c)
mock__systemd = mock.Mock()
self.c._systemd = mock__systemd
self.mock_logger.reset_mock()
self.c.stop()
mock__systemd.stop.assert_called_once_with()
def test_restart(self):
self.patch_object(self.c, '_verify_systemd_not_none',
name='mock__verify_systemd_not_none')
mock__systemd = mock.Mock()
self.c._systemd = mock__systemd
self.c.restart()
self.mock__verify_systemd_not_none.assert_called_once_with()
mock__systemd.restart.assert_called_once_with()
def test_finalise(self):
self.c._installed = False
mock__systemd = mock.Mock()
self.c._systemd = mock__systemd
self.patch_object(self.c, 'stop', name='mock_c_stop')
self.c.finalise()
self.mock_c_stop.assert_not_called()
mock__systemd.disable.assert_not_called()
self.c._installed = True
self.c.finalise()
self.mock_c_stop.assert_called_once_with()
mock__systemd.disable.assert_called_once_with()
def test_clean_up(self):
self.c._installed = False
mock__systemd = mock.Mock()
self.c._systemd = mock__systemd
self.patch_object(self.c, 'stop', name='mock_c_stop')
self.c.clean_up()
self.mock_c_stop.assert_not_called()
mock__systemd.disable.assert_not_called()
mock__systemd.remove.assert_not_called()
self.c._installed = True
self.c.clean_up()
self.mock_c_stop.assert_called_once_with()
mock__systemd.disable.assert_called_once_with()
mock__systemd.remove.assert_called_once_with()
class TestConnCheckInstanceJuju(tests_utils.BaseTestCase):
def setUp(self):
super().setUp()
self.patch('zaza.utilities.installers.make_juju_ssh_fn',
name='mock_make_juju_ssh_fn')
self.mock_ssh_fn = mock.Mock()
self.mock_make_juju_ssh_fn = self.mock_ssh_fn
self.patch('zaza.utilities.installers.make_juju_scp_fn',
name='mock_make_juju_scp_fn')
self.mock_scp_fn = mock.Mock()
self.mock_make_juju_scp_fn = self.mock_scp_fn
self.c = conncheck.ConnCheckInstanceJuju(
'0',
model='some-model',
user='a-user',
module_source='/some/source',
collection='a-collection')
def test_init(self):
c = conncheck.ConnCheckInstanceJuju(
'0/lxd/15',
log_format=conncheck.LogFormats.CSV,
config_file='thing.yaml',
install_dir='/opt',
module_source='/some/other/source',
install_user='a-user')
self.assertEqual(c.machine_or_unit_spec, '0/lxd/15')
self.assertEqual(c.name, '0/lxd/15')
self.assertEqual(c.log_format, conncheck.LogFormats.CSV)
self.assertEqual(c.config_file, 'thing.yaml')
self.assertEqual(c.install_dir, '/opt')
self.assertEqual(c.module_source, '/some/other/source')
self.assertEqual(c.install_user, 'a-user')
self.assertEqual(self.c.machine_or_unit_spec, '0')
self.assertEqual(self.c.name, '0')
self.assertEqual(self.c.log_format, conncheck.LogFormats.InfluxDB)
self.assertEqual(self.c.config_file, 'config.yaml')
self.assertEqual(self.c.install_dir, '.')
self.assertEqual(self.c.module_source, '/some/source')
self.assertEqual(self.c.install_user, 'conncheck')
def test_local_log_filename(self):
self.assertEqual(self.c.local_log_filename, '0.log')
self.c.machine_or_unit_spec = '0/lxd/15'
self.assertEqual(self.c.local_log_filename, '0_lxd_15.log')
def test__validate_spec(self):
MACHINE = self.c.JujuTypes.MACHINE
UNIT = self.c.JujuTypes.UNIT
valid_specs = (('0', MACHINE),
('9', MACHINE),
('15', MACHINE),
('0/lxd/10', MACHINE),
('1/LXD/4', MACHINE),
('some-unit-0/14', UNIT),
('other/23', UNIT))
invalid_specs = ('b', '1/spec/2', 'other-unit', 'd/10/10')
for spec, type_ in valid_specs:
self.c.machine_or_unit_spec = spec
self.c._validate_spec()
self.assertEqual(self.c._juju_type, type_)
for spec in invalid_specs:
self.c.machine_or_unit_spec = spec
with self.assertRaises(ValueError):
self.c._validate_spec()
def test_add_listener(self):
self.patch_object(self.c, '_validate_not_existing_listener',
name='mock__validate_not_existing_listener')
self.patch_object(self.c, '_get_address', name='mock__get_address')
self.mock__get_address.return_value = '1.2.3.4'
self.patch_object(self.c, 'add_listener_spec',
name='mock_add_listener_spec')
self.c.add_listener('udp', 1024, space='default', cidr='cidr')
self.mock__validate_not_existing_listener.assert_called_once_with(
'udp', 1024)
        self.mock__get_address.assert_called_once_with('default', 'cidr')
self.mock_add_listener_spec.assert_called_once_with(
'udp', 1024, '1.2.3.4', reply_size=1024)
def test__get_address(self):
self.patch_object(self.c, '_get_address_unit',
name='mock__get_address_unit')
self.mock__get_address_unit.return_value = '1.2.3.4'
self.patch_object(self.c, '_get_address_machine',
name='mock__get_address_machine')
self.mock__get_address_machine.return_value = '5.6.7.8'
self.c._juju_type = self.c.JujuTypes.UNIT
self.assertEqual(self.c._get_address(None, 'cidr'), '1.2.3.4')
self.mock__get_address_unit.assert_called_once_with(
'juju-info', 'cidr')
self.mock__get_address_unit.reset_mock()
self.c._juju_type = self.c.JujuTypes.MACHINE
self.assertEqual(self.c._get_address(None, 'cidr'), '5.6.7.8')
self.mock__get_address_machine.assert_called_once_with('cidr')
self.c._juju_type = None
with self.assertRaises(RuntimeError):
self.c._get_address(None, 'cidr')
def test__get_address_unit_single_address(self):
self.patch('subprocess.check_output', name='mock_check_output')
self.patch_object(conncheck, 'logger', name='mock_logger')
self.patch('yaml.safe_load', name='mock_yaml_safe_load')
self.mock_check_output.return_value = b'1.2.3.4'
self.mock_yaml_safe_load.return_value = '1.2.3.4\n'
self.assertEqual(self.c._get_address_unit('a-space', 'a-cidr'),
'1.2.3.4')
self.mock_check_output.assert_called_once_with(
['juju', 'run', '-u', '0', '--', 'network-get', '--format',
'yaml', '--bind-address', 'a-space'])
self.mock_yaml_safe_load.assert_called_once_with('1.2.3.4')
def test__get_address_unit_multiple_address(self):
self.patch('subprocess.check_output', name='mock_check_output')
self.patch_object(conncheck, 'logger', name='mock_logger')
self.patch('yaml.safe_load', name='mock_yaml_safe_load')
self.mock_check_output.return_value = b'1.2.3.4'
self.mock_yaml_safe_load.return_value = ['1.2.3.4', '5.6.7.8']
with self.assertRaises(NotImplementedError):
self.c._get_address_unit('a-space', 'a-cidr')
def test__get_address_unit_network_get_fails(self):
self.patch('subprocess.check_output', name='mock_check_output')
self.patch_object(conncheck, 'logger', name='mock_logger')
self.patch('yaml.safe_load', name='mock_yaml_safe_load')
self.mock_check_output.return_value = b'1.2.3.4'
def raise_(*args):
raise subprocess.CalledProcessError(cmd='bang', returncode=1)
self.mock_check_output.side_effect = raise_
with self.assertRaises(subprocess.CalledProcessError):
self.c._get_address_unit('a-space', 'a-cidr')
def test__get_address_machine(self):
with self.assertRaises(NotImplementedError):
self.c._get_address_machine()
class TestConnCheckInstanceSSH(tests_utils.BaseTestCase):
def setUp(self):
super().setUp()
self.patch('zaza.utilities.installers.make_ssh_fn',
name='mock_make_ssh_fn')
self.mock_ssh_fn = mock.Mock()
self.mock_make_ssh_fn = self.mock_ssh_fn
self.patch('zaza.utilities.installers.make_scp_fn',
name='mock_make_scp_fn')
self.mock_scp_fn = mock.Mock()
self.mock_make_scp_fn = self.mock_scp_fn
self.c = conncheck.ConnCheckInstanceSSH(
address='1.2.3.4',
key_file='a-file',
user='a-user',
module_source='/some/source',
collection='a-collection')
def test_init(self):
c = conncheck.ConnCheckInstanceSSH(
'5.6.7.8',
'my-key-file',
log_format=conncheck.LogFormats.CSV,
config_file='thing.yaml',
install_dir='/opt',
module_source='/some/other/source',
install_user='a-user')
self.assertEqual(c.address, '5.6.7.8')
self.assertEqual(c.key_file, 'my-key-file')
self.assertEqual(c.name, '5.6.7.8')
self.assertEqual(c.log_format, conncheck.LogFormats.CSV)
self.assertEqual(c.config_file, 'thing.yaml')
self.assertEqual(c.install_dir, '/opt')
self.assertEqual(c.module_source, '/some/other/source')
self.assertEqual(c.install_user, 'a-user')
self.assertEqual(self.c.address, '1.2.3.4')
self.assertEqual(self.c.key_file, 'a-file')
self.assertEqual(self.c.name, '1.2.3.4')
self.assertEqual(self.c.log_format, conncheck.LogFormats.InfluxDB)
self.assertEqual(self.c.config_file, 'config.yaml')
self.assertEqual(self.c.install_dir, '.')
self.assertEqual(self.c.module_source, '/some/source')
self.assertEqual(self.c.install_user, 'conncheck')
def test_local_log_filename(self):
self.c.address = '[email protected]'
self.assertEqual(self.c.local_log_filename, 'user_1-2-3-4.log')
def test_add_listener(self):
self.patch_object(self.c, '_validate_not_existing_listener',
name='mock__validate_not_existing_listener')
self.patch_object(self.c, 'add_listener_spec',
name='mock_add_listener_spec')
self.c.add_listener('udp', 1024)
self.mock__validate_not_existing_listener.assert_called_once_with(
'udp', 1024)
self.mock_add_listener_spec.assert_called_once_with(
'udp', 1024, '0.0.0.0', reply_size=1024)
| 43.203191 | 79 | 0.641698 | [
"ECL-2.0",
"Apache-2.0"
] | Basdbruijne/zaza | unit_tests/events/plugins/test_zaza_events_plugins_conncheck.py | 40,611 | Python |
stages = ['''
+---+
| |
O |
/|\ |
/ \ |
|
=========
''', '''
+---+
| |
O |
/|\ |
/ |
|
=========
''', '''
+---+
| |
O |
/|\ |
|
|
=========
''', '''
+---+
| |
O |
/| |
|
|
=========
''', '''
+---+
| |
O |
| |
|
|
=========
''', '''
+---+
| |
O |
|
|
|
=========
''', '''
+---+
| |
|
|
|
|
=========
''']
logo = '''
_
| |
| |__ __ _ _ __ __ _ _ __ ___ __ _ _ __
| '_ \ / _` | '_ \ / _` | '_ ` _ \ / _` | '_ \
| | | | (_| | | | | (_| | | | | | | (_| | | | |
|_| |_|\__,_|_| |_|\__, |_| |_| |_|\__,_|_| |_|
__/ |
|___/
''' | 12.911765 | 47 | 0.083144 | [
"MIT"
] | iliescua/Hangman | hangman_art.py | 878 | Python |
import argparse
import numpy as np
from sklearn.metrics import accuracy_score, jaccard_score, balanced_accuracy_score
import matplotlib.pyplot as plt
import seaborn as sns
import pandas as pd
import dataloader
import track
import render  # needed for render.render_classification_sequence when render_seq=True
from classifiers import ObservationsConditionsClassifier
from classifiers import ClassifierComposition
np.seterr(all='ignore')
class_set = 9
n_pca_components = 20
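# Sweep a range of per-measurement costs, evaluate each classifier variant
# (angle-conditioned or not, calibrated or not) with leave-one-person-out
# cross-validation, and plot classification accuracy against the cost.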
def train():
global parsed_args
test_sequence = 'Mix'
measurement_costs = [0.1*i for i in range(0,15)]
measurement_costs.extend([0.01*i for i in range(1, 15)])
loader = dataloader.DataLoaderSpectrogram()
features = [f'PC_{i}' for i in range(n_pca_components)]
classifiers = [
(ObservationsConditionsClassifier(features, discriminant_model='calibrated_Gaussian', n_angle_bins=8), 'Conditioned on $\phi$', 'Isotonic calibration'),
(ObservationsConditionsClassifier(features, discriminant_model='Gaussian', n_angle_bins=8), 'Conditioned on $\phi$','Uncalibrated'),
(ClassifierComposition(features, discriminant_model='Gaussian'), 'Not conditioned on $\phi$', 'Uncalibrated'),
(ClassifierComposition(features, discriminant_model='calibrated_Gaussian'), 'Not conditioned on $\phi$', 'Isotonic calibration'),
]
rows = []
for cost in measurement_costs:
for i_model, (classifier, observation_condition, discriminant_model) in enumerate(classifiers):
if parsed_args.rebuild:
track.state_estimation(load_directory = './data/dataset/RD')
dataset_path = r'C:\Users\peter\Documents\pulseON'
loader = dataloader.DataLoaderSpectrogram()
loader.build(dataset_path,'PCA')
else:
loader.load('./data/dataset_df')
result_df = evaluate_classifier(classifier, loader.df, test_persons = loader.df.person.unique(), test_sequence = test_sequence, measurement_cost = cost)
predictions = result_df.loc[result_df['sequence_type'] == test_sequence]['prediction'].to_numpy()
lables = result_df.loc[result_df['sequence_type'] == test_sequence]['lable'].to_numpy()
accuracy = accuracy_score(lables, predictions)
rows.append({
'Accuracy': accuracy, 'Balanced accuracy': balanced_accuracy_score(lables, predictions), 'Macro-averaged Jaccard index': jaccard_score(lables, predictions, average='macro'),
'Observation conditions': observation_condition, 'Calibration': discriminant_model, 'Cost': cost,
'result_df': result_df, 'model_index': i_model,
})
sns.lineplot(data = pd.DataFrame(rows), x = 'Cost', y = 'Accuracy', style = 'Observation conditions', hue = 'Calibration')
plt.tight_layout()
plt.show()
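# Leave-one-person-out evaluation: for each test person the model is fitted on
# the remaining people, a transition matrix is estimated from their 'Mix'
# sequences, and Bayesian filtering is run over each of the test person's
# recorded sequences, writing beliefs and predictions back into the dataframe.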
def evaluate_classifier(model, df, test_persons, measurement_cost, test_sequence = 'Mix', prior = [1/class_set for i in range(class_set)], render_seq=False):
df['prediction'] = -6666
for test_person in test_persons:
training_df = df.loc[df['person'] != test_person]
test_df = df.loc[(df['person'] == test_person) & (df['sequence_type'] == test_sequence)].copy()
transition_matrix = estimate_transition_matrix(
training_df.loc[training_df['sequence_type'] == 'Mix']
)
model.fit(training_df)
for j, file in enumerate(test_df.file_index.unique()):
print(f'File {j}/{len(test_df.file_index.unique())}')
seq_df = test_df.loc[test_df['file_index'] == file].copy()
seq_df = predict_sequence(model, seq_df, transition_matrix, measurement_cost)
if render_seq:
render.render_classification_sequence(seq_df)
df.loc[seq_df.index, 'belief'] = seq_df['belief']
df.loc[seq_df.index, 'prediction'] = seq_df['prediction']
df.loc[seq_df.index, 'Selected'] = seq_df['Selected']
return df
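# Recursive Bayesian state estimation over a single sequence: at each time step
# the selected sensors' class likelihoods update the belief, the MAP class is
# stored as the prediction, and the belief is then propagated through the
# transition matrix before the next step.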
def predict_sequence(model, df, transition_matrix, measurement_cost, prior=[1/class_set for _ in range(class_set)]):
belief = np.reshape(prior, (class_set, 1))
for time in np.sort(df.time.unique()):
df_step = df[df['time'] == time].copy()
if measurement_cost:
selected_sensors = information_selection(df_step, model, belief, measurement_cost)
else:
selected_sensors = df_step.index
df.loc[selected_sensors, 'Selected'] = True
for i, row in df_step.loc[selected_sensors].iterrows():
row = row.to_frame().transpose()
prop_likelihood = model.predict_proba(row)
posterior = prop_likelihood[0, :, np.newaxis] * belief
posterior = posterior/(posterior.sum())
belief = posterior
# save prediction
df['belief'] = np.nan
df['belief'] = df['belief'].astype(object)
for index in df_step.index:
df.loc[index, 'belief'] = [belief]
df.loc[index ,'prediction'] = belief.argmax() + 1
# Transition step
belief = transition_matrix @ np.reshape(belief, (class_set,1))
return df
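# Greedy sensor selection: sensors are considered in order of decreasing
# individual information gain, and the best-scoring prefix (joint information
# minus the per-sensor measurement cost) is returned.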
def information_selection(df, model, belief, measurement_cost):
# Calculate information and sort indices by information
df['information'] = df.apply(lambda row: model.information(belief, [row['predicted_angle']]), axis=1)
potential_sensors = df.sort_values('information').index.to_list()
selected_sensors = []
sensor_utility = {0:[]}
while potential_sensors:
selected_sensors.append(potential_sensors.pop())
information = model.information(belief, sensors=df.loc[selected_sensors]['predicted_angle'].to_list())
utility = information - measurement_cost*len(selected_sensors)
sensor_utility[utility] = selected_sensors[:]
return sensor_utility[np.max(list(sensor_utility.keys()))]
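# Estimate a first-order Markov transition matrix from a single radar's data by
# counting label transitions within each training sequence and normalising the
# columns so each sums to one.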
def estimate_transition_matrix(df):
transition_count = np.zeros((class_set,class_set))
df = df.loc[df['radar'] == df.radar.unique()[0]]
sequences = df['file_index'].unique()
for sequence_index in sequences:
df_seq = df.loc[df['file_index'] == sequence_index].sort_values('time').reset_index(drop=True)
previous_state = None
for i, row in df_seq.iterrows():
state = row['lable']
if not previous_state:
previous_state = state
continue
transition_count[state - 1, previous_state - 1] += 1
previous_state = state
transition_matrix = transition_count/transition_count.sum(axis=0,keepdims=1)
transition_matrix = transition_matrix/transition_matrix.sum(axis=0,keepdims=1)
return transition_matrix
def load_options():
global parsed_args
parser = argparse.ArgumentParser(description='Entry point to fit and evaluate\
a Bayesian model of human motion',
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('--rebuild', dest='rebuild', action='store_true')
parser.add_argument('--no-rebuild', dest='rebuild', action='store_false')
parser.set_defaults(rebuild=False)
parsed_args = parser.parse_args()
if __name__ == '__main__':
load_options()
train() | 40.949438 | 189 | 0.664563 | [
"MIT"
] | petersvenningsson/radar-Bayesian-human-motion | train.py | 7,289 | Python |
#!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the pruning code.
WARNING:
This test uses 4GB of disk space.
This test takes 30 mins or more (up to 2 hours)
"""
import os
from test_framework.blocktools import create_coinbase
from test_framework.messages import CBlock
from test_framework.script import (
CScript,
OP_NOP,
OP_RETURN,
)
from test_framework.test_framework import UmkoinTestFramework
from test_framework.util import (
assert_equal,
assert_greater_than,
assert_raises_rpc_error,
)
# Rescans start at the earliest block up to 2 hours before a key timestamp, so
# the manual prune RPC avoids pruning blocks in the same window to be
# compatible with pruning based on key creation time.
TIMESTAMP_WINDOW = 2 * 60 * 60
def mine_large_blocks(node, n):
# Make a large scriptPubKey for the coinbase transaction. This is OP_RETURN
# followed by 950k of OP_NOP. This would be non-standard in a non-coinbase
# transaction but is consensus valid.
# Set the nTime if this is the first time this function has been called.
    # A static variable ensures that time is monotonically increasing and is therefore
# different for each block created => blockhash is unique.
    if "nTime" not in mine_large_blocks.__dict__:
mine_large_blocks.nTime = 0
# Get the block parameters for the first block
big_script = CScript([OP_RETURN] + [OP_NOP] * 950000)
best_block = node.getblock(node.getbestblockhash())
height = int(best_block["height"]) + 1
mine_large_blocks.nTime = max(mine_large_blocks.nTime, int(best_block["time"])) + 1
previousblockhash = int(best_block["hash"], 16)
for _ in range(n):
# Build the coinbase transaction (with large scriptPubKey)
coinbase_tx = create_coinbase(height)
coinbase_tx.vin[0].nSequence = 2 ** 32 - 1
coinbase_tx.vout[0].scriptPubKey = big_script
coinbase_tx.rehash()
# Build the block
block = CBlock()
block.nVersion = best_block["version"]
block.hashPrevBlock = previousblockhash
block.nTime = mine_large_blocks.nTime
block.nBits = int('207fffff', 16)
block.nNonce = 0
block.vtx = [coinbase_tx]
block.hashMerkleRoot = block.calc_merkle_root()
block.solve()
# Submit to the node
node.submitblock(block.serialize().hex())
previousblockhash = block.sha256
height += 1
mine_large_blocks.nTime += 1
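# Total size in MiB of all files (block and undo data) stored under 'blockdir'.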
def calc_usage(blockdir):
return sum(os.path.getsize(blockdir + f) for f in os.listdir(blockdir) if os.path.isfile(os.path.join(blockdir, f))) / (1024. * 1024.)
class PruneTest(UmkoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 6
self.supports_cli = False
# Create nodes 0 and 1 to mine.
# Create node 2 to test pruning.
self.full_node_default_args = ["-maxreceivebuffer=20000", "-checkblocks=5"]
# Create nodes 3 and 4 to test manual pruning (they will be re-started with manual pruning later)
# Create nodes 5 to test wallet in prune mode, but do not connect
self.extra_args = [
self.full_node_default_args,
self.full_node_default_args,
["-maxreceivebuffer=20000", "-prune=550"],
["-maxreceivebuffer=20000"],
["-maxreceivebuffer=20000"],
["-prune=550"],
]
self.rpc_timeout = 120
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self):
self.setup_nodes()
self.prunedir = os.path.join(self.nodes[2].datadir, self.chain, 'blocks', '')
self.connect_nodes(0, 1)
self.connect_nodes(1, 2)
self.connect_nodes(0, 2)
self.connect_nodes(0, 3)
self.connect_nodes(0, 4)
self.sync_blocks(self.nodes[0:5])
def setup_nodes(self):
self.add_nodes(self.num_nodes, self.extra_args)
self.start_nodes()
self.import_deterministic_coinbase_privkeys()
def create_big_chain(self):
# Start by creating some coinbases we can spend later
self.generate(self.nodes[1], 200)
self.sync_blocks(self.nodes[0:2])
self.generate(self.nodes[0], 150)
# Then mine enough full blocks to create more than 550MiB of data
mine_large_blocks(self.nodes[0], 645)
self.sync_blocks(self.nodes[0:5])
def test_invalid_command_line_options(self):
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune cannot be configured with a negative value.',
extra_args=['-prune=-1'],
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune configured below the minimum of 550 MiB. Please use a higher number.',
extra_args=['-prune=549'],
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune mode is incompatible with -txindex.',
extra_args=['-prune=550', '-txindex'],
)
self.nodes[0].assert_start_raises_init_error(
expected_msg='Error: Prune mode is incompatible with -coinstatsindex.',
extra_args=['-prune=550', '-coinstatsindex'],
)
def test_height_min(self):
assert os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), "blk00000.dat is missing, pruning too early"
self.log.info("Success")
self.log.info(f"Though we're already using more than 550MiB, current usage: {calc_usage(self.prunedir)}")
self.log.info("Mining 25 more blocks should cause the first block file to be pruned")
# Pruning doesn't run until we're allocating another chunk, 20 full blocks past the height cutoff will ensure this
mine_large_blocks(self.nodes[0], 25)
# Wait for blk00000.dat to be pruned
self.wait_until(lambda: not os.path.isfile(os.path.join(self.prunedir, "blk00000.dat")), timeout=30)
self.log.info("Success")
usage = calc_usage(self.prunedir)
self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)
def create_chain_with_staleblocks(self):
# Create stale blocks in manageable sized chunks
self.log.info("Mine 24 (stale) blocks on Node 1, followed by 25 (main chain) block reorg from Node 0, for 12 rounds")
for _ in range(12):
# Disconnect node 0 so it can mine a longer reorg chain without knowing about node 1's soon-to-be-stale chain
# Node 2 stays connected, so it hears about the stale blocks and then reorg's when node0 reconnects
self.disconnect_nodes(0, 1)
self.disconnect_nodes(0, 2)
# Mine 24 blocks in node 1
mine_large_blocks(self.nodes[1], 24)
# Reorg back with 25 block chain from node 0
mine_large_blocks(self.nodes[0], 25)
# Create connections in the order so both nodes can see the reorg at the same time
self.connect_nodes(0, 1)
self.connect_nodes(0, 2)
self.sync_blocks(self.nodes[0:3])
self.log.info(f"Usage can be over target because of high stale rate: {calc_usage(self.prunedir)}")
def reorg_test(self):
# Node 1 will mine a 300 block chain starting 287 blocks back from Node 0 and Node 2's tip
# This will cause Node 2 to do a reorg requiring 288 blocks of undo data to the reorg_test chain
height = self.nodes[1].getblockcount()
self.log.info(f"Current block height: {height}")
self.forkheight = height - 287
self.forkhash = self.nodes[1].getblockhash(self.forkheight)
self.log.info(f"Invalidating block {self.forkhash} at height {self.forkheight}")
self.nodes[1].invalidateblock(self.forkhash)
        # We've now switched to our previously mined 24-block fork on node 1, but that's not what we want
# So invalidate that fork as well, until we're on the same chain as node 0/2 (but at an ancestor 288 blocks ago)
mainchainhash = self.nodes[0].getblockhash(self.forkheight - 1)
curhash = self.nodes[1].getblockhash(self.forkheight - 1)
while curhash != mainchainhash:
self.nodes[1].invalidateblock(curhash)
curhash = self.nodes[1].getblockhash(self.forkheight - 1)
assert self.nodes[1].getblockcount() == self.forkheight - 1
self.log.info(f"New best height: {self.nodes[1].getblockcount()}")
# Disconnect node1 and generate the new chain
self.disconnect_nodes(0, 1)
self.disconnect_nodes(1, 2)
self.log.info("Generating new longer chain of 300 more blocks")
self.generate(self.nodes[1], 300)
self.log.info("Reconnect nodes")
self.connect_nodes(0, 1)
self.connect_nodes(1, 2)
self.sync_blocks(self.nodes[0:3], timeout=120)
self.log.info(f"Verify height on node 2: {self.nodes[2].getblockcount()}")
self.log.info(f"Usage possibly still high because of stale blocks in block files: {calc_usage(self.prunedir)}")
self.log.info("Mine 220 more large blocks so we have requisite history")
mine_large_blocks(self.nodes[0], 220)
self.sync_blocks(self.nodes[0:3], timeout=120)
usage = calc_usage(self.prunedir)
self.log.info(f"Usage should be below target: {usage}")
assert_greater_than(550, usage)
def reorg_back(self):
# Verify that a block on the old main chain fork has been pruned away
assert_raises_rpc_error(-1, "Block not available (pruned data)", self.nodes[2].getblock, self.forkhash)
with self.nodes[2].assert_debug_log(expected_msgs=['block verification stopping at height', '(pruning, no data)']):
self.nodes[2].verifychain(checklevel=4, nblocks=0)
self.log.info(f"Will need to redownload block {self.forkheight}")
# Verify that we have enough history to reorg back to the fork point
# Although this is more than 288 blocks, because this chain was written more recently
# and only its other 299 small and 220 large blocks are in the block files after it,
# it is expected to still be retained
self.nodes[2].getblock(self.nodes[2].getblockhash(self.forkheight))
first_reorg_height = self.nodes[2].getblockcount()
curchainhash = self.nodes[2].getblockhash(self.mainchainheight)
self.nodes[2].invalidateblock(curchainhash)
goalbestheight = self.mainchainheight
goalbesthash = self.mainchainhash2
# As of 0.10 the current block download logic is not able to reorg to the original chain created in
        # create_chain_with_staleblocks because it doesn't know of any peer that's on that chain from which to
# redownload its missing blocks.
# Invalidate the reorg_test chain in node 0 as well, it can successfully switch to the original chain
# because it has all the block data.
# However it must mine enough blocks to have a more work chain than the reorg_test chain in order
# to trigger node 2's block download logic.
# At this point node 2 is within 288 blocks of the fork point so it will preserve its ability to reorg
if self.nodes[2].getblockcount() < self.mainchainheight:
blocks_to_mine = first_reorg_height + 1 - self.mainchainheight
self.log.info(f"Rewind node 0 to prev main chain to mine longer chain to trigger redownload. Blocks needed: {blocks_to_mine}")
self.nodes[0].invalidateblock(curchainhash)
assert_equal(self.nodes[0].getblockcount(), self.mainchainheight)
assert_equal(self.nodes[0].getbestblockhash(), self.mainchainhash2)
goalbesthash = self.generate(self.nodes[0], blocks_to_mine)[-1]
goalbestheight = first_reorg_height + 1
self.log.info("Verify node 2 reorged back to the main chain, some blocks of which it had to redownload")
# Wait for Node 2 to reorg to proper height
self.wait_until(lambda: self.nodes[2].getblockcount() >= goalbestheight, timeout=900)
assert_equal(self.nodes[2].getbestblockhash(), goalbesthash)
# Verify we can now have the data for a block previously pruned
assert_equal(self.nodes[2].getblock(self.forkhash)["height"], self.forkheight)
def manual_test(self, node_number, use_timestamp):
# at this point, node has 995 blocks and has not yet run in prune mode
self.start_node(node_number)
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
assert_raises_rpc_error(-1, "not in prune mode", node.pruneblockchain, 500)
# now re-start in manual pruning mode
self.restart_node(node_number, extra_args=["-prune=1"])
node = self.nodes[node_number]
assert_equal(node.getblockcount(), 995)
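        # Helper closures: 'height' maps a block index to either a raw height
        # or a timestamp (plus the rescan window), so both pruneblockchain
        # modes are exercised; 'prune' calls the RPC and checks the reported
        # prune height; 'has_block' checks whether a block file still exists.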
def height(index):
if use_timestamp:
return node.getblockheader(node.getblockhash(index))["time"] + TIMESTAMP_WINDOW
else:
return index
def prune(index):
ret = node.pruneblockchain(height=height(index))
assert_equal(ret, node.getblockchaininfo()['pruneheight'])
def has_block(index):
return os.path.isfile(os.path.join(self.nodes[node_number].datadir, self.chain, "blocks", f"blk{index:05}.dat"))
# should not prune because chain tip of node 3 (995) < PruneAfterHeight (1000)
assert_raises_rpc_error(-1, "Blockchain is too short for pruning", node.pruneblockchain, height(500))
# Save block transaction count before pruning, assert value
block1_details = node.getblock(node.getblockhash(1))
assert_equal(block1_details["nTx"], len(block1_details["tx"]))
# mine 6 blocks so we are at height 1001 (i.e., above PruneAfterHeight)
self.generate(node, 6)
assert_equal(node.getblockchaininfo()["blocks"], 1001)
# Pruned block should still know the number of transactions
assert_equal(node.getblockheader(node.getblockhash(1))["nTx"], block1_details["nTx"])
# negative heights should raise an exception
assert_raises_rpc_error(-8, "Negative", node.pruneblockchain, -10)
# height=100 too low to prune first block file so this is a no-op
prune(100)
assert has_block(0), "blk00000.dat is missing when should still be there"
# Does nothing
node.pruneblockchain(height(0))
assert has_block(0), "blk00000.dat is missing when should still be there"
# height=500 should prune first file
prune(500)
assert not has_block(0), "blk00000.dat is still there, should be pruned by now"
assert has_block(1), "blk00001.dat is missing when should still be there"
# height=650 should prune second file
prune(650)
assert not has_block(1), "blk00001.dat is still there, should be pruned by now"
# height=1000 should not prune anything more, because tip-288 is in blk00002.dat.
prune(1000)
        assert has_block(2), "blk00002.dat is missing when should still be there"
# advance the tip so blk00002.dat and blk00003.dat can be pruned (the last 288 blocks should now be in blk00004.dat)
self.generate(node, 288)
prune(1000)
assert not has_block(2), "blk00002.dat is still there, should be pruned by now"
assert not has_block(3), "blk00003.dat is still there, should be pruned by now"
# stop node, start back up with auto-prune at 550 MiB, make sure still runs
self.restart_node(node_number, extra_args=["-prune=550"])
self.log.info("Success")
def wallet_test(self):
# check that the pruning node's wallet is still in good shape
self.log.info("Stop and start pruning node to trigger wallet rescan")
self.restart_node(2, extra_args=["-prune=550"])
self.log.info("Success")
# check that wallet loads successfully when restarting a pruned node after IBD.
# this was reported to fail in #7494.
self.log.info("Syncing node 5 to test wallet")
self.connect_nodes(0, 5)
nds = [self.nodes[0], self.nodes[5]]
self.sync_blocks(nds, wait=5, timeout=300)
self.restart_node(5, extra_args=["-prune=550"]) # restart to trigger rescan
self.log.info("Success")
def run_test(self):
self.log.info("Warning! This test requires 4GB of disk space")
self.log.info("Mining a big blockchain of 995 blocks")
self.create_big_chain()
# Chain diagram key:
# * blocks on main chain
# +,&,$,@ blocks on other forks
# X invalidated block
# N1 Node 1
#
# Start by mining a simple chain that all nodes have
# N0=N1=N2 **...*(995)
# stop manual-pruning node with 995 blocks
self.stop_node(3)
self.stop_node(4)
self.log.info("Check that we haven't started pruning yet because we're below PruneAfterHeight")
self.test_height_min()
# Extend this chain past the PruneAfterHeight
# N0=N1=N2 **...*(1020)
self.log.info("Check that we'll exceed disk space target if we have a very high stale block rate")
self.create_chain_with_staleblocks()
# Disconnect N0
# And mine a 24 block chain on N1 and a separate 25 block chain on N0
# N1=N2 **...*+...+(1044)
# N0 **...**...**(1045)
#
# reconnect nodes causing reorg on N1 and N2
# N1=N2 **...*(1020) *...**(1045)
# \
# +...+(1044)
#
# repeat this process until you have 12 stale forks hanging off the
# main chain on N1 and N2
# N0 *************************...***************************(1320)
#
# N1=N2 **...*(1020) *...**(1045) *.. ..**(1295) *...**(1320)
# \ \ \
# +...+(1044) &.. $...$(1319)
# Save some current chain state for later use
self.mainchainheight = self.nodes[2].getblockcount() # 1320
self.mainchainhash2 = self.nodes[2].getblockhash(self.mainchainheight)
self.log.info("Check that we can survive a 288 block reorg still")
self.reorg_test() # (1033, )
# Now create a 288 block reorg by mining a longer chain on N1
# First disconnect N1
# Then invalidate 1033 on main chain and 1032 on fork so height is 1032 on main chain
# N1 **...*(1020) **...**(1032)X..
# \
# ++...+(1031)X..
#
# Now mine 300 more blocks on N1
# N1 **...*(1020) **...**(1032) @@...@(1332)
# \ \
# \ X...
# \ \
# ++...+(1031)X.. ..
#
# Reconnect nodes and mine 220 more blocks on N1
# N1 **...*(1020) **...**(1032) @@...@@@(1552)
# \ \
# \ X...
# \ \
# ++...+(1031)X.. ..
#
# N2 **...*(1020) **...**(1032) @@...@@@(1552)
# \ \
# \ *...**(1320)
# \ \
# ++...++(1044) ..
#
# N0 ********************(1032) @@...@@@(1552)
# \
# *...**(1320)
self.log.info("Test that we can rerequest a block we previously pruned if needed for a reorg")
self.reorg_back()
# Verify that N2 still has block 1033 on current chain (@), but not on main chain (*)
# Invalidate 1033 on current chain (@) on N2 and we should be able to reorg to
# original main chain (*), but will require redownload of some blocks
# In order to have a peer we think we can download from, must also perform this invalidation
# on N0 and mine a new longest chain to trigger.
# Final result:
# N0 ********************(1032) **...****(1553)
# \
# X@...@@@(1552)
#
# N2 **...*(1020) **...**(1032) **...****(1553)
# \ \
# \ X@...@@@(1552)
# \
# +..
#
# N1 doesn't change because 1033 on main chain (*) is invalid
self.log.info("Test manual pruning with block indices")
self.manual_test(3, use_timestamp=False)
self.log.info("Test manual pruning with timestamps")
self.manual_test(4, use_timestamp=True)
self.log.info("Test wallet re-scan")
self.wallet_test()
self.log.info("Test invalid pruning command line options")
self.test_invalid_command_line_options()
self.log.info("Done")
if __name__ == '__main__':
PruneTest().main()
| 44.866944 | 138 | 0.613781 | [
"MIT"
] | umkoin/umkoin | test/functional/feature_pruning.py | 21,581 | Python |
import time
from Classes.Packets.PiranhaMessage import PiranhaMessage
class OwnHomeDataMessage(PiranhaMessage):
def __init__(self, messageData):
super().__init__(messageData)
self.messageVersion = 0
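    # Serialises the full home-screen payload for the client: player
    # progression, owned cosmetics and skins, shop offers, current events and,
    # finally, the per-brawler resources, trophies and upgrade levels.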
def encode(self, fields, player):
ownedBrawlersCount = len(player.OwnedBrawlers)
ownedPinsCount = len(player.OwnedPins)
ownedThumbnailCount = len(player.OwnedThumbnails)
ownedSkins = []
for brawlerInfo in player.OwnedBrawlers.values():
try:
ownedSkins.extend(brawlerInfo["Skins"])
except KeyError:
continue
self.writeVint(int(time.time()))
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(player.Trophies) # Trophies
self.writeVint(player.HighestTrophies) # Highest Trophies
self.writeVint(player.HighestTrophies)
self.writeVint(player.TrophyRoadTier)
self.writeVint(player.Experience) # Experience
self.writeDataReference(28, player.Thumbnail) # Thumbnail
self.writeDataReference(43, player.Namecolor) # Namecolor
self.writeVint(0)
self.writeVint(0) # Selected Skins
self.writeVint(0) # Randomizer Skin Selected
self.writeVint(0) # Current Random Skin
self.writeVint(len(ownedSkins))
for skinID in ownedSkins:
self.writeDataReference(29, skinID)
self.writeVint(0) # Unlocked Skin Purchase Option
self.writeVint(0) # New Item State
self.writeVint(0)
self.writeVint(player.HighestTrophies)
self.writeVint(0)
self.writeVint(1)
self.writeBoolean(True)
self.writeVint(player.TokensDoubler)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(141)
self.writeVint(135)
self.writeVint(5)
self.writeVint(93)
self.writeVint(206)
self.writeVint(456)
self.writeVint(792)
self.writeVint(729)
self.writeBoolean(False) # Offer 1
self.writeBoolean(False) # Offer 2
self.writeBoolean(True) # Token Doubler Enabled
self.writeVint(2) # Token Doubler New Tag State
self.writeVint(2) # Event Tickets New Tag State
self.writeVint(2) # Coin Packs New Tag State
self.writeVint(0) # Change Name Cost
self.writeVint(0) # Timer For the Next Name Change
self.writeVint(1) # Offers count
self.writeVint(1) # RewardCount
for i in range(1):
self.writeVint(6) # ItemType
self.writeVint(0)
self.writeDataReference(0) # CsvID
self.writeVint(0)
self.writeVint(0)
self.writeVint(666)
self.writeVint(950400)
self.writeVint(2)
self.writeVint(0)
self.writeBoolean(False)
self.writeVint(3917)
self.writeVint(0)
self.writeBoolean(False)
self.writeVint(49)
self.writeInt(0)
self.writeString("Unlock all skins")
self.writeBoolean(False)
self.writeString()
self.writeVint(-1)
self.writeBoolean(False)
self.writeVint(0)
self.writeVint(0)
self.writeString()
self.writeBoolean(False)
self.writeBoolean(False)
self.writeVint(0)
self.writeVint(player.Tokens)
self.writeVint(-1)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(len(player.SelectedBrawlers))
for i in player.SelectedBrawlers:
self.writeDataReference(16, i)
self.writeString(player.Region)
self.writeString(player.ContentCreator)
self.writeVint(19)
self.writeLong(2, 1) # Unknown
self.writeLong(3, 0) # TokensGained
self.writeLong(4, 0) # TrophiesGained
self.writeLong(6, 0) # DemoAccount
self.writeLong(7, 0) # InvitesBlocked
self.writeLong(8, 0) # StarPointsGained
self.writeLong(9, 1) # ShowStarPoints
self.writeLong(10, 0) # PowerPlayTrophiesGained
self.writeLong(12, 1) # Unknown
self.writeLong(14, 0) # CoinsGained
self.writeLong(15, 0) # AgeScreen | 3 = underage (disable social media) | 1 = age popup
self.writeLong(16, 1)
self.writeLong(17, 1) # TeamChatMuted
self.writeLong(18, 1) # EsportButton
self.writeLong(19, 1) # ChampionShipLivesBuyPopup
self.writeLong(20, 0) # GemsGained
self.writeLong(21, 1) # LookingForTeamState
self.writeLong(22, 1)
self.writeLong(24, 1) # Have already watched club league stupid animation
self.writeVint(0)
self.writeVint(2) # Brawlpass
for i in range(8, 10):
self.writeVint(i)
self.writeVint(34500)
self.writeBoolean(True)
self.writeVint(0)
self.writeUInt8(2)
self.writeUInt(4294967292)
self.writeUInt(4294967295)
self.writeUInt(511)
self.writeUInt(0)
self.writeUInt8(1)
self.writeUInt(4294967292)
self.writeUInt(4294967295)
self.writeUInt(511)
self.writeUInt(0)
self.writeVint(0)
self.writeBoolean(True)
self.writeVint(0)
self.writeBoolean(True)
self.writeVint(ownedPinsCount + ownedThumbnailCount) # Vanity Count
for i in player.OwnedPins:
self.writeDataReference(52, i)
self.writeVint(1)
for i in range(1):
self.writeVint(1)
self.writeVint(1)
for i in player.OwnedThumbnails:
self.writeDataReference(28, i)
self.writeVint(1)
for i in range(1):
self.writeVint(1)
self.writeVint(1)
self.writeBoolean(False)
self.writeInt(0)
self.writeVint(0)
self.writeVint(25) # Count
self.writeVint(1)
self.writeVint(2)
self.writeVint(3)
self.writeVint(4)
self.writeVint(5)
self.writeVint(6)
self.writeVint(7)
self.writeVint(8)
self.writeVint(9)
self.writeVint(10)
self.writeVint(11)
self.writeVint(12)
self.writeVint(13)
self.writeVint(14)
self.writeVint(15)
self.writeVint(16)
self.writeVint(17)
self.writeVint(20)
self.writeVint(21)
self.writeVint(22)
self.writeVint(23)
self.writeVint(24)
self.writeVint(30)
self.writeVint(31)
self.writeVint(32)
self.writeVint(3) # Events
eventIndex = 1
for i in [5, 7, 24]:
self.writeVint(-1)
self.writeVint(eventIndex) # EventType
self.writeVint(0) # EventsBeginCountdown
self.writeVint(51208) # Timer
self.writeVint(0) # tokens reward for new event
self.writeDataReference(15, i) # MapID
self.writeVint(-1) # GameModeVariation
self.writeVint(2) # State
self.writeString()
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0) # Modifiers
self.writeVint(0)
self.writeVint(0)
self.writeBoolean(False) # Map Maker Map Structure Array
self.writeVint(0)
self.writeBoolean(False) # Power League Data Array
self.writeVint(0)
self.writeVint(0)
self.writeBoolean(False) # ChronosTextEntry
self.writeBoolean(False)
self.writeBoolean(False)
self.writeVint(-1)
self.writeBoolean(False)
self.writeBoolean(False)
eventIndex += 1
self.writeVint(0) # Comming Events
self.writeVint(10) # Brawler Upgrade Cost
self.writeVint(20)
self.writeVint(35)
self.writeVint(75)
self.writeVint(140)
self.writeVint(290)
self.writeVint(480)
self.writeVint(800)
self.writeVint(1250)
self.writeVint(1875)
self.writeVint(2800)
self.writeVint(4) # Shop Coins Price
self.writeVint(20)
self.writeVint(50)
self.writeVint(140)
self.writeVint(280)
self.writeVint(4) # Shop Coins Amount
self.writeVint(150)
self.writeVint(400)
self.writeVint(1200)
self.writeVint(2600)
self.writeBoolean(True) # Show Offers Packs
self.writeVint(0)
self.writeVint(23) # IntValueEntry
self.writeLong(10008, 501)
self.writeLong(65, 2)
self.writeLong(1, 41000036) # ThemeID
self.writeLong(60, 36270)
self.writeLong(66, 1)
        self.writeLong(61, 36270) # SupportDisabled state | true if state > 36218
self.writeLong(47, 41381)
self.writeLong(29, 0) # Skin Group Active For Campaign
self.writeLong(48, 41381)
self.writeLong(50, 0) # Coming up quests placeholder
self.writeLong(1100, 500)
self.writeLong(1101, 500)
self.writeLong(1003, 1)
self.writeLong(36, 0)
self.writeLong(14, 0) # Double Token Event
self.writeLong(31, 0) # Gold rush event
self.writeLong(79, 149999)
self.writeLong(80, 160000)
self.writeLong(28, 4)
self.writeLong(74, 1)
self.writeLong(78, 1)
self.writeLong(17, 4)
self.writeLong(10046, 1)
self.writeVint(0) # Timed Int Value Entry
self.writeVint(0) # Custom Event
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeLong(player.ID[0], player.ID[1]) # PlayerID
self.writeVint(0) # NotificationFactory
self.writeVint(-1)
self.writeBoolean(False)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVLong(player.ID[0], player.ID[1])
self.writeVLong(0, 0)
self.writeVLong(0, 0)
self.writeString(player.Name)
self.writeBoolean(player.Registered)
self.writeInt(0)
self.writeVint(15)
self.writeVint(3 + ownedBrawlersCount)
for brawlerInfo in player.OwnedBrawlers.values():
self.writeDataReference(23, brawlerInfo["CardID"])
self.writeVint(1)
self.writeDataReference(5, 8)
self.writeVint(player.Coins)
self.writeDataReference(5, 10)
self.writeVint(player.StarPoints)
self.writeDataReference(5, 13)
self.writeVint(99999) # Club coins
self.writeVint(ownedBrawlersCount)
for brawlerID,brawlerInfo in player.OwnedBrawlers.items():
self.writeDataReference(16, brawlerID)
self.writeVint(brawlerInfo["Trophies"])
self.writeVint(ownedBrawlersCount)
for brawlerID, brawlerInfo in player.OwnedBrawlers.items():
self.writeDataReference(16, brawlerID)
self.writeVint(brawlerInfo["HighestTrophies"])
self.writeVint(0)
self.writeVint(ownedBrawlersCount)
for brawlerID, brawlerInfo in player.OwnedBrawlers.items():
self.writeDataReference(16, brawlerID)
self.writeVint(brawlerInfo["PowerPoints"])
self.writeVint(ownedBrawlersCount)
for brawlerID, brawlerInfo in player.OwnedBrawlers.items():
self.writeDataReference(16, brawlerID)
self.writeVint(brawlerInfo["PowerLevel"] - 1)
self.writeVint(0)
self.writeVint(ownedBrawlersCount)
for brawlerID, brawlerInfo in player.OwnedBrawlers.items():
self.writeDataReference(16, brawlerID)
self.writeVint(brawlerInfo["State"])
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(0)
self.writeVint(player.Gems) # Diamonds
self.writeVint(player.Gems) # Free Diamonds
self.writeVint(player.Level) # Player Level
self.writeVint(100)
self.writeVint(0) # CumulativePurchasedDiamonds or Avatar User Level Tier | 10000 < Level Tier = 3 | 1000 < Level Tier = 2 | 0 < Level Tier = 1
self.writeVint(0) # Battle Count
self.writeVint(0) # WinCount
self.writeVint(0) # LoseCount
self.writeVint(0) # WinLooseStreak
self.writeVint(0) # NpcWinCount
self.writeVint(0) # NpcLoseCount
self.writeVint(2) # TutorialState | shouldGoToFirstTutorialBattle = State == 0
self.writeVint(0)
def decode(self):
fields = {}
# fields["AccountID"] = self.readLong()
# fields["HomeID"] = self.readLong()
# fields["PassToken"] = self.readString()
# fields["FacebookID"] = self.readString()
# fields["GamecenterID"] = self.readString()
# fields["ServerMajorVersion"] = self.readInt()
# fields["ContentVersion"] = self.readInt()
# fields["ServerBuild"] = self.readInt()
# fields["ServerEnvironment"] = self.readString()
# fields["SessionCount"] = self.readInt()
# fields["PlayTimeSeconds"] = self.readInt()
# fields["DaysSinceStartedPlaying"] = self.readInt()
# fields["FacebookAppID"] = self.readString()
# fields["ServerTime"] = self.readString()
# fields["AccountCreatedDate"] = self.readString()
# fields["StartupCooldownSeconds"] = self.readInt()
# fields["GoogleServiceID"] = self.readString()
# fields["LoginCountry"] = self.readString()
# fields["KunlunID"] = self.readString()
# fields["Tier"] = self.readInt()
# fields["TencentID"] = self.readString()
#
# ContentUrlCount = self.readInt()
# fields["GameAssetsUrls"] = []
# for i in range(ContentUrlCount):
# fields["GameAssetsUrls"].append(self.readString())
#
# EventUrlCount = self.readInt()
# fields["EventAssetsUrls"] = []
# for i in range(EventUrlCount):
# fields["EventAssetsUrls"].append(self.readString())
#
# fields["SecondsUntilAccountDeletion"] = self.readVint()
# fields["SupercellIDToken"] = self.readCompressedString()
# fields["IsSupercellIDLogoutAllDevicesAllowed"] = self.readBoolean()
# fields["isSupercellIDEligible"] = self.readBoolean()
# fields["LineID"] = self.readString()
# fields["SessionID"] = self.readString()
# fields["KakaoID"] = self.readString()
# fields["UpdateURL"] = self.readString()
# fields["YoozooPayNotifyUrl"] = self.readString()
# fields["UnbotifyEnabled"] = self.readBoolean()
# super().decode(fields)
return fields
def execute(message, calling_instance, fields):
pass
def getMessageType(self):
return 24101
def getMessageVersion(self):
return self.messageVersion | 31.844538 | 152 | 0.599419 | [
"Apache-2.0"
] | ServerBSvvv/BSDS-V41 | Classes/Packets/Server/Home/OwnHomeDataMessage.py | 15,158 | Python |
from sympy.logic.boolalg import to_cnf, eliminate_implications, distribute_and_over_or, \
compile_rule, conjuncts, disjuncts, to_int_repr, fuzzy_not, Boolean, is_cnf
from sympy import symbols, And, Or, Xor, Not, Nand, Nor, Implies, Equivalent, ITE
from sympy.utilities.pytest import raises, XFAIL
def test_overloading():
"""Test that |, & are overloaded as expected"""
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert A & B == And(A, B)
assert A | B == Or(A, B)
assert (A & B) | C == Or(And(A, B), C)
assert A >> B == Implies(A, B)
assert A << B == Implies(B, A)
assert ~A == Not(A)
assert A ^ B == Xor(A, B)
def test_And():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert And() == True
assert And(A) == A
assert And(True) == True
assert And(False) == False
assert And(True, True ) == True
assert And(True, False) == False
assert And(False, False) == False
assert And(True, A) == A
assert And(False, A) == False
assert And(True, True, True) == True
assert And(True, True , A) == A
assert And(True, False, A) == False
def test_Or():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert Or() == False
assert Or(A) == A
assert Or(True) == True
assert Or(False) == False
assert Or(True, True ) == True
assert Or(True, False) == True
assert Or(False, False) == False
assert Or(True, A) == True
assert Or(False, A) == A
assert Or(True, False, False) == True
assert Or(True, False, A) == True
assert Or(False, False, A) == A
def test_Xor():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert Xor() == False
assert Xor(A) == A
assert Xor(True) == True
assert Xor(False) == False
assert Xor(True, True ) == False
assert Xor(True, False) == True
assert Xor(False, False) == False
assert Xor(True, A) == ~A
assert Xor(False, A) == A
assert Xor(True, False, False) == True
assert Xor(True, False, A) == ~A
assert Xor(False, False, A) == A
def test_Not():
assert Not(True) == False
assert Not(False) == True
assert Not(True, True ) == [False, False]
assert Not(True, False) == [False, True ]
assert Not(False,False) == [True, True ]
def test_Nand():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert Nand() == False
assert Nand(A) == ~A
assert Nand(True) == False
assert Nand(False) == True
assert Nand(True, True ) == False
assert Nand(True, False) == True
assert Nand(False, False) == True
assert Nand(True, A) == ~A
assert Nand(False, A) == True
assert Nand(True, True, True) == False
assert Nand(True, True , A) == ~A
assert Nand(True, False, A) == True
def test_Nor():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert Nor() == True
assert Nor(A) == ~A
assert Nor(True) == False
assert Nor(False) == True
assert Nor(True, True ) == False
assert Nor(True, False) == False
assert Nor(False, False) == True
assert Nor(True, A) == False
assert Nor(False, A) == ~A
assert Nor(True, True, True) == False
assert Nor(True, True , A) == False
assert Nor(True, False, A) == False
def test_Implies():
A, B, C = list(map(Boolean, symbols('A,B,C')))
raises(ValueError, "Implies(A,B,C)")
assert Implies(True, True) == True
assert Implies(True, False) == False
assert Implies(False, True) == True
assert Implies(False, False) == True
assert A >> B == B << A
def test_Equivalent():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert Equivalent(A, B) == Equivalent(B, A) == Equivalent(A, B, A)
assert Equivalent() == True
assert Equivalent(A, A) == Equivalent(A) == True
assert Equivalent(True, True) == Equivalent(False, False) == True
assert Equivalent(True, False) == Equivalent(False, True) == False
assert Equivalent(A, True) == A
assert Equivalent(A, False) == Not(A)
assert Equivalent(A, B, True) == A & B
assert Equivalent(A, B, False) == ~A & ~B
def test_bool_symbol():
"""Test that mixing symbols with boolean values
works as expected"""
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert And(A, True) == A
assert And(A, True, True) == A
assert And(A, False) == False
assert And(A, True, False) == False
assert Or(A, True) == True
assert Or(A, False) == A
def test_subs():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert (A & B).subs(A, True) == B
assert (A & B).subs(A, False) == False
assert (A & B).subs(B, True) == A
assert (A & B).subs(B, False) == False
assert (A & B).subs({A: True, B:True}) == True
assert (A | B).subs(A, True) == True
assert (A | B).subs(A, False) == B
assert (A | B).subs(B, True) == True
assert (A | B).subs(B, False) == A
assert (A | B).subs({A: True, B:True}) == True
"""
We test the axioms of Boolean algebra
see http://en.wikipedia.org/wiki/Boolean_algebra_(structure)
"""
def test_commutative():
    """Test for commutativity of And and Or"""
A, B = list(map(Boolean, symbols('A,B')))
assert A & B == B & A
assert A | B == B | A
def test_and_associativity():
"""Test for associativity of And"""
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert (A & B) & C == A & (B & C)
def test_or_associativity():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert ((A | B) | C) == (A | (B | C))
def test_double_negation():
a = Boolean()
assert ~(~a) == a
def test_De_Morgan():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert ~(A & B) == (~A) | (~B)
assert ~(A | B) == (~A) & (~B)
assert ~(A | B | C) == ~A & ~B & ~C
# test methods
def test_eliminate_implications():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert eliminate_implications(Implies(A, B, evaluate=False)) == (~A) | B
assert eliminate_implications(A >> (C >>Not(B))) == Or(Or(Not(B), Not(C)), Not(A))
def test_conjuncts():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert conjuncts(A & B & C) == set([A, B, C])
assert conjuncts((A | B) & C) == set([A | B, C])
assert conjuncts(A) == set([A])
assert conjuncts(True) == set([True])
assert conjuncts(False) == set([False])
def test_disjuncts():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert disjuncts(A | B | C) == set([A, B, C])
assert disjuncts((A | B) & C) == set([(A | B) & C])
assert disjuncts(A) == set([A])
assert disjuncts(True) == set([True])
assert disjuncts(False) == set([False])
def test_distribute():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert distribute_and_over_or(Or(And(A, B), C)) == And(Or(A, C), Or(B, C))
def test_to_cnf():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert to_cnf(~(B | C)) == And(Not(B), Not(C))
assert to_cnf((A & B) | C) == And(Or(A, C), Or(B, C))
assert to_cnf(A >> B) == (~A) | B
assert to_cnf(A >> (B & C)) == (~A | B) & (~A | C)
assert to_cnf(Equivalent(A, B)) == And(Or(A, Not(B)), Or(B, Not(A)))
assert to_cnf(Equivalent(A, B & C)) == (~A | B) & (~A | C) & (~B | ~C | A)
assert to_cnf(Equivalent(A, B | C)) == \
And(Or(Not(B), A), Or(Not(C), A), Or(B, C, Not(A)))
def test_compile_rule():
from sympy import sympify
assert compile_rule("A & B") == sympify("A & B")
def test_to_int_repr():
x, y, z = list(map(Boolean, symbols('x,y,z')))
def sorted_recursive(arg):
try:
return sorted(sorted_recursive(x) for x in arg)
except TypeError: #arg is not a sequence
return arg
assert sorted_recursive(to_int_repr([x | y, z | x], [x, y, z])) == \
sorted_recursive([[1, 2], [1, 3]])
assert sorted_recursive(to_int_repr([x | y, z | ~x], [x, y, z])) == \
sorted_recursive([[1, 2], [3, -1]])
def test_is_cnf():
x, y, z = symbols('x,y,z')
assert is_cnf(x | y | z) == True
assert is_cnf(x & y & z) == True
assert is_cnf((x | y) & z) == True
assert is_cnf((x & y) | z) == False
def test_ITE():
A, B, C = list(map(Boolean, symbols('A,B,C')))
assert ITE(True, False, True) == False
assert ITE(True, True, False) == True
assert ITE(False, True, False) == False
assert ITE(False, False, True) == True
A = True
assert ITE(A, B, C) == B
A = False
assert ITE(A, B, C) == C
B = True
assert ITE(And(A, B), B, C) == C
assert ITE(Or(A, False), And(B, True), False) == False
| 32.097015 | 89 | 0.562892 | [
"BSD-3-Clause"
] | cielavenir/sympy | py3k-sympy/sympy/logic/tests/test_boolalg.py | 8,602 | Python |
import argparse
from pathlib import Path
import tempfile
from typing import List
from gensim.models import KeyedVectors
from gensim.scripts.glove2word2vec import glove2word2vec
from target_extraction.data_types import TargetTextCollection
from target_extraction.tokenizers import spacy_tokenizer
def parse_path(path_string: str) -> Path:
path_string = Path(path_string).resolve()
return path_string
def shrink_glove_file(glove_fp: Path, filter_words: List[str], save_fp: Path
) -> None:
'''
    :param glove_fp: File path to the glove file that is to be shrunk
:param filter_words: List of words to filter/shrink the glove file/vectors
by
    :param save_fp: File path to write the shrunk glove vectors to
'''
with save_fp.open('w+') as save_file:
with glove_fp.open('r') as glove_file:
for glove_vector in glove_file:
glove_parts = glove_vector.split()
if (len(glove_parts) == 301 or len(glove_parts) == 51 or
len(glove_parts) == 201):
pass
else:
continue
glove_word = glove_parts[0]
if glove_word in filter_words:
save_file.write(glove_vector)
#python tdsa_augmentation/data_creation/shrink_glove_to_targets.py ./data/original_restaurant_sentiment/train.json ./resources/word_embeddings/glove.840B.300d.txt ./here
if __name__ == '__main__':
glove_fp_help = 'File path to the Glove embedding to be shrunk and '\
'converted to Word2Vec format'
parser = argparse.ArgumentParser()
parser.add_argument("json_train_data", type=parse_path,
help='File path JSON training data')
parser.add_argument("glove_embedding_fp", type=parse_path,
help=glove_fp_help)
parser.add_argument("target_only_word2vec_path", type=parse_path,
help='File path to save the embedding too.')
args = parser.parse_args()
save_fp = args.target_only_word2vec_path
if save_fp.exists():
print('A file already exists at the location to store '
f'the new Word2Vec model/vector: {save_fp}\n'
'Thus skipping the rest of this script.')
else:
dataset = TargetTextCollection.load_json(args.json_train_data)
all_targets = list(dataset.target_count(lower=True).keys())
tokenizer = spacy_tokenizer()
tokenised_targets = [target for targets in all_targets for target in tokenizer(targets)]
with tempfile.TemporaryDirectory() as temp_dir:
shrink_glove_temp_fp = Path(temp_dir, 'temp_glove')
shrink_word_vec_temp_fp = Path(temp_dir, 'temp_wordvec')
shrink_glove_file(args.glove_embedding_fp, tokenised_targets, shrink_glove_temp_fp)
glove2word2vec(shrink_glove_temp_fp, shrink_word_vec_temp_fp)
model = KeyedVectors.load_word2vec_format(shrink_word_vec_temp_fp)
model.save(str(save_fp))
print(f'Word2Vec shrunk to target model saved to {save_fp}')
| 42.863014 | 169 | 0.660914 | [
"Apache-2.0"
] | apmoore1/tdsa_augmentation | tdsa_augmentation/data_creation/shrink_glove_to_targets.py | 3,129 | Python |
import platform
import random
import shutil
import subprocess
import textwrap
from pathlib import Path, PurePath
import pytest
from cibuildwheel.docker_container import DockerContainer
from cibuildwheel.environment import EnvironmentAssignment
# for these tests we use manylinux2014 images, because they're available on
# multiple architectures and include python3.8
pm = platform.machine()
if pm == "x86_64":
DEFAULT_IMAGE = "quay.io/pypa/manylinux2014_x86_64:2020-05-17-2f8ac3b"
elif pm == "aarch64":
DEFAULT_IMAGE = "quay.io/pypa/manylinux2014_aarch64:2020-05-17-2f8ac3b"
elif pm == "ppc64le":
DEFAULT_IMAGE = "quay.io/pypa/manylinux2014_ppc64le:2020-05-17-2f8ac3b"
elif pm == "s390x":
DEFAULT_IMAGE = "quay.io/pypa/manylinux2014_s390x:2020-05-17-2f8ac3b"
@pytest.mark.docker
def test_simple():
with DockerContainer(DEFAULT_IMAGE) as container:
assert container.call(["echo", "hello"], capture_output=True) == "hello\n"
@pytest.mark.docker
def test_no_lf():
with DockerContainer(DEFAULT_IMAGE) as container:
assert container.call(["printf", "hello"], capture_output=True) == "hello"
@pytest.mark.docker
def test_environment():
with DockerContainer(DEFAULT_IMAGE) as container:
assert (
container.call(
["sh", "-c", "echo $TEST_VAR"], env={"TEST_VAR": "1"}, capture_output=True
)
== "1\n"
)
@pytest.mark.docker
def test_cwd():
with DockerContainer(DEFAULT_IMAGE, cwd="/cibuildwheel/working_directory") as container:
assert container.call(["pwd"], capture_output=True) == "/cibuildwheel/working_directory\n"
assert container.call(["pwd"], capture_output=True, cwd="/opt") == "/opt\n"
@pytest.mark.docker
def test_container_removed():
with DockerContainer(DEFAULT_IMAGE) as container:
docker_containers_listing = subprocess.run(
"docker container ls",
shell=True,
check=True,
stdout=subprocess.PIPE,
universal_newlines=True,
).stdout
assert container.name in docker_containers_listing
old_container_name = container.name
docker_containers_listing = subprocess.run(
"docker container ls",
shell=True,
check=True,
stdout=subprocess.PIPE,
universal_newlines=True,
).stdout
assert old_container_name not in docker_containers_listing
@pytest.mark.docker
def test_large_environment():
# max environment variable size is 128kB
long_env_var_length = 127 * 1024
large_environment = {
"a": "0" * long_env_var_length,
"b": "0" * long_env_var_length,
"c": "0" * long_env_var_length,
"d": "0" * long_env_var_length,
}
with DockerContainer(DEFAULT_IMAGE) as container:
# check the length of d
assert (
container.call(["sh", "-c", "echo ${#d}"], env=large_environment, capture_output=True)
== f"{long_env_var_length}\n"
)
@pytest.mark.docker
def test_binary_output():
with DockerContainer(DEFAULT_IMAGE) as container:
# note: the below embedded snippets are in python2
        # check that we can pass through arbitrary binary data without erroring
container.call(
[
"/usr/bin/python2",
"-c",
textwrap.dedent(
"""
import sys
sys.stdout.write(''.join(chr(n) for n in range(0, 256)))
"""
),
]
)
# check that we can capture arbitrary binary data
output = container.call(
[
"/usr/bin/python2",
"-c",
textwrap.dedent(
"""
import sys
sys.stdout.write(''.join(chr(n % 256) for n in range(0, 512)))
"""
),
],
capture_output=True,
)
data = bytes(output, encoding="utf8", errors="surrogateescape")
for i in range(512):
assert data[i] == i % 256
# check that environment variables can carry binary data, except null characters
# (https://www.gnu.org/software/libc/manual/html_node/Environment-Variables.html)
binary_data = bytes(n for n in range(1, 256))
binary_data_string = str(binary_data, encoding="utf8", errors="surrogateescape")
output = container.call(
["python2", "-c", 'import os, sys; sys.stdout.write(os.environ["TEST_VAR"])'],
env={"TEST_VAR": binary_data_string},
capture_output=True,
)
assert output == binary_data_string
@pytest.mark.docker
def test_file_operations(tmp_path: Path):
with DockerContainer(DEFAULT_IMAGE) as container:
# test copying a file in
test_binary_data = bytes(random.randrange(256) for _ in range(1000))
original_test_file = tmp_path / "test.dat"
original_test_file.write_bytes(test_binary_data)
dst_file = PurePath("/tmp/test.dat")
container.copy_into(original_test_file, dst_file)
output = container.call(["cat", dst_file], capture_output=True)
assert test_binary_data == bytes(output, encoding="utf8", errors="surrogateescape")
@pytest.mark.docker
def test_dir_operations(tmp_path: Path):
with DockerContainer(DEFAULT_IMAGE) as container:
test_binary_data = bytes(random.randrange(256) for _ in range(1000))
original_test_file = tmp_path / "test.dat"
original_test_file.write_bytes(test_binary_data)
# test copying a dir in
test_dir = tmp_path / "test_dir"
test_dir.mkdir()
test_file = test_dir / "test.dat"
shutil.copyfile(original_test_file, test_file)
dst_dir = PurePath("/tmp/test_dir")
dst_file = dst_dir / "test.dat"
container.copy_into(test_dir, dst_dir)
output = container.call(["cat", dst_file], capture_output=True)
assert test_binary_data == bytes(output, encoding="utf8", errors="surrogateescape")
# test glob
assert container.glob(dst_dir, "*.dat") == [dst_file]
# test copy dir out
new_test_dir = tmp_path / "test_dir_new"
container.copy_out(dst_dir, new_test_dir)
assert test_binary_data == (new_test_dir / "test.dat").read_bytes()
@pytest.mark.docker
def test_environment_executor():
with DockerContainer(DEFAULT_IMAGE) as container:
assignment = EnvironmentAssignment("TEST=$(echo 42)")
assert assignment.evaluated_value({}, container.environment_executor) == "42"
| 33.385 | 98 | 0.634417 | [
"BSD-2-Clause"
] | YannickJadoul/cibuildwheel | unit_test/docker_container_test.py | 6,677 | Python |
import numpy as np
from envs.babyai.oracle.teacher import Teacher
class LandmarkCorrection(Teacher):
def empty_feedback(self):
"""
Return a tensor corresponding to no feedback.
"""
return np.array([-1, -1])
def random_feedback(self):
"""
        Return a tensor corresponding to random feedback.
"""
raise NotImplementedError('random feedback not implemented')
def compute_feedback(self):
"""
        Return the (color, type) of the landmark the agent should be directed toward.
"""
        # TODO: Unhardcode this
# Hardcoded 1 time-step away
# Iterate through the objects and order them by their distance from the current object
# Pick the first one that is closer to the goal than the current object. If none, then return the goal
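        # Illustrative example of the rule above (hypothetical numbers, not from the env):
        # with goal distances [5, 2, 3] for three distractors, agent distances [1, 3, 2],
        # and the agent currently 4 away from the goal, distractors 1 and 2 are closer to
        # the goal than the agent; of those, distractor 2 is nearest to the agent, so its
        # (color, type) pair is the landmark returned below.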
dist_pos = np.array(self.env.dist_pos)
# Distance agent to objects
agentobj_distances = np.sum(np.abs(dist_pos - self.env.agent_pos), axis=1)
# Distance agent to goal
curr_dist = np.sum(np.abs(self.env.obj_pos - self.env.agent_pos))
# Distance object to goal
goalobj_distances = np.sum(np.abs(dist_pos - self.env.obj_pos), axis=1)
idx_closer = np.where(goalobj_distances < curr_dist)
if len(idx_closer[0]) == 0:
return np.array([self.env.obj_color, self.env.obj_type])
else:
idx_agentobj = range(len(agentobj_distances))
idx_agentobj = [x for _,x in sorted(zip(agentobj_distances, idx_agentobj))]
for idx in idx_agentobj:
if idx in idx_closer[0]:
break
return np.array([self.env.dist_colors[idx], self.env.dist_types[idx]])
def feedback_condition(self):
"""
Returns true when we should give feedback.
Currently returns true when the agent's past action did not match the oracle's action.
"""
# For now, we're being lazy and correcting the agent any time it strays from the agent's optimal set of actions.
# This is kind of sketchy since multiple paths can be optimal.
return len(self.agent_actions) > 0 and (not self.agent_actions[-1] == self.oracle_actions[-1])
| 39.660714 | 120 | 0.638001 | [
"MIT"
] | AliengirlLiv/babyai | envs/babyai/oracle/landmark_correction.py | 2,221 | Python |
from inits import *
import tensorflow as tf
flags = tf.app.flags
FLAGS = flags.FLAGS
# global unique layer ID dictionary for layer name assignment
_LAYER_UIDS = {}
def get_layer_uid(layer_name=''):
"""Helper function, assigns unique layer IDs."""
if layer_name not in _LAYER_UIDS:
_LAYER_UIDS[layer_name] = 1
return 1
else:
_LAYER_UIDS[layer_name] += 1
return _LAYER_UIDS[layer_name]
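# Illustrative usage (added note, not in the original module): repeated calls with
# the same name return an incrementing counter, e.g. get_layer_uid('dense') -> 1,
# get_layer_uid('dense') -> 2, get_layer_uid('graphconvolution') -> 1.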
def sparse_dropout(x, keep_prob, noise_shape):
"""Dropout for sparse tensors."""
random_tensor = keep_prob
random_tensor += tf.random_uniform(noise_shape)
dropout_mask = tf.cast(tf.floor(random_tensor), dtype=tf.bool)
pre_out = tf.sparse_retain(x, dropout_mask)
return pre_out * (1./keep_prob)
def dot(x, y, sparse=False):
"""Wrapper for tf.matmul (sparse vs dense)."""
if sparse:
res = tf.sparse_tensor_dense_matmul(x, y)
else:
res = tf.matmul(x, y)
return res
class Layer(object):
"""Base layer class. Defines basic API for all layer objects.
Implementation inspired by keras (http://keras.io).
# Properties
name: String, defines the variable scope of the layer.
logging: Boolean, switches Tensorflow histogram logging on/off
# Methods
_call(inputs): Defines computation graph of layer
(i.e. takes input, returns output)
__call__(inputs): Wrapper for _call()
_log_vars(): Log all variables
"""
def __init__(self, **kwargs):
allowed_kwargs = {'name', 'logging'}
for kwarg in kwargs.keys():
assert kwarg in allowed_kwargs, 'Invalid keyword argument: ' + kwarg
name = kwargs.get('name')
if not name:
layer = self.__class__.__name__.lower()
name = layer + '_' + str(get_layer_uid(layer))
self.name = name
self.vars = {}
logging = kwargs.get('logging', False)
self.logging = logging
self.sparse_inputs = False
def _call(self, inputs):
return inputs
def __call__(self, inputs):
with tf.name_scope(self.name):
if self.logging and not self.sparse_inputs:
tf.summary.histogram(self.name + '/inputs', inputs)
outputs = self._call(inputs)
if self.logging:
tf.summary.histogram(self.name + '/outputs', outputs)
return outputs
def _log_vars(self):
for var in self.vars:
tf.summary.histogram(self.name + '/vars/' + var, self.vars[var])
class Dense(Layer):
"""Dense layer."""
def __init__(self, input_dim, output_dim, placeholders, dropout=0., sparse_inputs=False,
act=tf.nn.relu, bias=False, featureless=False, **kwargs):
super(Dense, self).__init__(**kwargs)
if dropout:
self.dropout = placeholders['dropout']
else:
self.dropout = 0.
self.act = act
self.sparse_inputs = sparse_inputs
self.featureless = featureless
self.bias = bias
# helper variable for sparse dropout
self.num_features_nonzero = placeholders['num_features_nonzero']
with tf.variable_scope(self.name + '_vars'):
self.vars['weights'] = glorot([input_dim, output_dim],
name='weights')
if self.bias:
self.vars['bias'] = zeros([output_dim], name='bias')
if self.logging:
self._log_vars()
def _call(self, inputs):
x = inputs
# dropout
if self.sparse_inputs:
x = sparse_dropout(x, 1-self.dropout, self.num_features_nonzero)
else:
x = tf.nn.dropout(x, 1-self.dropout)
# transform
output = dot(x, self.vars['weights'], sparse=self.sparse_inputs)
# bias
if self.bias:
output += self.vars['bias']
return self.act(output)
class GraphConvolution(Layer):
"""Graph convolution layer."""
def __init__(self, input_dim, output_dim, placeholders, dropout=0.,
sparse_inputs=False, act=tf.nn.relu, bias=False,
featureless=False, **kwargs):
super(GraphConvolution, self).__init__(**kwargs)
if dropout:
self.dropout = placeholders['dropout']
else:
self.dropout = 0.
self.act = act
self.support = placeholders['support']
self.sparse_inputs = sparse_inputs
self.featureless = featureless
self.bias = bias
# helper variable for sparse dropout
self.num_features_nonzero = placeholders['num_features_nonzero']
with tf.variable_scope(self.name + '_vars'):
for i in range(len(self.support)):
self.vars['weights_' + str(i)] = glorot([input_dim, output_dim],
name='weights_' + str(i))
if self.bias:
self.vars['bias'] = zeros([output_dim], name='bias')
if self.logging:
self._log_vars()
def _call(self, inputs):
x = inputs
# dropout
if self.sparse_inputs:
x = sparse_dropout(x, 1-self.dropout, self.num_features_nonzero)
else:
x = tf.nn.dropout(x, 1-self.dropout)
# convolve
supports = list()
for i in range(len(self.support)):
if not self.featureless:
pre_sup = dot(x, self.vars['weights_' + str(i)],
sparse=self.sparse_inputs)
else:
pre_sup = self.vars['weights_' + str(i)]
support = dot(self.support[i], pre_sup, sparse=True)
supports.append(support)
output = tf.add_n(supports)
# bias
if self.bias:
output += self.vars['bias']
return self.act(output)
class TAGraphConvolution(Layer):
"""Graph convolution layer."""
def __init__(self, input_dim, output_dim, placeholders, dropout=0.,
sparse_inputs=False, act=tf.nn.relu, bias=False,
featureless=False, **kwargs):
super(TAGraphConvolution, self).__init__(**kwargs)
if dropout:
self.dropout = placeholders['dropout']
else:
self.dropout = 0.
self.act = act
self.support = placeholders['support']
self.sparse_inputs = sparse_inputs
self.featureless = featureless
self.bias = bias
# helper variable for sparse dropout
self.num_features_nonzero = placeholders['num_features_nonzero']
with tf.variable_scope(self.name + '_vars'):
for k in range(2):
self.vars['weights_' + str(k)] = tf.get_variable(shape=[input_dim, output_dim], name=('weights_' + str(k)), initializer=tf.contrib.layers.xavier_initializer())
if self.bias:
# self.vars['bias'] = ones([1],name='bias')
# self.vars['bias'] = self.vars['bias'] * np.ones([2708,output_dim],dtype=np.float32)
self.vars['bias'] = zeros([output_dim], name='bias') # zeros([2708,output_dim], name='bias')
self.conv = np.zeros(output_dim,dtype=np.float32)
if self.logging:
self._log_vars()
def _call(self, inputs):
x = inputs
# dropout
if self.sparse_inputs:
x = sparse_dropout(x, 1-self.dropout, self.num_features_nonzero)
else:
x = tf.nn.dropout(x, 1-self.dropout)
# convolve
supports = list()
for k in range(2):
w_k = self.support[:,:,k]
# s = tf.matmul(w_k,x) #
G_k = self.vars['weights_' + str(k)]
res = dot(x,G_k,sparse=self.sparse_inputs) # res = tf.matmul(s,G_k)
res = dot(w_k,res)
supports.append(res)
output = tf.add_n(supports)
# self.conv = tf.add(self.conv,res)
# bias
if self.bias:
output += self.vars['bias'] # self.conv += self.vars['bias']
return self.act(output) # self.conv
| 31.297297 | 175 | 0.576733 | [
"MIT"
] | krohak/TAGCN | layers.py | 8,106 | Python |
import numpy
import mysql.connector
from sklearn.preprocessing import LabelEncoder
from sklearn.preprocessing import OneHotEncoder
from sklearn import tree
##-------------------------- Fetch data from the database --------------------------------
cnx = mysql.connector.connect(user = [type your user] , password = [type your password] ,
host = [type your host] , database = [type your database name] )
cur = cnx.cursor()
cur.execute("SELECT Neighborhood, Area, rooms, Antiquity FROM specifications")
inputData = cur.fetchall()
cur.execute("SELECT Price FROM specifications")
outputData = cur.fetchall()
if cur:
cur.close()
if cnx:
cnx.close()
## TestData
newApartments = [['ولنجک', 120, '2','2'],
['میرداماد', 110, '2','0'],
['هروی', 200, '4','2']]
for i in newApartments: ## Add newApartments to input of table
inputData.append(i)
Neighborhood = list()
Area = list()
rooms = list()
Antiquity = list()
for i in inputData :
Neighborhood.append(i[0])
Area.append(i[1])
rooms.append(i[2])
Antiquity.append(i[3])
# Encode Neighborhood
values = numpy.array(Neighborhood)
# integer encode
labelEncoder = LabelEncoder()
integer_encoded = labelEncoder.fit_transform(values)
# binary encode
NeighborhoodOHE = OneHotEncoder(sparse=False)
integer_encoded = integer_encoded.reshape(len(integer_encoded), 1)
NeighborhoodOHE = NeighborhoodOHE.fit_transform(integer_encoded)
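# Added explanatory note (not in the original script): LabelEncoder maps each
# neighborhood name to an integer and OneHotEncoder(sparse=False) expands it to a
# one-hot row, so NeighborhoodOHE has shape
# (len(Neighborhood), number_of_unique_neighborhoods); these columns are stacked
# with Area, rooms and Antiquity below.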
test= Area+rooms
x = numpy.column_stack((NeighborhoodOHE, Area,rooms, Antiquity))
y = outputData
print(x[1])
print(len(x))
print(len(x[1]))
temp = numpy.split(x, [(-1)*len(newApartments)])
x = temp[0]
newApartments_enc = temp[1]
# Start training and testing
clf = tree.DecisionTreeClassifier()
clf = clf.fit(x, y)
# Encode New Apartment
answer = clf.predict(newApartments_enc)
for i in range(len(answer)):
print("The price of Apartment in %s with %i metters Area, is approaximately %s Tomans." % (newApartments[i][0],newApartments[i][1], answer[i]))
| 29.450704 | 148 | 0.66045 | [
"MIT"
] | AlirezaMojtabavi/Predict-Price-of-an-Apartment-in-Tehran | PredictPrice.py | 2,108 | Python |
# Given a statement, find the parse tree that led to it.
from tree_parser import *
import pickle #pickle.dump(database.proof_datas, open(output_file,'wb'))
from copy import deepcopy
from tree import *
#output_file = 'tree_parse_data'
# class InitialStringSearcher:
# def __init__(self):
# self.nodes = {}
# self.return_value = None
#
# def add(self,string,value):
# return self.add()
#
# def add_with_index(self,string, value,index):
# if index==len(string):
# self.value=value
# char = string[index]
# if char not in self.nodes:
# self.nodes[char] = InitialStringSearcher()
# self.nodes[char].add_with_index(string,value,index+1)
class InitialStringSearcher:
def __init__(self):
self.known_values = {}
#self.known_lengths = {}
def add(self,string,value):
self.known_values[tuple(string)]=value
#print 'found new substring', value[0]
#self.known_lengths[string]=len(string)
def find_longest(self,string):
#label=any(label for label in self.known_values if is_initial_string(label,string))
for l in self.known_values:
if is_initial_string(l,string): return self.known_values[l]
return False
def clear(self):
self.known_values = {}
class StatementParser:
def __init__(self,database):
# this is a really ugly hack.
self.wff_variables = set(['ph','ps','ch','th','et','ze','si','rh','mu','la','ka','ta'])
self.set_variables = set(['a', 'a"', "a'", 'a0', 'a0_', 'a1', 'a1_', 'b', 'b"', "b'", 'b0', 'b0_', 'b1', 'b1_', 'c', 'c"', "c'", 'c0_', 'c1_', 'd', 'd"', "d'", 'd0', 'd1', 'e', 'e"', "e'", 'e0', 'e1', 'f', 'f"', "f'", 'f0_', 'f1', 'g', 'g"', "g'", 'g0', 'g1', 'h', 'h"', "h'", 'h0', 'h1', 'i', 'i"', "i'", 'i0', 'i1', 'j', 'j"', "j'", 'j0', 'j1', 'k', 'k"', "k'", 'k0', 'k1', 'l', 'l"', "l'", 'l0', 'l1', 'm', 'm"', "m'", 'm0', 'm1', 'n', 'n"', "n'", 'n0_', 'n1', 'o', 'o"', 'o"_', "o'", "o'_", 'o0', 'o0_', 'o1', 'o1_', 'p', 'p"', "p'", 'p0', 'p1', 'q', 'q"', "q'", 'q0', 'q1', 'r', 'r"', "r'", 'r0', 'r1', 's', 's"', 's"_', "s'", "s'_", 's0', 's1', 't', 't"', "t'", 't0', 't1', 'u', 'u"', "u'", 'u0', 'u1', 'v', 'v"', 'v"_', "v'", "v'_", 'v0', 'v1', 'v2', 'w', 'w"', "w'", 'w0', 'w1', 'x', 'x"', "x'", 'x0', 'x1', 'y', 'y"', "y'", 'y0', 'y1', 'z', 'z"', "z'", 'z0', 'z1'])
self.search = InitialStringSearcher()
self.failed_parses = set()
self.propositions = database.non_entails_axioms
# self.propositions = {}
# for label in database.propositions:
# statement = database.propositions[label].statement
# if statement[0] != '|-': # used in the parse tree
# self.propositions[label] = database.propositions[label]#statement
# add all the variables we ever use to self.variables
# self.variables = set()
# for label in database.propositions:
# self.variables=self.variables.union(database.propositions[label].block.v)
# predefine self.variables because why not
self.variables = set(['.0.', "d'", 'd"', 'D"', 'c0_', 'd0', 'd1', 'q1', 'q0', 'x1', 'G2', 'G1', 'G0', "G'", '-t', '-w', 'q"', 'G"', 'th', 'ta', 'g1', 'g0', 'H', '+t', 'p0', '.0b', "g'", 'P', 'F3', 'g"', 'X', "W'", 'W"', 'h', "ch'", 'J0', 'J1', 't0', 't1', 'p', 'W2', 'W1', '.(x)', "t'", "J'", 't"', 'J"', "w'", "M'", 'j', 'w"', 'M"', 'ze', 'j0', 'j1', '.id', 'M1', 'M0', 'w1', 'M2', "j'", 'j"', 'mu', 'et0', 'et1', 'H1_', "Z'", 'Z"', 'et"', "et'", 'Z0', 'Z1', 'C', 'P1', 'K', "z'", 'z"', 'P"', 'S', "v'_", "P'", 'z0', 'z1', 'c', 'L1_', '0w', "s'_", 'k', './\\', 'r"', 's', 'ch', 'O1_', 'S2', 'S1', 'S0', 'S"', 'v"_', 'b0_', 'ps', "m'", 'E1', 'm"', 'C"', 'o0_', "C'", 'G1_', 'm1', 'm0', 'C2', 'ph', 'C0', 'S1_', "q'", 'F', 'c"', "c'", 'N', 'V', 'f0_', 'F0', 'F1', 'F2', 'p1', '0t', 'f', 'D1_', 'n', 'p"', 'ch1', 'F"', "p'", 'v', 'f1', 'ph"', "ph'", 'o"_', 'f"', "f'", 'ph1', 'ph0', "ze'", 'V"', '.dom', "O'", "V'", 'I1', 'I0', 's1', 's0', 's"_', 'th1', 'th0', 'V0', 'V1', 'V2', 'V3', "th'", 's"', 'I"', "s'", 'th"', "I'", 'A', "L'", 'v"', 'L"', 'ta1', "v'", 'I', 'i0', 'Q', 'v1', 'v2', "ta'", 'L2', 'L3', 'L0', 'L1', 'Y', 'i"', '/t', "i'", 'a', 'si1', 'si0', 'Y1', 'i', 'Y"', 'D2', "Y'", 'q', 'si"', 'si', 'ze1', "si'", 'y', 'Y0', 'I2', 'ps"', 'y"', "ps'", "y'", 'y1', 'y0', 'ps0', 'ps1', 'O2', 'O1', 'O0', "o'_", 'la', '.Morphism', 'n0_', 'k0', 'O"', 'ze"', 'R0', 'D', 'L', 'ze0', "R'", 'c1_', 'T', 'R"', '.X.', '.1.', 'a0_', "l'", "B'", 'l"', 'd', 'B"', 'l', 'B0', 't', 'l0', 'l1', "b'", 'b"', '.(+)', 'U1', 'U0', 'h"', 'b0', 'b1', 'ta0', "U'", 'U"', "o'", "S'", "E'", 'o"', 'E"', 'i1', '+w', 'F1_', '.xb', 'Ro1', 'Ro2', 'rh', 'E0', 'o1', 'o0', "F'", 'B2', 'G', 'R1_', "e'", 'W0', 'I1_', 'O', 'e"', '._|_', 'W', 'x', 'e1', 'e0', '1t', '<_b', 'v0', '<_a', 'r0', 'r1', 'g', 'H2', 'H0', 'H1', 'o', "r'", 'w', 'H"', '.x.', "H'", 'K"', "K'", 'ta"', 'h0', 'h1', "X'", 'K1', 'K0', "T'", 'ch"', "h'", 'M1_', '.*', '.+', '.,', '.-', './', 'X"', 'u1', 'u0', '.<', "u'", 'X1', 'u"', 'ch0_', 'B', 'x"', 'N"', 'J', "x'", "N'", 'R', '.^', 'N0', 'N1', 'x0', 'Z', '.cod', 'C1', 'b', 'o1_', 'X0', '.graph', 'r', '.~', 'B1_', 'z', '.t', '.<_', '.w', 'Q1', 'Q0', 'V1_', 'rh1', 'rh0', 'w0', '~t', '~w', 'Q"', "Q'", 'et', 'rh"', '.||', "rh'", 'k"', 'A"', "k'", "A'", 'P0', 'a1_', '.\\/', 'B1', 'A1', 'A0', 'k1', 'A2', 'C1_', '.+b', 'a"', 'E', "a'", 'A1_', 'M', 'T0', 'T1', 'a1', 'a0', 'U', 'b1_', 'T"', 'ka', 'e', "D'", 'n"', 'm', "n'", 'u', 'D0', 'n1', '.Object', '.+^', 'D1'])
def parse_new_statement(self,statement,context):
string, tree = self.parse_statement(statement,context)
self.search.clear()
self.failed_parses = set()
return string,tree
# This checks whether we're just parsing a variable and otherwise check whether any of the propositions
# describes it.
def parse_statement(self,statement,context):
# this should return a parse tree for a list of strings that form statement
# check whether we are doomed to failure
if (statement[0],len(statement)) in self.failed_parses:
return False,False
# attempt to search for the phrase
tuples = self.search.find_longest(statement)
if tuples: return tuples
# check if it's just a variable
# if statement[1] in self.variables and not (statement[0]=='wff' and statement[1] not in self.wff_variables) and not (statement[0]=='set' and statement[1] not in self.set_variables):
# #yep. It's just a variable. Skip the type checking. Variables can only have one type ever, right?
# length = 1; # exclude the wff term
# string = statement[:2]
# tree = Tree(value='VAR'+statement[1])
# return string,tree
if statement[1] in self.variables:
for f in context.f.values():
if f.variable == statement[1] and f.vclass == statement[0]:
return statement[:2], f.tree
#found_valid_parsing = False
for prop_label in self.propositions:
prop = self.propositions[prop_label]
string, tree = self.proposition_describes_statement(prop,statement,context)
if string == False:
continue
#print 'found',string
#if statement[0]=='wff'
self.search.add(string,(string,tree)) # add to the search tree
return string, tree
#print 'could not find expression for ',statement
self.failed_parses.add((statement[0],len(statement)))
return False, False
# this is a brutally inefficient way to go about this
# when it completes it returns a parse tree for the statement and the length of the tree
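    # Hypothetical illustration (not from the original source): if a proposition's
    # statement were ['wff', '(', 'ph', '->', 'ps', ')'], matching it against
    # s = ['wff', '(', 'ch', '->', 'th', ')'] would consume the literal tokens
    # directly, recursively parse the sub-expressions bound to the free variables
    # 'ph' and 'ps', and return the matched prefix of s together with a Tree whose
    # value is the proposition's label and whose leaves are the variables' subtrees.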
def proposition_describes_statement(self,proposition,s,context):
prop_s = proposition.statement
# the types of all the free variables
variable_types = {hyp.variable:hyp.vclass for hyp in proposition.f.values()}
# string definitions of all the free variables
variable_definitions = {hyp.variable:None for hyp in proposition.f.values()}
#tree defintions of all the free variables
variable_trees = {hyp.variable:None for hyp in proposition.f.values()}
index_into_s = 0
index_into_prop_s=0
while index_into_prop_s < len(prop_s):
if index_into_s>=len(s):
#print 'ran out of s'
return False,False
prop_value = prop_s[index_into_prop_s]
if prop_value in variable_types:
# it's a variable
# check if already defined
if variable_definitions[prop_value]==None:
#then we need to figure out the parsing of the substatement
#print 'testing ',[variable_types[prop_value]]+s[index_into_s:],' because of ',proposition.label,prop_s
string, tree = self.parse_statement([variable_types[prop_value]]+s[index_into_s:],context)
if string == False:
return False,False
length = len(string)-1 # skip the wff/set/class bit
index_into_s+=length
index_into_prop_s+=1
variable_definitions[prop_value] = string[1:]
variable_trees[prop_value] = tree
continue
else:
#we've already seen this expression before
                    if is_initial_string(variable_definitions[prop_value], s[index_into_s:]):  # compare against the remaining tokens of s
# Yes, yes, we get the point
index_into_s+=len(variable_definitions[prop_value])
index_into_prop_s+=1
continue
else:
return False,False # eh. Whatever.
else:
#it's not a variable
if prop_value == s[index_into_s]:
index_into_s+=1
index_into_prop_s+=1
continue
else:
#it's not a variable and it doesn't match
return False,False
#we have the entire parsing and it appears to work
leaves = [variable_trees[hyp.variable] for hyp in proposition.hyps if hyp.type == 'f']
tree = Tree(value=proposition.label,leaves = leaves)
#now construct the string.
out_string = s[:index_into_s]
return out_string, tree
def is_initial_string(initial_string,string):
#return (len(string)>=len(initial_string)) and string[:len(initial_string)]==initial_string
if len(initial_string)>len(string): return False
for i in range(len(initial_string)):
if initial_string[i]!=string[i]: return False
return True
| 57.671958 | 2,466 | 0.503761 | [
"MIT"
] | dwhalen/holophrasm | statement_to_tree.py | 10,900 | Python |
"""
const $ = new Env("京东饭粒");
JD Fanli ("京东饭粒") task
Activity entry: https://u.jd.com/ytWx4w0
A tiny 60 JD beans per day; take it or leave it
cron:
46 9 * * * jd_fanli.py
"""
import os
import time
import re
import requests
import random
proxies = {"http": None, "https": None}
def randomstr(num):
randomstr = ""
for i in range(num):
randomstr = randomstr + random.choice("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789")
return randomstr
def randomstr1():
randomstr = ""
for i in range(16):
randomstr = randomstr + random.choice("0123456789")
randomstr += "-"
for i in range(16):
randomstr = randomstr + random.choice("0123456789")
return randomstr
def getheader(ck):
return {
"Host": "ifanli.m.jd.com",
"Connection": "keep-alive",
"Accept": "application/json, text/plain, */*",
"Cache-Control": "no-cache",
"User-Agent": "jdapp;android;10.2.2;11;%s;model/Mi 10;osVer/30;appBuild/91077;partner/xiaomi001;eufv/1;jdSupportDarkMode/0;Mozilla/5.0 (Linux; Android 11; Mi 10 Build/RKQ1.200826.002; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/77.0.3865.120 MQQBrowser/6.2 TBS/045715 Mobile Safari/537.36" % randomstr1(),
"Sec-Fetch-Mode": "cors",
"X-Requested-With": "com.jingdong.app.mall",
"Sec-Fetch-Site": "same-origin",
"Referer": "https://ifanli.m.jd.com/rebate/earnBean.html?paltform=null",
"Accept-Encoding": "gzip, deflate, br",
"Accept-Language": "zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7",
"Cookie": ck,
"Content-Type": "application/json;charset=UTF-8"
}
def getTaskList(ck):
url = "https://ifanli.m.jd.com/rebateapi/task/getTaskList"
headers = getheader(ck)
r = requests.get(url, headers=headers, proxies=proxies)
# print(r.text)
return r.json()["content"]
def getTaskFinishCount(ck):
url = "https://ifanli.m.jd.com/rebateapi/task/getTaskFinishCount"
headers = getheader(ck)
r = requests.get(url, headers=headers, proxies=proxies)
    print('Tasks completed:', r.json()["content"]["finishCount"], 'Total task count:', r.json()["content"]["maxTaskCount"])
return r.json()["content"]
def saveTaskRecord(ck, taskId):
url = "https://ifanli.m.jd.com/rebateapi/task/saveTaskRecord"
headers = getheader(ck)
data = '{"taskId":%s,"taskType":4}' % taskId
r = requests.post(url, headers=headers, data=data, proxies=proxies)
# print(r.text)
return r.json()["content"]["uid"], r.json()["content"]["tt"]
def saveTaskRecord1(ck, taskId, uid, tt):
# tt=int(time.time()*1000)
url = "https://ifanli.m.jd.com/rebateapi/task/saveTaskRecord"
headers = getheader(ck)
data = '{"taskId":%s,"taskType":4,"uid":"%s","tt":%s}' % (taskId, uid, tt)
# print(data)
r = requests.post(url, headers=headers, data=data, proxies=proxies)
print(r.json()["content"]["msg"])
if __name__ == '__main__':
cks = os.environ["JD_COOKIE"].split("&")
for ck in cks:
ptpin = re.findall(r"pt_pin=(.*?);", ck)[0]
print("--------开始京东账号", ptpin, "--------")
try:
count = getTaskFinishCount(ck)
if count["finishCount"] < count["maxTaskCount"]:
for times in range(count["maxTaskCount"] - count["finishCount"]):
tasks = getTaskList(ck)
for i in tasks:
if i["taskType"] == 4:
uid, tt = saveTaskRecord(ck, i["taskId"])
time.sleep(10)
saveTaskRecord1(ck, i["taskId"], uid, tt)
except:
print("发生异常错误")
| 33.453704 | 331 | 0.599779 | [
"Apache-2.0"
] | w123113/loon | jd_fanli.py | 3,715 | Python |
"""
test all other .agg behavior
"""
from collections import OrderedDict
import datetime as dt
from functools import partial
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
Index,
MultiIndex,
PeriodIndex,
Series,
date_range,
period_range,
)
from pandas.core.groupby.groupby import SpecificationError
import pandas.util.testing as tm
from pandas.io.formats.printing import pprint_thing
def test_agg_api():
# GH 6337
# http://stackoverflow.com/questions/21706030/pandas-groupby-agg-function-column-dtype-error
# different api for agg when passed custom function with mixed frame
df = DataFrame(
{
"data1": np.random.randn(5),
"data2": np.random.randn(5),
"key1": ["a", "a", "b", "b", "a"],
"key2": ["one", "two", "one", "two", "one"],
}
)
grouped = df.groupby("key1")
def peak_to_peak(arr):
return arr.max() - arr.min()
expected = grouped.agg([peak_to_peak])
expected.columns = ["data1", "data2"]
result = grouped.agg(peak_to_peak)
tm.assert_frame_equal(result, expected)
def test_agg_datetimes_mixed():
data = [[1, "2012-01-01", 1.0], [2, "2012-01-02", 2.0], [3, None, 3.0]]
df1 = DataFrame(
{
"key": [x[0] for x in data],
"date": [x[1] for x in data],
"value": [x[2] for x in data],
}
)
data = [
[
row[0],
(dt.datetime.strptime(row[1], "%Y-%m-%d").date() if row[1] else None),
row[2],
]
for row in data
]
df2 = DataFrame(
{
"key": [x[0] for x in data],
"date": [x[1] for x in data],
"value": [x[2] for x in data],
}
)
df1["weights"] = df1["value"] / df1["value"].sum()
gb1 = df1.groupby("date").aggregate(np.sum)
df2["weights"] = df1["value"] / df1["value"].sum()
gb2 = df2.groupby("date").aggregate(np.sum)
assert len(gb1) == len(gb2)
def test_agg_period_index():
prng = period_range("2012-1-1", freq="M", periods=3)
df = DataFrame(np.random.randn(3, 2), index=prng)
rs = df.groupby(level=0).sum()
assert isinstance(rs.index, PeriodIndex)
# GH 3579
index = period_range(start="1999-01", periods=5, freq="M")
s1 = Series(np.random.rand(len(index)), index=index)
s2 = Series(np.random.rand(len(index)), index=index)
series = [("s1", s1), ("s2", s2)]
df = DataFrame.from_dict(OrderedDict(series))
grouped = df.groupby(df.index.month)
list(grouped)
def test_agg_dict_parameter_cast_result_dtypes():
# GH 12821
df = DataFrame(
{
"class": ["A", "A", "B", "B", "C", "C", "D", "D"],
"time": date_range("1/1/2011", periods=8, freq="H"),
}
)
df.loc[[0, 1, 2, 5], "time"] = None
# test for `first` function
exp = df.loc[[0, 3, 4, 6]].set_index("class")
grouped = df.groupby("class")
tm.assert_frame_equal(grouped.first(), exp)
tm.assert_frame_equal(grouped.agg("first"), exp)
tm.assert_frame_equal(grouped.agg({"time": "first"}), exp)
tm.assert_series_equal(grouped.time.first(), exp["time"])
tm.assert_series_equal(grouped.time.agg("first"), exp["time"])
# test for `last` function
exp = df.loc[[0, 3, 4, 7]].set_index("class")
grouped = df.groupby("class")
tm.assert_frame_equal(grouped.last(), exp)
tm.assert_frame_equal(grouped.agg("last"), exp)
tm.assert_frame_equal(grouped.agg({"time": "last"}), exp)
tm.assert_series_equal(grouped.time.last(), exp["time"])
tm.assert_series_equal(grouped.time.agg("last"), exp["time"])
# count
exp = pd.Series([2, 2, 2, 2], index=Index(list("ABCD"), name="class"), name="time")
tm.assert_series_equal(grouped.time.agg(len), exp)
tm.assert_series_equal(grouped.time.size(), exp)
exp = pd.Series([0, 1, 1, 2], index=Index(list("ABCD"), name="class"), name="time")
tm.assert_series_equal(grouped.time.count(), exp)
def test_agg_cast_results_dtypes():
# similar to GH12821
# xref #11444
u = [dt.datetime(2015, x + 1, 1) for x in range(12)]
v = list("aaabbbbbbccd")
df = pd.DataFrame({"X": v, "Y": u})
result = df.groupby("X")["Y"].agg(len)
expected = df.groupby("X")["Y"].count()
tm.assert_series_equal(result, expected)
def test_aggregate_float64_no_int64():
# see gh-11199
df = DataFrame({"a": [1, 2, 3, 4, 5], "b": [1, 2, 2, 4, 5], "c": [1, 2, 3, 4, 5]})
expected = DataFrame({"a": [1, 2.5, 4, 5]}, index=[1, 2, 4, 5])
expected.index.name = "b"
result = df.groupby("b")[["a"]].mean()
tm.assert_frame_equal(result, expected)
expected = DataFrame({"a": [1, 2.5, 4, 5], "c": [1, 2.5, 4, 5]}, index=[1, 2, 4, 5])
expected.index.name = "b"
result = df.groupby("b")[["a", "c"]].mean()
tm.assert_frame_equal(result, expected)
def test_aggregate_api_consistency():
# GH 9052
# make sure that the aggregates via dict
# are consistent
df = DataFrame(
{
"A": ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"],
"B": ["one", "one", "two", "two", "two", "two", "one", "two"],
"C": np.random.randn(8) + 1.0,
"D": np.arange(8),
}
)
grouped = df.groupby(["A", "B"])
c_mean = grouped["C"].mean()
c_sum = grouped["C"].sum()
d_mean = grouped["D"].mean()
d_sum = grouped["D"].sum()
result = grouped["D"].agg(["sum", "mean"])
expected = pd.concat([d_sum, d_mean], axis=1)
expected.columns = ["sum", "mean"]
tm.assert_frame_equal(result, expected, check_like=True)
result = grouped.agg([np.sum, np.mean])
expected = pd.concat([c_sum, c_mean, d_sum, d_mean], axis=1)
expected.columns = MultiIndex.from_product([["C", "D"], ["sum", "mean"]])
tm.assert_frame_equal(result, expected, check_like=True)
result = grouped[["D", "C"]].agg([np.sum, np.mean])
expected = pd.concat([d_sum, d_mean, c_sum, c_mean], axis=1)
expected.columns = MultiIndex.from_product([["D", "C"], ["sum", "mean"]])
tm.assert_frame_equal(result, expected, check_like=True)
result = grouped.agg({"C": "mean", "D": "sum"})
expected = pd.concat([d_sum, c_mean], axis=1)
tm.assert_frame_equal(result, expected, check_like=True)
result = grouped.agg({"C": ["mean", "sum"], "D": ["mean", "sum"]})
expected = pd.concat([c_mean, c_sum, d_mean, d_sum], axis=1)
expected.columns = MultiIndex.from_product([["C", "D"], ["mean", "sum"]])
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = grouped[["D", "C"]].agg({"r": np.sum, "r2": np.mean})
expected = pd.concat([d_sum, c_sum, d_mean, c_mean], axis=1)
expected.columns = MultiIndex.from_product([["r", "r2"], ["D", "C"]])
tm.assert_frame_equal(result, expected, check_like=True)
def test_agg_dict_renaming_deprecation():
# 15931
df = pd.DataFrame({"A": [1, 1, 1, 2, 2], "B": range(5), "C": range(5)})
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False) as w:
df.groupby("A").agg(
{"B": {"foo": ["sum", "max"]}, "C": {"bar": ["count", "min"]}}
)
assert "using a dict with renaming" in str(w[0].message)
assert "named aggregation" in str(w[0].message)
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
df.groupby("A")[["B", "C"]].agg({"ma": "max"})
with tm.assert_produces_warning(FutureWarning) as w:
df.groupby("A").B.agg({"foo": "count"})
assert "using a dict on a Series for aggregation" in str(w[0].message)
assert "named aggregation instead." in str(w[0].message)
def test_agg_compat():
# GH 12334
df = DataFrame(
{
"A": ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"],
"B": ["one", "one", "two", "two", "two", "two", "one", "two"],
"C": np.random.randn(8) + 1.0,
"D": np.arange(8),
}
)
g = df.groupby(["A", "B"])
expected = pd.concat([g["D"].sum(), g["D"].std()], axis=1)
expected.columns = MultiIndex.from_tuples([("C", "sum"), ("C", "std")])
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = g["D"].agg({"C": ["sum", "std"]})
tm.assert_frame_equal(result, expected, check_like=True)
expected = pd.concat([g["D"].sum(), g["D"].std()], axis=1)
expected.columns = ["C", "D"]
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = g["D"].agg({"C": "sum", "D": "std"})
tm.assert_frame_equal(result, expected, check_like=True)
def test_agg_nested_dicts():
# API change for disallowing these types of nested dicts
df = DataFrame(
{
"A": ["foo", "bar", "foo", "bar", "foo", "bar", "foo", "foo"],
"B": ["one", "one", "two", "two", "two", "two", "one", "two"],
"C": np.random.randn(8) + 1.0,
"D": np.arange(8),
}
)
g = df.groupby(["A", "B"])
msg = r"cannot perform renaming for r[1-2] with a nested dictionary"
with pytest.raises(SpecificationError, match=msg):
g.aggregate({"r1": {"C": ["mean", "sum"]}, "r2": {"D": ["mean", "sum"]}})
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = g.agg({"C": {"ra": ["mean", "std"]}, "D": {"rb": ["mean", "std"]}})
expected = pd.concat(
[g["C"].mean(), g["C"].std(), g["D"].mean(), g["D"].std()], axis=1
)
expected.columns = pd.MultiIndex.from_tuples(
[("ra", "mean"), ("ra", "std"), ("rb", "mean"), ("rb", "std")]
)
tm.assert_frame_equal(result, expected, check_like=True)
# same name as the original column
# GH9052
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
expected = g["D"].agg({"result1": np.sum, "result2": np.mean})
expected = expected.rename(columns={"result1": "D"})
with tm.assert_produces_warning(FutureWarning, check_stacklevel=False):
result = g["D"].agg({"D": np.sum, "result2": np.mean})
tm.assert_frame_equal(result, expected, check_like=True)
def test_agg_item_by_item_raise_typeerror():
df = DataFrame(np.random.randint(10, size=(20, 10)))
def raiseException(df):
pprint_thing("----------------------------------------")
pprint_thing(df.to_string())
raise TypeError("test")
with pytest.raises(TypeError, match="test"):
df.groupby(0).agg(raiseException)
def test_series_agg_multikey():
ts = tm.makeTimeSeries()
grouped = ts.groupby([lambda x: x.year, lambda x: x.month])
result = grouped.agg(np.sum)
expected = grouped.sum()
tm.assert_series_equal(result, expected)
def test_series_agg_multi_pure_python():
data = DataFrame(
{
"A": [
"foo",
"foo",
"foo",
"foo",
"bar",
"bar",
"bar",
"bar",
"foo",
"foo",
"foo",
],
"B": [
"one",
"one",
"one",
"two",
"one",
"one",
"one",
"two",
"two",
"two",
"one",
],
"C": [
"dull",
"dull",
"shiny",
"dull",
"dull",
"shiny",
"shiny",
"dull",
"shiny",
"shiny",
"shiny",
],
"D": np.random.randn(11),
"E": np.random.randn(11),
"F": np.random.randn(11),
}
)
def bad(x):
assert len(x.values.base) > 0
return "foo"
result = data.groupby(["A", "B"]).agg(bad)
expected = data.groupby(["A", "B"]).agg(lambda x: "foo")
tm.assert_frame_equal(result, expected)
def test_agg_consistency():
# agg with ([]) and () not consistent
# GH 6715
def P1(a):
try:
return np.percentile(a.dropna(), q=1)
except Exception:
return np.nan
df = DataFrame(
{
"col1": [1, 2, 3, 4],
"col2": [10, 25, 26, 31],
"date": [
dt.date(2013, 2, 10),
dt.date(2013, 2, 10),
dt.date(2013, 2, 11),
dt.date(2013, 2, 11),
],
}
)
g = df.groupby("date")
expected = g.agg([P1])
expected.columns = expected.columns.levels[0]
result = g.agg(P1)
tm.assert_frame_equal(result, expected)
def test_agg_callables():
# GH 7929
df = DataFrame({"foo": [1, 2], "bar": [3, 4]}).astype(np.int64)
class fn_class:
def __call__(self, x):
return sum(x)
equiv_callables = [
sum,
np.sum,
lambda x: sum(x),
lambda x: x.sum(),
partial(sum),
fn_class(),
]
expected = df.groupby("foo").agg(sum)
for ecall in equiv_callables:
result = df.groupby("foo").agg(ecall)
tm.assert_frame_equal(result, expected)
def test_agg_over_numpy_arrays():
# GH 3788
df = pd.DataFrame(
[
[1, np.array([10, 20, 30])],
[1, np.array([40, 50, 60])],
[2, np.array([20, 30, 40])],
],
columns=["category", "arraydata"],
)
result = df.groupby("category").agg(sum)
expected_data = [[np.array([50, 70, 90])], [np.array([20, 30, 40])]]
expected_index = pd.Index([1, 2], name="category")
expected_column = ["arraydata"]
expected = pd.DataFrame(
expected_data, index=expected_index, columns=expected_column
)
tm.assert_frame_equal(result, expected)
def test_agg_timezone_round_trip():
# GH 15426
ts = pd.Timestamp("2016-01-01 12:00:00", tz="US/Pacific")
df = pd.DataFrame(
{"a": 1, "b": [ts + dt.timedelta(minutes=nn) for nn in range(10)]}
)
result1 = df.groupby("a")["b"].agg(np.min).iloc[0]
result2 = df.groupby("a")["b"].agg(lambda x: np.min(x)).iloc[0]
result3 = df.groupby("a")["b"].min().iloc[0]
assert result1 == ts
assert result2 == ts
assert result3 == ts
dates = [
pd.Timestamp("2016-01-0%d 12:00:00" % i, tz="US/Pacific") for i in range(1, 5)
]
df = pd.DataFrame({"A": ["a", "b"] * 2, "B": dates})
grouped = df.groupby("A")
ts = df["B"].iloc[0]
assert ts == grouped.nth(0)["B"].iloc[0]
assert ts == grouped.head(1)["B"].iloc[0]
assert ts == grouped.first()["B"].iloc[0]
# GH#27110 applying iloc should return a DataFrame
assert ts == grouped.apply(lambda x: x.iloc[0]).iloc[0, 0]
ts = df["B"].iloc[2]
assert ts == grouped.last()["B"].iloc[0]
# GH#27110 applying iloc should return a DataFrame
assert ts == grouped.apply(lambda x: x.iloc[-1]).iloc[0, 0]
def test_sum_uint64_overflow():
# see gh-14758
# Convert to uint64 and don't overflow
df = pd.DataFrame([[1, 2], [3, 4], [5, 6]], dtype=object)
df = df + 9223372036854775807
index = pd.Index(
[9223372036854775808, 9223372036854775810, 9223372036854775812], dtype=np.uint64
)
expected = pd.DataFrame(
{1: [9223372036854775809, 9223372036854775811, 9223372036854775813]},
index=index,
)
expected.index.name = 0
result = df.groupby(0).sum()
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"structure, expected",
[
(tuple, pd.DataFrame({"C": {(1, 1): (1, 1, 1), (3, 4): (3, 4, 4)}})),
(list, pd.DataFrame({"C": {(1, 1): [1, 1, 1], (3, 4): [3, 4, 4]}})),
(
lambda x: tuple(x),
pd.DataFrame({"C": {(1, 1): (1, 1, 1), (3, 4): (3, 4, 4)}}),
),
(
lambda x: list(x),
pd.DataFrame({"C": {(1, 1): [1, 1, 1], (3, 4): [3, 4, 4]}}),
),
],
)
def test_agg_structs_dataframe(structure, expected):
df = pd.DataFrame(
{"A": [1, 1, 1, 3, 3, 3], "B": [1, 1, 1, 4, 4, 4], "C": [1, 1, 1, 3, 4, 4]}
)
result = df.groupby(["A", "B"]).aggregate(structure)
expected.index.names = ["A", "B"]
tm.assert_frame_equal(result, expected)
@pytest.mark.parametrize(
"structure, expected",
[
(tuple, pd.Series([(1, 1, 1), (3, 4, 4)], index=[1, 3], name="C")),
(list, pd.Series([[1, 1, 1], [3, 4, 4]], index=[1, 3], name="C")),
(lambda x: tuple(x), pd.Series([(1, 1, 1), (3, 4, 4)], index=[1, 3], name="C")),
(lambda x: list(x), pd.Series([[1, 1, 1], [3, 4, 4]], index=[1, 3], name="C")),
],
)
def test_agg_structs_series(structure, expected):
# Issue #18079
df = pd.DataFrame(
{"A": [1, 1, 1, 3, 3, 3], "B": [1, 1, 1, 4, 4, 4], "C": [1, 1, 1, 3, 4, 4]}
)
result = df.groupby("A")["C"].aggregate(structure)
expected.index.name = "A"
tm.assert_series_equal(result, expected)
def test_agg_category_nansum(observed):
categories = ["a", "b", "c"]
df = pd.DataFrame(
{"A": pd.Categorical(["a", "a", "b"], categories=categories), "B": [1, 2, 3]}
)
result = df.groupby("A", observed=observed).B.agg(np.nansum)
expected = pd.Series(
[3, 3, 0],
index=pd.CategoricalIndex(["a", "b", "c"], categories=categories, name="A"),
name="B",
)
if observed:
expected = expected[expected != 0]
tm.assert_series_equal(result, expected)
def test_agg_list_like_func():
# GH 18473
df = pd.DataFrame(
{"A": [str(x) for x in range(3)], "B": [str(x) for x in range(3)]}
)
grouped = df.groupby("A", as_index=False, sort=False)
result = grouped.agg({"B": lambda x: list(x)})
expected = pd.DataFrame(
{"A": [str(x) for x in range(3)], "B": [[str(x)] for x in range(3)]}
)
tm.assert_frame_equal(result, expected)
def test_agg_lambda_with_timezone():
# GH 23683
df = pd.DataFrame(
{
"tag": [1, 1],
"date": [
pd.Timestamp("2018-01-01", tz="UTC"),
pd.Timestamp("2018-01-02", tz="UTC"),
],
}
)
result = df.groupby("tag").agg({"date": lambda e: e.head(1)})
expected = pd.DataFrame(
[pd.Timestamp("2018-01-01", tz="UTC")],
index=pd.Index([1], name="tag"),
columns=["date"],
)
tm.assert_frame_equal(result, expected)
| 30.695222 | 96 | 0.537784 | [
"BSD-3-Clause"
] | ajspera/pandas | pandas/tests/groupby/aggregate/test_other.py | 18,632 | Python |
#!/usr/bin/python
# encoding: utf-8
'''
codegen_checker.codegen -- shortdesc
codegen_checker.codegen is a description
It defines classes_and_methods
@author: user_name
@copyright: 2013 organization_name. All rights reserved.
@license: license
@contact: user_email
@deffield updated: Updated
'''
import sys
import os
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
import glob
import re
# Local imports
import logger
__all__ = []
__version__ = 0.1
__date__ = '2013-03-28'
__updated__ = '2013-03-28'
class CLIError(Exception):
'''Generic exception to raise and log different fatal errors.'''
def __init__(self, msg):
super(CLIError).__init__(type(self))
self.msg = "E: %s" % msg
def __str__(self):
return self.msg
def __unicode__(self):
return self.msg
def get_class( kls ):
parts = kls.split('.')
module = ".".join(parts[:-1])
m = __import__( module )
for comp in parts[1:]:
m = getattr(m, comp)
return m
def readVars(filename):
result = {}
# Prepare a whitespace remover
wsr = re.compile(r'\s+')
# Read the file in a line at a time
for line in open(filename):
m = re.match("^\s(.+)\s(\S+);$", line)
if m:
result[m.group(2)] = re.sub(wsr, "", m.group(1))
return result
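# Illustrative example (added note, not from the original file): a declaration line
# such as " unsigned int counter;" yields {'counter': 'unsignedint'}: the last token
# before ';' is taken as the name and the whitespace-stripped remainder as its type.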
def main(argv=None): # IGNORE:C0111
'''Command line options.'''
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
program_name = os.path.basename(sys.argv[0])
program_version = "v%s" % __version__
program_build_date = str(__updated__)
program_version_message = '%%(prog)s %s (%s)' % (program_version, program_build_date)
program_shortdesc = __import__('__main__').__doc__.split("\n")[1]
program_license = '''%s
Created by user_name on %s.
Copyright 2013 organization_name. All rights reserved.
Licensed under the Apache License 2.0
http://www.apache.org/licenses/LICENSE-2.0
Distributed on an "AS IS" basis without warranties
or conditions of any kind, either express or implied.
EXAMPLE
./tools/codegen/codegen_checker.py -m ./build/tools/strukt_autogen/ -s ./
USAGE
''' % (program_shortdesc, str(__date__))
try:
# Setup argument parser
parser = ArgumentParser(description=program_license, formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-v", "--verbose", dest="verbose", action="count", help="set verbosity level [default: %(default)s]")
parser.add_argument("-m", "--model", dest="model", required=True, help="Model directory")
parser.add_argument("-s", "--src", dest="src", required=True, help="Source directory")
parser.add_argument('-V', '--version', action='version', version=program_version_message)
# Process arguments
args = parser.parse_args()
log = logger.logger(args.verbose)
modelDir = args.model
srcDir = args.src
log.write("Verbose mode on")
# Get the directories in canonical form
modelDir = os.path.abspath(modelDir) + "/"
srcDir = os.path.abspath(srcDir) + "/"
# Find all the model files
modelFiles = []
modelFiles.extend(glob.glob(modelDir+"*.h"))
for modelFile in modelFiles:
# Get the basename
filename = os.path.basename(modelFile)
# Try to find the existing header file in the usual place.
potentialMatches = []
potentialMatches.extend(glob.glob(srcDir + "*/src/" + filename))
# Try to find the existing header file in the usual place but with underscores removed from the file name.
if (len(potentialMatches) == 0):
potentialMatches.extend(glob.glob(srcDir + "*/src/" + re.sub('_', '', filename)))
if (len(potentialMatches) == 0):
print "No matches for " + modelFile
for potentialMatch in potentialMatches:
output = []
# Parse the generated model file
modelVars = readVars(modelFile)
# Parse the header file
headerVars = readVars(potentialMatch)
# Compare variables, first starting with ones that are in the model file.
keysToRemove = []
for modelVar in modelVars:
# Is it in the header file?
if (modelVar in headerVars):
if (modelVars[modelVar] != headerVars[modelVar]):
output.append(" " + "Difference: " + modelFile + ":" + modelVar + " is of type " + modelVars[modelVar] + " but " + potentialMatch + ":" + modelVar + " is of type " + headerVars[modelVar])
keysToRemove.append(modelVar)
# Remove keys that we have processed
for key in keysToRemove:
if (key in modelVars):
del modelVars[key]
if (key in headerVars):
del headerVars[key]
# Output missing vars
for modelVar in modelVars:
output.append(" " + modelFile + ":" + modelVar + " is not in " + potentialMatch)
for headerVar in headerVars:
output.append(" " + potentialMatch + ":" + headerVar + " is not in " + modelFile)
if (len(output) > 0):
print "Comparing " + modelFile + " with " + potentialMatch
for line in output:
print line
return 0
except KeyboardInterrupt:
### handle keyboard interrupt ###
return 0
# except Exception, e:
# indent = len(program_name) * " "
# sys.stderr.write(program_name + ": " + repr(e) + "\n")
# sys.stderr.write(indent + " for help use --help")
# return 2
if __name__ == "__main__":
sys.exit(main()) | 33.636364 | 218 | 0.56248 | [
"Apache-2.0"
] | MarouenMechtri/accords-platform-1 | tools/codegen/codegen_checker.py | 6,290 | Python |
# asot: Localhost tunneling
# Copyright 2021, Luna and asot contributors
# SPDX-License-Identifier: BSD-3-Clause
from .main import main
if __name__ == "__main__":
main()
| 19.555556 | 44 | 0.732955 | [
"BSD-3-Clause"
] | lun-4/asot | server/asot/manage/__main__.py | 176 | Python |
from datetime import datetime
import numpy as np
import exetera.core.session as sess
from exetera.core import dataframe
ADATA = '/home/jd21/data/processed_May17_processed.hdf5'
VDATA = '/home/jd21/data/vacc.0603.h5'
DSTDATA = '/home/jd21/data/full_merge.h5'
def asmt_merge_vacc():
"""
    Merge the assessment dataframe with the vaccine dataframe and keep only subjects that have a healthy (symptom-free) assessment shortly before the vaccine date
"""
with sess.Session() as s:
# open related datasets
src = s.open_dataset(ADATA, 'r', 'asmt')
asmt = src['assessments']
vacc = s.open_dataset(VDATA, 'r', 'vacc')
dst = s.open_dataset(DSTDATA, 'w', 'dst')
#filter vaccine type
vbrand_filter = (vacc['vaccine_doses']['brand'].data[:] == 2) | \
(vacc['vaccine_doses']['brand'].data[:] == 3)
dvacc = dst.create_dataframe('vacc')
vacc['vaccine_doses'].apply_filter(vbrand_filter, ddf=dvacc)
#join asmt with vaccine using patient_id, write to result
asmt_v = dst.create_dataframe('asmt_v')
dataframe.merge(asmt, dvacc, asmt_v, 'patient_id', 'patient_id', how='inner')
#filter healthy asmt record within 10days of vaccine date
symp_list = ['persistent_cough', 'fever', 'fatigue', 'delirium', 'shortness_of_breath', 'diarrhoea',
'abdominal_pain', 'chest_pain', 'hoarse_voice', 'skipped_meals', 'loss_of_smell', 'headache',
'sore_throat', 'chills_or_shivers', 'eye_soreness', 'nausea', 'blisters_on_feet',
'unusual_muscle_pains', 'runny_nose', 'red_welts_on_face_or_lips', 'dizzy_light_headed',
'swollen_glands', 'sneezing', 'skin_burning', 'earache', 'altered_smell', 'brain_fog',
'irregular_heartbeat']
symp_filter = asmt_v['persistent_cough'].data[:] > 1 # has symptom
for symptom1 in symp_list:
symp_filter |= asmt_v[symptom1].data[:] > 1 # has symptom
symp_filter = ~symp_filter # has no symptom
symp_filter &= asmt_v['date_taken_specific'].data[:] > asmt_v['updated_at_l'].data[:] # asmt before vaccine
symp_filter &= asmt_v['updated_at_l'].data[:] > asmt_v['date_taken_specific'].data[:] - 3600 * 24 * 10 # 10 days
asmt_v.apply_filter(symp_filter)
# has symptom after vaccine
yes_symp_filter = asmt_v['persistent_cough'].data[:] > 1
for symptom1 in symp_list:
yes_symp_filter |= asmt_v[symptom1].data[:] > 1 # has symptom
yes_symp_filter &= asmt_v['date_taken_specific'].data[:] < asmt_v['updated_at_l'].data[:] # assessment after vaccine
        yes_symp_filter &= asmt_v['date_taken_specific'].data[:] + 3600 * 24 * 10 > asmt_v['updated_at_l'].data[:]  # assessment within 10 days of vaccine
asmt_v.apply_filter(yes_symp_filter)
print("finish asmt join vaccine.")
def join_tests():
"""
    Merge tests into the previously merged (assessments, vaccine) dataframe and keep only subjects that have test records within 10 days after the vaccine
"""
with sess.Session() as s:
# open related datasets
src = s.open_dataset(ADATA, 'r', 'asmt')
tests_src = src['tests']
dst = s.open_dataset(DSTDATA, 'r+', 'dst')
vacc = dst['asmt_v']
tests_m = dst.create_dataframe('tests_m')
dataframe.merge(vacc, tests_src, tests_m, 'patient_id_l', 'patient_id', how='inner')
# filter out subjects has tests after 10days of vaccine
# date_taken_specific_l is vaccine date, date_taken_specific_r is tests date
        test_filter = tests_m['date_taken_specific_l'].data[:] < tests_m['date_taken_specific_r'].data[:]  # test after vaccine
        test_filter &= tests_m['date_taken_specific_l'].data[:] > (tests_m['date_taken_specific_r'].data[:] - 3600 * 24 * 10)
tests_m.apply_filter(test_filter)
def count():
with sess.Session() as s:
# open related datasets
dst = s.open_dataset(DSTDATA, 'r', 'dst')
vacc = dst['tests_m']
print(len(dst['tests_m']['patient_id_l_l']))
if __name__ == '__main__':
print(datetime.now())
asmt_merge_vacc()
join_tests()
#count()
print(datetime.now())
| 45.945055 | 153 | 0.643387 | [
"Apache-2.0"
] | KCL-BMEIS/ExeTeraCovid | scripts/asmt_merge_vacc_exetera.py | 4,181 | Python |
def can_build(plat):
return plat=="android"
def configure(env):
if (env['platform'] == 'android'):
env.android_add_dependency("compile 'com.google.android.gms:play-services-ads:8.3.0'")
env.android_add_java_dir("android")
env.android_add_to_manifest("android/AndroidManifestChunk.xml")
    else:
        env.disable_module()
| 29 | 88 | 0.752351 | [
"MIT"
] | Mavhod/GodotAdmob | admob/config.py | 319 | Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Public Title.
Doc str for module users
.. moduleauthor:: Max Wu <http://maxwu.me>
.. References::
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
.. Test Samples in doctest format
>>> None
"""
from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
with open('requirements.txt') as f:
required = f.read().splitlines()
with open(path.join(here, 'src', 'cistat', 'version.py')) as f:
exec(f.read())
VERSION = get_version()
setup(
name='cistat',
version=VERSION,
description='A sample Python project',
long_description=long_description,
# The project's main homepage.
url='https://github.com/maxwu/cistat',
# Author details
author='Max Wu',
author_email='[email protected]',
# Choose your license
license='MIT',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Testers, Developers',
'Topic :: Software Test :: Statistic Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
# 'Programming Language :: Python :: 3',
# 'Programming Language :: Python :: 3.3',
# 'Programming Language :: Python :: 3.4',
# 'Programming Language :: Python :: 3.5',
],
# What does your project relate to?
keywords='CI Stat CircleCI',
packages=find_packages("src"),
package_dir={"": "src"},
install_requires=required,
extras_require={
'dev': ['check-manifest'],
'test': ['coverage'],
},
# If there are data files included in your packages that need to be
# installed, specify them here. If using Python 2.6 or less, then these
# have to be included in MANIFEST.in as well.
#package_data={
# 'sample': ['package_data.dat'],
#},
# To provide executable scripts, use entry points in preference to the
# "scripts" keyword. Entry points provide cross-platform support and allow
# pip to create the appropriate form of executable for the target platform.
entry_points={
'console_scripts': [
'cistat-cli=cistat:cli_app',
],
},
)
| 29.87 | 79 | 0.634416 | [
"MIT"
] | maxwu/ci-stat | setup.py | 2,987 | Python |
import os
from databases import Database
from sqlalchemy import MetaData, create_engine
SQLALCHEMY_DATABASE_URL = (
os.environ.get("DATABASE_URL")
or '{}://{}:{}@{}:{}/{}'.format(
os.environ.get("DATABASE"),
os.environ.get("DB_USERNAME"),
os.environ.get("DB_PASSWORD"),
os.environ.get("DB_HOST"),
os.environ.get("DB_PORT"),
os.environ.get("DB_NAME"),
)
)
database = Database(
SQLALCHEMY_DATABASE_URL,
ssl=False,
min_size=5,
max_size=20,
)
engine = create_engine(
SQLALCHEMY_DATABASE_URL,
echo=False,
)
metadata = MetaData()
| 19.774194 | 46 | 0.631321 | [
"MIT"
] | lupinthe14th/ptodo | backend/app/database.py | 613 | Python |
from clvm_tools import binutils
from chinilla.types.blockchain_format.program import Program, INFINITE_COST
from chinilla.types.announcement import Announcement
from chinilla.types.blockchain_format.sized_bytes import bytes32
from chinilla.util.condition_tools import parse_sexp_to_conditions
from chinilla.wallet.puzzles.load_clvm import load_clvm
SINGLETON_MOD = load_clvm("singleton_top_layer.clvm")
LAUNCHER_PUZZLE = load_clvm("singleton_launcher.clvm")
P2_SINGLETON_MOD = load_clvm("p2_singleton.clvm")
POOL_MEMBER_MOD = load_clvm("pool_member_innerpuz.clvm")
POOL_WAITINGROOM_MOD = load_clvm("pool_waitingroom_innerpuz.clvm")
LAUNCHER_PUZZLE_HASH = LAUNCHER_PUZZLE.get_tree_hash()
SINGLETON_MOD_HASH = SINGLETON_MOD.get_tree_hash()
LAUNCHER_ID = Program.to(b"launcher-id").get_tree_hash()
POOL_REWARD_PREFIX_VANILLANET = bytes32.fromhex("ccd5bb71183532bff220ba46c268991a00000000000000000000000000000000")
def singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> Program:
return SINGLETON_MOD.curry((SINGLETON_MOD_HASH, (launcher_id, launcher_puzzle_hash)), inner_puzzle)
def p2_singleton_puzzle(launcher_id: Program, launcher_puzzle_hash: bytes32) -> Program:
return P2_SINGLETON_MOD.curry(SINGLETON_MOD_HASH, launcher_id, launcher_puzzle_hash)
def singleton_puzzle_hash(launcher_id: Program, launcher_puzzle_hash: bytes32, inner_puzzle: Program) -> bytes32:
return singleton_puzzle(launcher_id, launcher_puzzle_hash, inner_puzzle).get_tree_hash()
def p2_singleton_puzzle_hash(launcher_id: Program, launcher_puzzle_hash: bytes32) -> bytes32:
return p2_singleton_puzzle(launcher_id, launcher_puzzle_hash).get_tree_hash()
def test_only_odd_coins():
singleton_mod_hash = SINGLETON_MOD.get_tree_hash()
# (SINGLETON_STRUCT INNER_PUZZLE lineage_proof my_amount inner_solution)
# SINGLETON_STRUCT = (MOD_HASH . (LAUNCHER_ID . LAUNCHER_PUZZLE_HASH))
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 200))")),
[0xDEADBEEF, 0xCAFEF00D, 200],
200,
[],
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception as e:
assert e.args == ("clvm raise", "80")
else:
assert False
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 201))")),
[0xDEADBEEF, 0xCAFED00D, 210],
205,
0,
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception:
assert False
def test_only_one_odd_coin_created():
singleton_mod_hash = SINGLETON_MOD.get_tree_hash()
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 203) (51 0xfadeddab 205))")),
[0xDEADBEEF, 0xCAFEF00D, 411],
411,
[],
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception as e:
assert e.args == ("clvm raise", "80")
else:
assert False
solution = Program.to(
[
(singleton_mod_hash, (LAUNCHER_ID, LAUNCHER_PUZZLE_HASH)),
Program.to(binutils.assemble("(q (51 0xcafef00d 203) (51 0xfadeddab 204) (51 0xdeadbeef 202))")),
[0xDEADBEEF, 0xCAFEF00D, 411],
411,
[],
]
)
try:
cost, result = SINGLETON_MOD.run_with_cost(INFINITE_COST, solution)
except Exception:
assert False
def test_p2_singleton():
# create a singleton. This should call driver code.
launcher_id = LAUNCHER_ID
innerpuz = Program.to(1)
singleton_full_puzzle = singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH, innerpuz)
# create a fake coin id for the `p2_singleton`
p2_singleton_coin_id = Program.to(["test_hash"]).get_tree_hash()
expected_announcement = Announcement(singleton_full_puzzle.get_tree_hash(), p2_singleton_coin_id).name()
# create a `p2_singleton` puzzle. This should call driver code.
p2_singleton_full = p2_singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH)
solution = Program.to([innerpuz.get_tree_hash(), p2_singleton_coin_id])
cost, result = p2_singleton_full.run_with_cost(INFINITE_COST, solution)
err, conditions = parse_sexp_to_conditions(result)
assert err is None
p2_singleton_full = p2_singleton_puzzle(launcher_id, LAUNCHER_PUZZLE_HASH)
solution = Program.to([innerpuz.get_tree_hash(), p2_singleton_coin_id])
cost, result = p2_singleton_full.run_with_cost(INFINITE_COST, solution)
assert result.first().rest().first().as_atom() == expected_announcement
assert conditions[0].vars[0] == expected_announcement
| 39.433071 | 115 | 0.720447 | [
"Apache-2.0"
] | Chinilla/chinilla-blockchain | tests/wallet/test_singleton.py | 5,008 | Python |
from __future__ import annotations
import numpy as np
import tcod
import g
import game.constants
import game.engine
import game.game_map
import game.render_functions
from game.tiles import tile_graphics
def render_map(console: tcod.Console, gamemap: game.game_map.GameMap) -> None:
# The default graphics are of tiles that are visible.
light = tile_graphics[gamemap.tiles]
light[gamemap.fire > 0] = (ord("^"), (255, 255, 255), (0xCC, 0x22, 0))
# Apply effects to create a darkened map of tile graphics.
dark = gamemap.memory.copy()
dark["fg"] //= 2
dark["bg"] //= 8
visible = gamemap.visible
if g.fullbright:
visible = np.ones_like(visible)
for entity in sorted(gamemap.entities, key=lambda x: x.render_order.value):
if not visible[entity.x, entity.y]:
continue # Skip entities that are not in the FOV.
light[entity.x, entity.y]["ch"] = ord(entity.char)
light[entity.x, entity.y]["fg"] = entity.color
console.rgb[0 : gamemap.width, 0 : gamemap.height] = np.select(
condlist=[visible, gamemap.explored],
choicelist=[light, dark],
default=dark,
)
for entity in sorted(gamemap.entities, key=lambda x: x.render_order.value):
if not visible[entity.x, entity.y]:
continue # Skip entities that are not in the FOV.
console.print(entity.x, entity.y, entity.char, fg=entity.color)
visible.choose((gamemap.memory, light), out=gamemap.memory)
def render_ui(console: tcod.Console, engine: game.engine.Engine) -> None:
UI_WIDTH = game.constants.ui_width
UI_LEFT = console.width - UI_WIDTH
LOG_HEIGHT = console.height - 8
engine.message_log.render(
console=console, x=UI_LEFT, y=console.height - LOG_HEIGHT, width=UI_WIDTH, height=LOG_HEIGHT
)
console.draw_rect(UI_LEFT, 0, UI_WIDTH, 2, 0x20, (0xFF, 0xFF, 0xFF), (0, 0, 0))
game.render_functions.render_bar(
console=console,
x=UI_LEFT,
y=0,
current_value=engine.player.fighter.hp,
maximum_value=engine.player.fighter.max_hp,
total_width=UI_WIDTH,
)
game.render_functions.render_names_at_mouse_location(console=console, x=UI_LEFT, y=1, engine=engine)
if g.mouse_pos:
console.rgb[g.mouse_pos]["fg"] = (0, 0, 0)
console.rgb[g.mouse_pos]["bg"] = (255, 255, 255)
if g.fullbright or engine.game_map.visible[g.mouse_pos]:
console.print(
UI_LEFT,
2,
f"Fire={engine.game_map.fire[g.mouse_pos]}, Heat={engine.game_map.heat[g.mouse_pos]}, "
f"Smoke={engine.game_map.smoke[g.mouse_pos]},\nFuel={engine.game_map.fuel[g.mouse_pos]}",
)
| 34.620253 | 105 | 0.652285 | [
"CC0-1.0"
] | HexDecimal/7drl-2022 | game/rendering.py | 2,735 | Python |
from sys import argv, stdin
def cut(input_file, *args):
options = process_options(*args)
delimiter = d_option(options["-d"])
lines = input_file.readlines()
columns = [item.split(delimiter) for item in lines]
scope = f_option(options["-f"], len(columns[0]))
out_scope = []
for x in scope:
out_scope.append([column[x] for column in columns])
pr = []
for line in range(len(out_scope[0])):
for rec in out_scope:
pr.append(rec[line].strip())
print(delimiter.join(pr), end='')
pr.clear()
print()
def process_options(options):
out_opt = dict()
last_key = ""
for option in options:
if option.startswith('-'):
out_opt[option] = ""
last_key = option
else:
out_opt[last_key] = option
return out_opt
def f_option(params: str, file_size: int):
if not params:
return None
inp = params.split('-') if '-' in params else params
if '-' not in params and ',' not in params:
        return [int(params) - 1]  # single field: wrap in a list (fields are 1-indexed, columns 0-indexed)
elif params.startswith('-'):
return [x for x in range(0, int(inp[1]))]
elif params.endswith('-'):
return [x - 1 for x in range(int(inp[0]), file_size + 1)]
elif ',' in params:
        return [int(x) - 1 for x in params.split(',')]  # comma-separated 1-indexed fields to 0-based indices
else:
return [x - 1 for x in range(int(inp[0]), int(inp[1]) + 1)]
def d_option(params):
return params if params else ' '
cut(stdin, argv[1:]) | 26.927273 | 67 | 0.576637 | [
"MIT"
] | MichalKyjovsky/NPRG065_Programing_in_Python | SandBox/Practicals_05_Cut.py | 1,481 | Python |
import os
import platform
from datetime import datetime
# TODO test counter
# def test_count():
# return 0
# f"Count: {test_count()}\n"\
def serve_info():
return f"Stats\n" \
f"UTC: {datetime.utcnow().isoformat()}\n" \
f"\nMachine\n" \
f"Architecture: {platform.machine()}\n" \
f"Name: {platform.node()}\n" \
f"Platform: {platform.platform()}\n" \
f"CPU Model: {platform.processor()}\n" \
f"CPU Count: {os.cpu_count()}\n" \
f"Release: {platform.release()}\n" \
f"System: {platform.system()}\n" \
f"Version: {platform.version()}\n" \
f"\nPython\n" \
f"Branch: {platform.python_branch()}\n" \
f"Build: {platform.python_build()}\n" \
f"Compiler: {platform.python_compiler()}\n" \
f"Implementation: {platform.python_implementation()}\n" \
f"Revision: {platform.python_revision()}\n"
| 33.275862 | 68 | 0.550259 | [
"MIT"
] | pwentrys/SubstanceHelpers | utils/logger.py | 965 | Python |
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class SpbNetworkRange(Base):
"""The SPB Network Range.
The SpbNetworkRange class encapsulates a list of spbNetworkRange resources that are managed by the user.
A list of resources can be retrieved from the server using the SpbNetworkRange.find() method.
The list can be managed by using the SpbNetworkRange.add() and SpbNetworkRange.remove() methods.
"""
__slots__ = ()
_SDM_NAME = 'spbNetworkRange'
_SDM_ATT_MAP = {
'EnableAdvertiseNetworkRange': 'enableAdvertiseNetworkRange',
'EnableHostName': 'enableHostName',
'EntryColumn': 'entryColumn',
'EntryRow': 'entryRow',
'HostNamePrefix': 'hostNamePrefix',
'InterfaceMetric': 'interfaceMetric',
'NoOfColumns': 'noOfColumns',
'NoOfRows': 'noOfRows',
'StartSystemId': 'startSystemId',
'SystemIdIncrementBy': 'systemIdIncrementBy',
}
def __init__(self, parent):
super(SpbNetworkRange, self).__init__(parent)
@property
def SpbOutsideLinks(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.spboutsidelinks_dfb7b1e816409cddb14e138ebc2096dc.SpbOutsideLinks): An instance of the SpbOutsideLinks class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.spboutsidelinks_dfb7b1e816409cddb14e138ebc2096dc import SpbOutsideLinks
if self._properties.get('SpbOutsideLinks', None) is None:
return SpbOutsideLinks(self)
else:
return self._properties.get('SpbOutsideLinks')
@property
def SpbmNodeTopologyRange(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.spbmnodetopologyrange_199093afa11cd9f4488faaa1ad3ec3a7.SpbmNodeTopologyRange): An instance of the SpbmNodeTopologyRange class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocols.spbmnodetopologyrange_199093afa11cd9f4488faaa1ad3ec3a7 import SpbmNodeTopologyRange
if self._properties.get('SpbmNodeTopologyRange', None) is None:
return SpbmNodeTopologyRange(self)
else:
return self._properties.get('SpbmNodeTopologyRange')
@property
def EnableAdvertiseNetworkRange(self):
"""
Returns
-------
- bool: If true, this SPB ISIS Network Range is advertised.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableAdvertiseNetworkRange'])
@EnableAdvertiseNetworkRange.setter
def EnableAdvertiseNetworkRange(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableAdvertiseNetworkRange'], value)
@property
def EnableHostName(self):
"""
Returns
-------
- bool: If true, the host name of the router is activated.
"""
return self._get_attribute(self._SDM_ATT_MAP['EnableHostName'])
@EnableHostName.setter
def EnableHostName(self, value):
self._set_attribute(self._SDM_ATT_MAP['EnableHostName'], value)
@property
def EntryColumn(self):
"""
Returns
-------
- number: The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
"""
return self._get_attribute(self._SDM_ATT_MAP['EntryColumn'])
@EntryColumn.setter
def EntryColumn(self, value):
self._set_attribute(self._SDM_ATT_MAP['EntryColumn'], value)
@property
def EntryRow(self):
"""
Returns
-------
- number: The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
"""
return self._get_attribute(self._SDM_ATT_MAP['EntryRow'])
@EntryRow.setter
def EntryRow(self, value):
self._set_attribute(self._SDM_ATT_MAP['EntryRow'], value)
@property
def HostNamePrefix(self):
"""
Returns
-------
- str: The host name prefix information.
"""
return self._get_attribute(self._SDM_ATT_MAP['HostNamePrefix'])
@HostNamePrefix.setter
def HostNamePrefix(self, value):
self._set_attribute(self._SDM_ATT_MAP['HostNamePrefix'], value)
@property
def InterfaceMetric(self):
"""
Returns
-------
- number: The metric cost associated with this emulated SPB ISIS router.
"""
return self._get_attribute(self._SDM_ATT_MAP['InterfaceMetric'])
@InterfaceMetric.setter
def InterfaceMetric(self, value):
self._set_attribute(self._SDM_ATT_MAP['InterfaceMetric'], value)
@property
def NoOfColumns(self):
"""
Returns
-------
- number: The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
"""
return self._get_attribute(self._SDM_ATT_MAP['NoOfColumns'])
@NoOfColumns.setter
def NoOfColumns(self, value):
self._set_attribute(self._SDM_ATT_MAP['NoOfColumns'], value)
@property
def NoOfRows(self):
"""
Returns
-------
- number: The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
"""
return self._get_attribute(self._SDM_ATT_MAP['NoOfRows'])
@NoOfRows.setter
def NoOfRows(self, value):
self._set_attribute(self._SDM_ATT_MAP['NoOfRows'], value)
@property
def StartSystemId(self):
"""
Returns
-------
- str: The System ID assigned to the starting SPB ISIS router in this network range. The default is 00 00 00 00 00 00.
"""
return self._get_attribute(self._SDM_ATT_MAP['StartSystemId'])
@StartSystemId.setter
def StartSystemId(self, value):
self._set_attribute(self._SDM_ATT_MAP['StartSystemId'], value)
@property
def SystemIdIncrementBy(self):
"""
Returns
-------
- str: This is used when more than one router is to be emulated. The increment value is added to the previous System ID for each additional emulated router in this network range.
"""
return self._get_attribute(self._SDM_ATT_MAP['SystemIdIncrementBy'])
@SystemIdIncrementBy.setter
def SystemIdIncrementBy(self, value):
self._set_attribute(self._SDM_ATT_MAP['SystemIdIncrementBy'], value)
def update(self, EnableAdvertiseNetworkRange=None, EnableHostName=None, EntryColumn=None, EntryRow=None, HostNamePrefix=None, InterfaceMetric=None, NoOfColumns=None, NoOfRows=None, StartSystemId=None, SystemIdIncrementBy=None):
"""Updates spbNetworkRange resource on the server.
Args
----
- EnableAdvertiseNetworkRange (bool): If true, this SPB ISIS Network Range is advertised.
- EnableHostName (bool): If true, the host name of the router is activated.
- EntryColumn (number): The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
- EntryRow (number): The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
- HostNamePrefix (str): The host name prefix information.
- InterfaceMetric (number): The metric cost associated with this emulated SPB ISIS router.
- NoOfColumns (number): The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
- NoOfRows (number): The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
- StartSystemId (str): The System ID assigned to the starting SPB ISIS router in this network range. The default is 00 00 00 00 00 00.
- SystemIdIncrementBy (str): This is used when more than one router is to be emulated. The increment value is added to the previous System ID for each additional emulated router in this network range.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(self, EnableAdvertiseNetworkRange=None, EnableHostName=None, EntryColumn=None, EntryRow=None, HostNamePrefix=None, InterfaceMetric=None, NoOfColumns=None, NoOfRows=None, StartSystemId=None, SystemIdIncrementBy=None):
"""Adds a new spbNetworkRange resource on the server and adds it to the container.
Args
----
- EnableAdvertiseNetworkRange (bool): If true, this SPB ISIS Network Range is advertised.
- EnableHostName (bool): If true, the host name of the router is activated.
- EntryColumn (number): The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
- EntryRow (number): The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
- HostNamePrefix (str): The host name prefix information.
- InterfaceMetric (number): The metric cost associated with this emulated SPB ISIS router.
- NoOfColumns (number): The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
- NoOfRows (number): The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
- StartSystemId (str): The System ID assigned to the starting SPB ISIS router in this network range. The default is 00 00 00 00 00 00.
- SystemIdIncrementBy (str): This is used when more than one router is to be emulated. The increment value is added to the previous System ID for each additional emulated router in this network range.
Returns
-------
- self: This instance with all currently retrieved spbNetworkRange resources using find and the newly added spbNetworkRange resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained spbNetworkRange resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(self, EnableAdvertiseNetworkRange=None, EnableHostName=None, EntryColumn=None, EntryRow=None, HostNamePrefix=None, InterfaceMetric=None, NoOfColumns=None, NoOfRows=None, StartSystemId=None, SystemIdIncrementBy=None):
"""Finds and retrieves spbNetworkRange resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve spbNetworkRange resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all spbNetworkRange resources from the server.
Args
----
- EnableAdvertiseNetworkRange (bool): If true, this SPB ISIS Network Range is advertised.
- EnableHostName (bool): If true, the host name of the router is activated.
- EntryColumn (number): The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
- EntryRow (number): The value is used in combination to specify which virtual router in the Network Range is connected to the current ISIS L2/L3 Router.
- HostNamePrefix (str): The host name prefix information.
- InterfaceMetric (number): The metric cost associated with this emulated SPB ISIS router.
- NoOfColumns (number): The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
- NoOfRows (number): The value is used in combination to create a matrix (grid) for an emulated network range of the following size: The # Rows multiplied the # Cols = Number of routers in this Network Range. (For example, 3 Rows x 3 Columns = 9 Routers).
- StartSystemId (str): The System ID assigned to the starting SPB ISIS router in this network range. The default is 00 00 00 00 00 00.
- SystemIdIncrementBy (str): This is used when more than one router is to be emulated. The increment value is added to the previous System ID for each additional emulated router in this network range.
Returns
-------
- self: This instance with matching spbNetworkRange resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of spbNetworkRange data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the spbNetworkRange resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
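# Hypothetical usage sketch (editorial addition, not part of the generated class above):
# `parent` stands in for whatever container node exposes spbNetworkRange in a live
# IxNetwork session, so the surrounding setup is an assumption; the add()/find()/remove()
# calls and the attribute names come directly from the class above.
def _example_spb_network_range_usage(parent):
    ranges = SpbNetworkRange(parent)
    # Create and advertise a 3x3 grid of emulated SPB ISIS routers.
    ranges.add(NoOfRows=3, NoOfColumns=3,
               StartSystemId='00 00 00 00 00 01',
               EnableAdvertiseNetworkRange=True)
    # Retrieve only the advertised ranges (matched server-side), then delete them.
    advertised = ranges.find(EnableAdvertiseNetworkRange=True)
    advertised.remove()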
| 52.080645 | 266 | 0.698235 | [
"MIT"
] | Vibaswan/ixnetwork_restpy | ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/spbnetworkrange_525415b0593fd4072368412490b137fa.py | 16,145 | Python |
import time, datetime
from app import db
class ServerInfo(db.Model):
__tablename__ = 'servers'
__table_args__ = (db.PrimaryKeyConstraint('ip', 'port', name='_ip_port_pk'),)
ip = db.Column(db.String(128), nullable=False)
port = db.Column(db.Integer, nullable=False)
info = db.Column(db.String(1024), nullable=True)
player_count = db.Column(db.Integer, nullable=False)
player_total = db.Column(db.Integer, nullable=False)
servermod_version = db.Column(db.String(32), nullable=True)
pastebin_url = db.Column(db.String(32), nullable=True)
game_version = db.Column(db.String(32), nullable=True)
date_updated = db.Column(db.DateTime, default=db.func.current_timestamp(),
onupdate=db.func.current_timestamp())
def __getitem__(self, item):
return getattr(self, item)
def __setitem__(self, key, value):
self.__dict__[key] = value
@property
def serialize(self):
# du_unix = time.mktime(self.date_updated.timetuple())
# now_unix = time.mktime(datetime.datetime.now().timetuple())
return {
"ip": self.ip,
"port": self.port,
"info": self.info,
"player_count": self.player_count,
"player_total": self.player_total,
"game_version": self.game_version,
"servermod_version": self.servermod_version,
"pastebin_url": self.pastebin_url,
"date_updated": time.mktime(self.date_updated.timetuple())
}
def prettify_seconds(self, seconds):
m, s = divmod(seconds, 60)
h, m = divmod(m, 60)
d, h = divmod(h, 24)
if d: return "{} days".format(d)
if h: return "{} hours".format(h)
if m: return "{} minutes".format(m)
return "{} seconds".format(s)
| 35.090909 | 82 | 0.58601 | [
"MIT"
] | sappykun/scpsl-masterserver | app/modules/serverinfo/models.py | 1,930 | Python |
'''
TRIES
A trie supports search, insert, and deletion in O(L) time, where L is the length of the key.
Why Trie?
* With a trie, we can insert and find strings in O(L) time, where L is the length of a single word. This is obviously faster than a BST.
This is also faster than hashing because of the way it is implemented: we do not need to compute any hash function, and no collision handling
is required (like we do in open addressing and separate chaining).
* Another advantage of a trie is that we can easily print all words in alphabetical order, which is not easily possible with hashing.
* We can efficiently do prefix search (or auto-complete) with a trie.
Issues with Trie
Faster, but requires huge memory for storing the strings.
NOTE: Trie node class
struct TrieNode
{
struct TrieNode *children[ALPHABET_SIZE];
// isEndOfWord is true if the node
// represents end of a word
bool isEndOfWord;
};
'''
class TrieNode:
# Trie node class
def __init__(self):
self.children = [None]*26
        # isEndOfWord is True if the node represents the end of a word
self.isEndOfWord = False | 28.925 | 147 | 0.703544 | [
"MIT"
] | Wmeng98/Leetcode | CTCI/Data Structures/Trees/tries.py | 1,157 | Python |
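# Editorial sketch (not part of the original tries.py above): a minimal Trie wrapper
# showing how the insert and search operations described in the docstring could be
# implemented on top of the TrieNode class defined above, assuming keys contain only
# lowercase 'a'-'z' so each character maps to an index 0-25 in `children`.
class Trie:
    def __init__(self):
        self.root = TrieNode()
    def _index(self, ch):
        # map 'a'..'z' to 0..25
        return ord(ch) - ord('a')
    def insert(self, key):
        # walk (and create) one node per character: O(L) for a key of length L
        node = self.root
        for ch in key:
            i = self._index(ch)
            if node.children[i] is None:
                node.children[i] = TrieNode()
            node = node.children[i]
        node.isEndOfWord = True
    def search(self, key):
        # follow existing links only; the key is present iff the final node
        # exists and is marked as the end of a word
        node = self.root
        for ch in key:
            i = self._index(ch)
            if node.children[i] is None:
                return False
            node = node.children[i]
        return node.isEndOfWord
# Example: after insert("tree"), search("tree") is True while search("tr") is False (prefix only).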
import argparse
import multiprocessing
import random
import shutil
from datetime import datetime
from functools import partial
from pathlib import Path
import chainer
import chainer.functions as F
import chainer.links as L
import cupy
import numpy as np
from chainer import iterators, optimizers, serializers
from chainer.datasets import TransformDataset, get_cifar10
from chainer.training import StandardUpdater, Trainer, extensions
import augmentation
from metric_learning import MetricLearnClassifier
from modified_evaluator import ModifiedEvaluator
from modified_updater import ModifiedUpdater
from resnet import ResNet50
def apply_augmentation(inputs, mean, std, angle=(-5, 5), scale=(1, 1.2),
crop_size=None, train=True):
img, label = inputs
img = img.copy()
img = img.transpose(1, 2, 0)
if train:
img, _ = augmentation.gamma_correction(img)
img -= mean[None, None, :]
img /= std[None, None, :]
if train:
img, _ = augmentation.random_rotate(img, angle=angle)
if np.random.rand() < 0.5:
img, _ = augmentation.mirror(img)
if np.random.rand() < 0.5:
img, _ = augmentation.flip(img)
img, _ = augmentation.random_resize(img, scale=scale)
if crop_size is not None:
rnd1 = np.random.randint(img.shape[0] - crop_size)
rnd2 = np.random.randint(img.shape[1] - crop_size)
img = img[rnd1:rnd1 + crop_size, rnd2:rnd2 + crop_size, :]
img = img.transpose(2, 0, 1)
return img, label
def main():
parser = argparse.ArgumentParser(description='training mnist')
parser.add_argument('--gpu', '-g', default=-1, type=int,
help='GPU ID (negative value indicates CPU)')
parser.add_argument('--epoch', '-e', type=int, default=100,
help='Number of sweeps over the dataset to train')
parser.add_argument('--batchsize', '-b', type=int, default=8,
help='Number of images in each mini-batch')
parser.add_argument('--seed', '-s', type=int, default=0,
help='Random seed')
parser.add_argument('--report_trigger', '-rt', type=str, default='1e',
help='Interval for reporting(Ex.100i, default:1e)')
parser.add_argument('--save_trigger', '-st', type=str, default='1e',
help='Interval for saving the model(Ex.100i, default:1e)')
parser.add_argument('--load_model', '-lm', type=str, default=None,
help='Path of the model object to load')
parser.add_argument('--load_optimizer', '-lo', type=str, default=None,
help='Path of the optimizer object to load')
args = parser.parse_args()
start_time = datetime.now()
save_dir = Path('output/{}'.format(start_time.strftime('%Y%m%d_%H%M')))
random.seed(args.seed)
np.random.seed(args.seed)
cupy.random.seed(args.seed)
model = MetricLearnClassifier(ResNet50(), 512, 10,
method='arcface', final_margin=0.5,
final_scale=64, target_epoch=100)
if args.load_model is not None:
serializers.load_npz(args.load_model, model)
if args.gpu >= 0:
chainer.cuda.get_device_from_id(args.gpu).use()
model.to_gpu()
optimizer = optimizers.Adam(alpha=1e-3, weight_decay_rate=5e-4, amsgrad=True)
optimizer.setup(model)
optimizer.add_hook(chainer.optimizer.WeightDecay(5e-4))
if args.load_optimizer is not None:
serializers.load_npz(args.load_optimizer, optimizer)
train_data, valid_data = get_cifar10(scale=255.)
mean = np.mean([x for x, _ in train_data], axis=(0, 2, 3))
std = np.std([x for x, _ in train_data], axis=(0, 2, 3))
train_transform = partial(apply_augmentation, mean=mean, std=std, crop_size=28, train=True)
valid_transform = partial(apply_augmentation, mean=mean, std=std, crop_size=28, train=True)
train_data = TransformDataset(train_data, train_transform)
valid_data = TransformDataset(valid_data, valid_transform)
train_iter = iterators.SerialIterator(train_data, args.batchsize)
valid_iter = iterators.SerialIterator(valid_data, args.batchsize, repeat=False, shuffle=False)
updater = ModifiedUpdater(train_iter, optimizer, device=args.gpu)
trainer = Trainer(updater, (args.epoch, 'epoch'), out=save_dir)
report_trigger = (int(args.report_trigger[:-1]), 'iteration' if args.report_trigger[-1] == 'i' else 'epoch')
trainer.extend(extensions.LogReport(trigger=report_trigger))
trainer.extend(ModifiedEvaluator(valid_iter, model, device=args.gpu), name='val', trigger=report_trigger)
trainer.extend(extensions.PrintReport(['epoch', 'iteration', 'main/loss', 'main/accuracy', 'val/main/loss',
'val/main/accuracy', 'elapsed_time']), trigger=report_trigger)
trainer.extend(extensions.PlotReport(['main/loss', 'val/main/loss'], x_key=report_trigger[1],
marker='.', file_name='loss.png', trigger=report_trigger))
trainer.extend(extensions.PlotReport(['main/accuracy', 'val/main/accuracy'], x_key=report_trigger[1],
marker='.', file_name='accuracy.png', trigger=report_trigger))
save_trigger = (int(args.save_trigger[:-1]), 'iteration' if args.save_trigger[-1] == 'i' else 'epoch')
trainer.extend(extensions.snapshot_object(model, filename='model_{0}-{{.updater.{0}}}.npz'
.format(save_trigger[1])), trigger=save_trigger)
trainer.extend(extensions.snapshot_object(optimizer, filename='optimizer_{0}-{{.updater.{0}}}.npz'
.format(save_trigger[1])), trigger=save_trigger)
trainer.extend(extensions.ProgressBar())
    trainer.extend(extensions.ExponentialShift('alpha', 0.5), trigger=(30, 'epoch'))  # Adam's learning-rate hyperparameter is 'alpha' in Chainer
if save_dir.exists():
shutil.rmtree(save_dir)
save_dir.mkdir()
(save_dir / 'training_details').mkdir()
# Write parameters text
with open(save_dir / 'training_details/train_params.txt', 'w') as f:
f.write('model: {}\n'.format(model.predictor.__class__.__name__))
f.write('n_epoch: {}\n'.format(args.epoch))
f.write('batch_size: {}\n'.format(args.batchsize))
f.write('n_data_train: {}\n'.format(len(train_data)))
f.write('n_data_val: {}\n'.format(len(valid_data)))
f.write('seed: {}\n'.format(args.seed))
trainer.run()
if __name__ == '__main__':
main()
| 43.589404 | 112 | 0.648891 | [
"MIT"
] | diceroll/metric_learning | train_cifar.py | 6,582 | Python |
import lvgl as lv
import styles
def lv_arc(screen):
# Create the arc object on lv screen, ie a lv.scr() object
arc = lv.arc(screen)
# Set arc size
arc.set_size(150, 150)
# Set arc background style color blue
arc.add_style(arc.PART.BG, styles.gstyle_bg1)
# Set arc indicator (i.e. line) style to color red
arc.add_style(arc.PART.INDIC, styles.gstyle_line1)
# Setup Angles, from docs:
# Zero degree is at the middle right (3 o'clock) of the object and the degrees are increasing
# in a clockwise direction. The angles should be in [0;360] range.
#
# Get background angle start and end in degrees
start = arc.get_bg_angle_start() # default is 135
end = arc.get_bg_angle_end() # default is 45
# Set background angles
#arc.set_bg_angles(180,max)
# Set start angle of the arc (0-360 degrees)
#arc.set_start_angle(0)
# Get current value of arc
# print(arc.get_value()) # default is 0
# print(arc.get_min_value()) # default is 0
# print(arc.get_max_value()) # default is 100
# Set the current value (0-100)
# A percentage of the arc foreground that is filled
    # Note: This doesn't work on micropython?
# Examples:
# 50 is 50% filled
# 100 is 100% filled
# arc.set_value(5)
#
# Or set the value base on end angle (0-360) degrees
# Set end angle of the arc (0-360 degrees)
arc.set_end_angle(200)
if __name__ == '__main__':
lv.init()
scr = lv.obj()
lv.scr_load(scr)
lv_arc(scr)
| 29.5 | 98 | 0.653846 | [
"MIT"
] | ndrogness/lvgl_micropython_examples | widgets/lv_arc/lv_arc.py | 1,534 | Python |
#!/usr/bin/env python
from __future__ import print_function
import argparse
import calendar
import codecs
import datetime
import io
import os.path
import re
import subprocess
import sys
from pycoinzpub.convention import tx_fee, satoshi_to_mbtc
from pycoinzpub.encoding import hash160
from pycoinzpub.key import Key
from pycoinzpub.key.validate import is_address_valid
from pycoinzpub.networks import address_prefix_for_netcode, full_network_name_for_netcode, network_codes
from pycoinzpub.networks.default import get_current_netcode
from pycoinzpub.serialize import b2h_rev, h2b, h2b_rev, stream_to_bytes
from pycoinzpub.services import spendables_for_address, get_tx_db
from pycoinzpub.services.providers import message_about_tx_cache_env, \
message_about_tx_for_tx_hash_env, message_about_spendables_for_address_env
from pycoinzpub.tx.exceptions import BadSpendableError
from pycoinzpub.tx.script.tools import opcode_list, disassemble_for_opcode_data
from pycoinzpub.tx.script.check_signature import parse_signature_blob
from pycoinzpub.tx.script.der import UnexpectedDER
from pycoinzpub.tx.script.disassemble import disassemble_scripts, sighash_type_to_string
from pycoinzpub.tx.tx_utils import distribute_from_split_pool, sign_tx
from pycoinzpub.tx.Tx import Spendable, Tx, TxOut
from pycoinzpub.ui import standard_tx_out_script
DEFAULT_VERSION = 1
DEFAULT_LOCK_TIME = 0
LOCKTIME_THRESHOLD = 500000000
def range_int(min, max, name):
def cast(v):
v = int(v)
if not (min <= v <= max):
raise ValueError()
return v
cast.__name__ = name
return cast
def validate_bitcoind(tx, tx_db, bitcoind_url):
try:
from pycoinzpub.services.bitcoind import bitcoind_agrees_on_transaction_validity
if bitcoind_agrees_on_transaction_validity(bitcoind_url, tx):
print("interop test passed for %s" % tx.id(), file=sys.stderr)
else:
print("tx ==> %s FAILED interop test" % tx.id(), file=sys.stderr)
except ImportError:
print("warning: can't talk to bitcoind due to missing library")
def dump_header(tx):
tx_bin = stream_to_bytes(tx.stream)
print("Version: %2d tx hash %s %d bytes" % (tx.version, tx.id(), len(tx_bin)))
if tx.has_witness_data():
print(" segwit tx hash %s" % tx.w_id())
print("TxIn count: %d; TxOut count: %d" % (len(tx.txs_in), len(tx.txs_out)))
if tx.lock_time == 0:
meaning = "valid anytime"
elif tx.lock_time < LOCKTIME_THRESHOLD:
meaning = "valid after block index %d" % tx.lock_time
else:
when = datetime.datetime.utcfromtimestamp(tx.lock_time)
meaning = "valid on or after %s utc" % when.isoformat()
print("Lock time: %d (%s)" % (tx.lock_time, meaning))
print("Input%s:" % ('s' if len(tx.txs_in) != 1 else ''))
def make_trace_script(do_trace, use_pdb):
if not (do_trace or use_pdb):
return None
def trace_script(pc, opcode, data, stack, altstack, if_condition_stack, is_signature):
from pycoinzpub.serialize import b2h
print("stack: [%s]" % ' '.join(b2h(s) for s in stack))
if len(altstack) > 0:
print("altstack: %s" % altstack)
print("condition stack: %s" % if_condition_stack)
print("%3d : %02x %s" % (pc, opcode, disassemble_for_opcode_data(opcode, data)))
if use_pdb:
import pdb
pdb.set_trace()
return trace_script
def dump_inputs(tx, netcode, verbose_signature, address_prefix, traceback_f, disassembly_level):
def signature_for_hash_type_f(hash_type, script):
return tx.signature_hash(script, idx, hash_type)
for idx, tx_in in enumerate(tx.txs_in):
if tx.is_coinbase():
print("%4d: COINBASE %12.5f mBTC" % (idx, satoshi_to_mbtc(tx.total_in())))
continue
suffix = ""
if tx.missing_unspent(idx):
tx_out = None
address = tx_in.bitcoin_address(address_prefix=address_prefix)
else:
tx_out = tx.unspents[idx]
sig_result = " sig ok" if tx.is_signature_ok(idx, traceback_f=traceback_f) else " BAD SIG"
suffix = " %12.5f mBTC %s" % (satoshi_to_mbtc(tx_out.coin_value), sig_result)
address = tx_out.bitcoin_address(netcode=netcode)
t = "%4d: %34s from %s:%-4d%s" % (idx, address, b2h_rev(tx_in.previous_hash),
tx_in.previous_index, suffix)
print(t.rstrip())
if disassembly_level > 0:
dump_disassembly(tx_in, tx_out, tx.lock_time, signature_for_hash_type_f)
if verbose_signature:
dump_signatures(tx, tx_in, tx_out, idx, netcode, address_prefix, traceback_f, disassembly_level)
def dump_disassembly(tx_in, tx_out, lock_time, signature_for_hash_type_f):
out_script = b''
if tx_out:
out_script = tx_out.script
for (pre_annotations, pc, opcode, instruction, post_annotations) in \
disassemble_scripts(
tx_in.script, out_script, lock_time, signature_for_hash_type_f):
for l in pre_annotations:
print(" %s" % l)
if 1:
print(" %4x: %02x %s" % (pc, opcode, instruction))
for l in post_annotations:
print(" %s" % l)
def dump_signatures(tx, tx_in, tx_out, idx, netcode, address_prefix, traceback_f, disassembly_level):
signatures = []
for opcode in opcode_list(tx_in.script):
if not opcode.startswith("OP_"):
try:
signatures.append(parse_signature_blob(h2b(opcode[1:-1])))
except UnexpectedDER:
pass
if signatures:
sig_types_identical = (
tuple(zip(*signatures))[1].count(signatures[0][1]) == len(signatures))
i = 1 if len(signatures) > 1 else ''
for sig_pair, sig_type in signatures:
print(" r{0}: {1:#x}\n s{0}: {2:#x}".format(i, *sig_pair))
if not sig_types_identical and tx_out:
print(" z{}: {:#x} {}".format(i, tx.signature_hash(tx_out.script, idx, sig_type),
sighash_type_to_string(sig_type)))
if i:
i += 1
if sig_types_identical and tx_out:
print(" z:{} {:#x} {}".format(' ' if i else '', tx.signature_hash(
tx_out.script, idx, sig_type), sighash_type_to_string(sig_type)))
def dump_footer(tx, missing_unspents):
if not missing_unspents:
print("Total input %12.5f mBTC" % satoshi_to_mbtc(tx.total_in()))
if 1:
print("Total output %12.5f mBTC" % satoshi_to_mbtc(tx.total_out()))
if not missing_unspents:
print("Total fees %12.5f mBTC" % satoshi_to_mbtc(tx.fee()))
def dump_tx(tx, netcode, verbose_signature, disassembly_level, do_trace, use_pdb):
address_prefix = address_prefix_for_netcode(netcode)
missing_unspents = tx.missing_unspents()
traceback_f = make_trace_script(do_trace, use_pdb)
dump_header(tx)
dump_inputs(tx, netcode, verbose_signature, address_prefix, traceback_f, disassembly_level)
def signature_for_hash_type_f(hash_type, script):
return tx.signature_hash(script, idx, hash_type)
print("Output%s:" % ('s' if len(tx.txs_out) != 1 else ''))
for idx, tx_out in enumerate(tx.txs_out):
amount_mbtc = satoshi_to_mbtc(tx_out.coin_value)
address = tx_out.bitcoin_address(netcode=netcode) or "(unknown)"
print("%4d: %34s receives %12.5f mBTC" % (idx, address, amount_mbtc))
if disassembly_level > 0:
for (pre_annotations, pc, opcode, instruction, post_annotations) in \
disassemble_scripts(b'', tx_out.script, tx.lock_time, signature_for_hash_type_f):
for l in pre_annotations:
print(" %s" % l)
if 1:
print(" %4x: %02x %s" % (pc, opcode, instruction))
for l in post_annotations:
print(" %s" % l)
dump_footer(tx, missing_unspents)
def check_fees(tx):
total_in, total_out = tx.total_in(), tx.total_out()
actual_tx_fee = total_in - total_out
recommended_tx_fee = tx_fee.recommended_fee_for_tx(tx)
print("warning: transaction fees recommendations casually calculated and estimates may be incorrect",
file=sys.stderr)
if actual_tx_fee > recommended_tx_fee:
print("warning: transaction fee of %s exceeds expected value of %s mBTC" %
(satoshi_to_mbtc(actual_tx_fee), satoshi_to_mbtc(recommended_tx_fee)),
file=sys.stderr)
elif actual_tx_fee < 0:
print("not enough source coins (%s mBTC) for destination (%s mBTC)."
" Short %s mBTC" %
(satoshi_to_mbtc(total_in),
satoshi_to_mbtc(total_out), satoshi_to_mbtc(-actual_tx_fee)),
file=sys.stderr)
elif actual_tx_fee < recommended_tx_fee:
print("warning: transaction fee lower than (casually calculated)"
" expected value of %s mBTC, transaction might not propogate" %
satoshi_to_mbtc(recommended_tx_fee), file=sys.stderr)
return actual_tx_fee
EARLIEST_DATE = datetime.datetime(year=2009, month=1, day=1)
def parse_locktime(s):
s = re.sub(r"[ ,:\-]+", r"-", s)
for fmt1 in ["%Y-%m-%dT", "%Y-%m-%d", "%b-%d-%Y", "%b-%d-%y", "%B-%d-%Y", "%B-%d-%y"]:
for fmt2 in ["T%H-%M-%S", "T%H-%M", "-%H-%M-%S", "-%H-%M", ""]:
fmt = fmt1 + fmt2
try:
when = datetime.datetime.strptime(s, fmt)
if when < EARLIEST_DATE:
raise ValueError("invalid date: must be after %s" % EARLIEST_DATE)
return calendar.timegm(when.timetuple())
except ValueError:
pass
return int(s)
parse_locktime.__name__ = 'locktime'
def parse_fee(fee):
if fee in ["standard"]:
return fee
return int(fee)
def create_parser():
codes = network_codes()
EPILOG = ('Files are binary by default unless they end with the suffix ".hex". ' +
              'Known network codes:\n  ' +
', '.join(['%s (%s)' % (i, full_network_name_for_netcode(i)) for i in codes]))
parser = argparse.ArgumentParser(
description="Manipulate bitcoin (or alt coin) transactions.",
epilog=EPILOG)
parser.add_argument('-t', "--transaction-version", type=range_int(0, 255, "version"),
help='Transaction version, either 1 (default) or 3 (not yet supported).')
parser.add_argument('-l', "--lock-time", type=parse_locktime, help='Lock time; either a block'
'index, or a date/time (example: "2014-01-01T15:00:00"')
parser.add_argument('-n', "--network", default=get_current_netcode(), choices=codes,
help='Define network code (BTC=Bitcoin mainnet, XTN=Bitcoin testnet).')
parser.add_argument('-a', "--augment", action='store_true',
help='augment tx by adding any missing spendable metadata by fetching'
' inputs from cache and/or web services')
parser.add_argument('-s', "--verbose-signature", action='store_true',
help='Display technical signature details.')
parser.add_argument("-i", "--fetch-spendables", metavar="address", action="append",
help='Add all unspent spendables for the given bitcoin address. This information'
' is fetched from web services. With no outputs, incoming spendables will be printed.')
parser.add_argument('-f', "--private-key-file", metavar="path-to-private-keys", action="append", default=[],
help='file containing WIF or BIP0032 private keys. If file name ends with .gpg, '
'"gpg -d" will be invoked automatically. File is read one line at a time, and if '
'the file contains only one WIF per line, it will also be scanned for a bitcoin '
'address, and any addresses found will be assumed to be public keys for the given'
' private key.',
type=argparse.FileType('r'))
parser.add_argument('-g', "--gpg-argument", help='argument to pass to gpg (besides -d).', default='')
parser.add_argument("--remove-tx-in", metavar="tx_in_index_to_delete", action="append", type=int,
help='remove a tx_in')
parser.add_argument("--remove-tx-out", metavar="tx_out_index_to_delete", action="append", type=int,
help='remove a tx_out')
parser.add_argument('-F', "--fee", help='fee, in satoshis, to pay on transaction, or '
'"standard" to auto-calculate. This is only useful if the "split pool" '
'is used; otherwise, the fee is automatically set to the unclaimed funds.',
default="standard", metavar="transaction-fee", type=parse_fee)
parser.add_argument('-C', "--cache", help='force the resultant transaction into the transaction cache.'
' Mostly for testing.', action='store_true'),
parser.add_argument("--db", type=Tx.from_hex, help='force the transaction expressed by the given hex '
'into a RAM-based transaction cache. Mostly for testing.', action="append"),
parser.add_argument('-u', "--show-unspents", action='store_true',
help='show TxOut items for this transaction in Spendable form.')
parser.add_argument('-b', "--bitcoind-url",
help='URL to bitcoind instance to validate against (http://user:pass@host:port).')
parser.add_argument('-o', "--output-file", metavar="path-to-output-file", type=argparse.FileType('wb'),
                        help='file to write transaction to. This suppresses most other output.')
parser.add_argument('-d', "--disassemble", action='store_true',
help='Disassemble scripts.')
parser.add_argument("--pdb", action="store_true", help='Enter PDB debugger on each script instruction.')
parser.add_argument("--trace", action='store_true', help='Trace scripts.')
parser.add_argument('-p', "--pay-to-script", metavar="pay-to-script", action="append",
help='a hex version of a script required for a pay-to-script'
'input (a bitcoin address that starts with 3)')
parser.add_argument('-P', "--pay-to-script-file", metavar="pay-to-script-file", nargs=1,
type=argparse.FileType('r'), help='a file containing hex scripts '
'(one per line) corresponding to pay-to-script inputs')
parser.add_argument("argument", nargs="*", help='generic argument: can be a hex transaction id '
'(exactly 64 characters) to be fetched from cache or a web service;'
' a transaction as a hex string; a path name to a transaction to be loaded;'
' a spendable 4-tuple of the form tx_id/tx_out_idx/script_hex/satoshi_count '
'to be added to TxIn list; an address/satoshi_count to be added to the TxOut '
'list; an address to be added to the TxOut list and placed in the "split'
' pool".')
return parser
def replace_with_gpg_pipe(args, f):
gpg_args = ["gpg", "-d"]
if args.gpg_argument:
gpg_args.extend(args.gpg_argument.split())
gpg_args.append(f.name)
popen = subprocess.Popen(gpg_args, stdout=subprocess.PIPE)
return popen.stdout
def parse_private_key_file(args, key_list):
wif_re = re.compile(r"[1-9a-km-zA-LMNP-Z]{51,111}")
# address_re = re.compile(r"[1-9a-kmnp-zA-KMNP-Z]{27-31}")
for f in args.private_key_file:
if f.name.endswith(".gpg"):
f = replace_with_gpg_pipe(args, f)
for line in f.readlines():
# decode
if isinstance(line, bytes):
line = line.decode("utf8")
# look for WIFs
possible_keys = wif_re.findall(line)
def make_key(x):
try:
return Key.from_text(x)
except Exception:
return None
keys = [make_key(x) for x in possible_keys]
for key in keys:
if key:
key_list.append((k.wif() for k in key.subkeys("")))
# if len(keys) == 1 and key.hierarchical_wallet() is None:
# # we have exactly 1 WIF. Let's look for an address
# potential_addresses = address_re.findall(line)
TX_ID_RE = re.compile(r"^[0-9a-fA-F]{64}$")
def parse_tx(arg, parser, tx_db, network):
# hex transaction id
tx = None
if TX_ID_RE.match(arg):
if tx_db is None:
tx_db = create_tx_db(network)
tx = tx_db.get(h2b_rev(arg))
if not tx:
parser.error("can't find Tx with id %s" % arg)
return tx, tx_db
# hex transaction data
try:
return Tx.from_hex(arg), tx_db
except Exception:
pass
if os.path.exists(arg):
try:
with open(arg, "rb") as f:
if f.name.endswith("hex"):
f = io.BytesIO(codecs.getreader("hex_codec")(f).read())
tx = Tx.parse(f)
tx.parse_unspents(f)
except Exception:
pass
return tx, tx_db
def parse_scripts(args):
scripts = []
warnings = []
for p2s in args.pay_to_script or []:
try:
scripts.append(h2b(p2s))
except Exception:
warnings.append("warning: error parsing pay-to-script value %s" % p2s)
hex_re = re.compile(r"[0-9a-fA-F]+")
for f in args.pay_to_script_file or []:
count = 0
for l in f:
try:
m = hex_re.search(l)
if m:
p2s = m.group(0)
scripts.append(h2b(p2s))
count += 1
except Exception:
warnings.append("warning: error parsing pay-to-script file %s" % f.name)
if count == 0:
warnings.append("warning: no scripts found in %s" % f.name)
return scripts, warnings
def build_p2sh_lookup(args):
scripts, warnings = parse_scripts(args)
for w in warnings:
print(w)
p2sh_lookup = {}
for script in scripts:
p2sh_lookup[hash160(script)] = script
return p2sh_lookup
def create_tx_db(network):
tx_db = get_tx_db(network)
tx_db.warning_tx_cache = message_about_tx_cache_env()
tx_db.warning_tx_for_tx_hash = message_about_tx_for_tx_hash_env(network)
return tx_db
def parse_parts(arg, spendables, payables, network):
parts = arg.split("/")
if 4 <= len(parts) <= 7:
# spendable
try:
spendables.append(Spendable.from_text(arg))
return True
except Exception:
pass
if len(parts) == 2 and is_address_valid(parts[0], allowable_netcodes=[network]):
try:
payables.append(parts)
return True
except ValueError:
pass
def key_found(arg, payables, key_iters):
try:
key = Key.from_text(arg)
# TODO: check network
if key.wif() is None:
payables.append((key.address(), 0))
return True
key_iters.append(iter([key.wif()]))
return True
except Exception:
pass
return False
def parse_context(args, parser):
# we create the tx_db lazily
tx_db = None
if args.db:
the_ram_tx_db = dict((tx.hash(), tx) for tx in args.db)
if tx_db is None:
tx_db = create_tx_db(args.network)
tx_db.lookup_methods.append(the_ram_tx_db.get)
# defaults
txs = []
spendables = []
payables = []
key_iters = []
# there are a few warnings we might optionally print out, but only if
# they are relevant. We don't want to print them out multiple times, so we
# collect them here and print them at the end if they ever kick in.
warning_spendables = None
for arg in args.argument:
if is_address_valid(arg, allowable_netcodes=[args.network], allowable_types=[
"address", "pay_to_script", "segwit"]):
payables.append((arg, 0))
continue
if key_found(arg, payables, key_iters):
continue
tx, tx_db = parse_tx(arg, parser, tx_db, args.network)
if tx:
txs.append(tx)
continue
if parse_parts(arg, spendables, payables, args.network):
continue
parser.error("can't parse %s" % arg)
parse_private_key_file(args, key_iters)
if args.fetch_spendables:
warning_spendables = message_about_spendables_for_address_env(args.network)
for address in args.fetch_spendables:
spendables.extend(spendables_for_address(address, args.network))
return (txs, spendables, payables, key_iters, tx_db, warning_spendables)
def merge_txs(txs, spendables, payables):
txs_in = []
txs_out = []
unspents = []
# we use a clever trick here to keep each tx_in corresponding with its tx_out
for tx in txs:
smaller = min(len(tx.txs_in), len(tx.txs_out))
txs_in.extend(tx.txs_in[:smaller])
txs_out.extend(tx.txs_out[:smaller])
unspents.extend(tx.unspents[:smaller])
for tx in txs:
smaller = min(len(tx.txs_in), len(tx.txs_out))
txs_in.extend(tx.txs_in[smaller:])
txs_out.extend(tx.txs_out[smaller:])
unspents.extend(tx.unspents[smaller:])
for spendable in spendables:
txs_in.append(spendable.tx_in())
unspents.append(spendable)
for address, coin_value in payables:
script = standard_tx_out_script(address)
txs_out.append(TxOut(coin_value, script))
return txs_in, txs_out, unspents
def calculate_lock_time_and_version(args, txs):
# if no lock_time is explicitly set, inherit from the first tx or use default
lock_time = args.lock_time
if lock_time is None:
if txs:
lock_time = txs[0].lock_time
else:
lock_time = DEFAULT_LOCK_TIME
# if no version is explicitly set, inherit from the first tx or use default
version = args.transaction_version
if version is None:
if txs:
version = txs[0].version
else:
version = DEFAULT_VERSION
return lock_time, version
def remove_indices(items, indices):
if indices:
s = set(indices)
items = [i for idx, i in enumerate(items) if idx not in s]
return items
def wif_iter(iters):
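    # round-robin over the iterators, yielding one WIF at a time and dropping each iterator once it is exhausted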
while len(iters) > 0:
for idx, iter in enumerate(iters):
try:
wif = next(iter)
yield wif
except StopIteration:
iters = iters[:idx] + iters[idx+1:]
break
def generate_tx(txs, spendables, payables, args):
txs_in, txs_out, unspents = merge_txs(txs, spendables, payables)
lock_time, version = calculate_lock_time_and_version(args, txs)
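    # only prune unspents alongside txs_in when the two lists still correspond one-to-one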
if len(unspents) == len(txs_in):
unspents = remove_indices(unspents, args.remove_tx_in)
txs_in = remove_indices(txs_in, args.remove_tx_in)
txs_out = remove_indices(txs_out, args.remove_tx_out)
tx = Tx(txs_in=txs_in, txs_out=txs_out, lock_time=lock_time, version=version, unspents=unspents)
fee = args.fee
try:
if len(payables) > 0:
distribute_from_split_pool(tx, fee)
except ValueError as ex:
print("warning: %s" % ex.args[0], file=sys.stderr)
return tx
def print_output(tx, include_unspents, output_file, show_unspents, network, verbose_signature, disassemble, trace, pdb):
if len(tx.txs_in) == 0:
print("warning: transaction has no inputs", file=sys.stderr)
if len(tx.txs_out) == 0:
print("warning: transaction has no outputs", file=sys.stderr)
tx_as_hex = tx.as_hex(include_unspents=include_unspents)
if output_file:
f = output_file
if f.name.endswith(".hex"):
f.write(tx_as_hex.encode("utf8"))
else:
tx.stream(f, include_unspents=include_unspents)
f.close()
elif show_unspents:
for spendable in tx.tx_outs_as_spendable():
print(spendable.as_text())
elif len(tx.txs_out) == 0:
for spendable in tx.unspents:
print(spendable.as_text())
else:
if not tx.missing_unspents():
check_fees(tx)
dump_tx(tx, network, verbose_signature, disassemble, trace, pdb)
if include_unspents:
print("including unspents in hex dump since transaction not fully signed")
print(tx_as_hex)
def do_signing(tx, key_iters, p2sh_lookup, netcode):
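    # sign whatever inputs we have keys for; returns True only if no bad or missing signatures remain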
unsigned_before = tx.bad_signature_count()
unsigned_after = unsigned_before
if unsigned_before > 0 and key_iters:
print("signing...", file=sys.stderr)
sign_tx(tx, wif_iter(key_iters), p2sh_lookup=p2sh_lookup, netcode=netcode)
unsigned_after = tx.bad_signature_count()
if unsigned_after > 0:
print("warning: %d TxIn items still unsigned" % unsigned_after, file=sys.stderr)
return unsigned_after == 0
def cache_result(tx, tx_db, cache, network):
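    # when caching is requested, store the finished tx in the tx DB (creating the DB lazily if needed)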
if cache:
if tx_db is None:
tx_db = create_tx_db(network)
tx_db.put(tx)
return tx_db
def validate_tx(tx, tx_db, network):
    if not tx.txs_out:
        return tx_db
    if tx.missing_unspents():
        print("\n** can't validate transaction as source transactions missing", file=sys.stderr)
    else:
        try:
            if tx_db is None:
                tx_db = create_tx_db(network)
            tx.validate_unspents(tx_db)
            print('all incoming transaction values validated')
        except BadSpendableError as ex:
            print("\n**** ERROR: FEES INCORRECTLY STATED: %s" % ex.args[0], file=sys.stderr)
        except Exception as ex:
            print("\n*** can't validate source transactions as untampered: %s" %
                  ex.args[0], file=sys.stderr)
    return tx_db
def validate_against_bitcoind(tx, tx_db, network, bitcoind_url):
if bitcoind_url:
if tx_db is None:
tx_db = create_tx_db(network)
validate_bitcoind(tx, tx_db, bitcoind_url)
return tx_db
def tx(args, parser):
(txs, spendables, payables, key_iters, tx_db, warning_spendables) = parse_context(args, parser)
for tx in txs:
if tx.missing_unspents() and (args.augment or tx_db):
if tx_db is None:
tx_db = create_tx_db(args.network)
tx.unspents_from_db(tx_db, ignore_missing=True)
# build p2sh_lookup
p2sh_lookup = build_p2sh_lookup(args)
tx = generate_tx(txs, spendables, payables, args)
is_fully_signed = do_signing(tx, key_iters, p2sh_lookup, args.network)
include_unspents = not is_fully_signed
print_output(tx, include_unspents, args.output_file, args.show_unspents, args.network,
args.verbose_signature, args.disassemble, args.trace, args.pdb)
tx_db = cache_result(tx, tx_db, args.cache, args.network)
tx_db = validate_against_bitcoind(tx, tx_db, args.network, args.bitcoind_url)
if not args.show_unspents:
tx_db = validate_tx(tx, tx_db, args.network)
# print warnings
if tx_db:
for m in [tx_db.warning_tx_cache, tx_db.warning_tx_for_tx_hash]:
if m:
print("warning: %s" % m, file=sys.stderr)
if warning_spendables:
print("warning: %s" % warning_spendables, file=sys.stderr)
def main():
parser = create_parser()
args = parser.parse_args()
tx(args, parser)
if __name__ == '__main__':
main()
| 37.086207 | 120 | 0.620999 | [
"MIT"
] | mewald55/pycoin_ypub-zpub | pycoin/cmds/tx.py | 27,963 | Python |
from __future__ import absolute_import
__version__ = '0.3.3'
from .core.composition import *
from .core.transforms_interface import *
from .core.serialization import *
from .augmentations.transforms import *
from .augmentations.bbox_utils import *
from .imgaug.transforms import *
| 25.727273 | 40 | 0.80212 | [
"MIT"
] | BelBES/albumentations | albumentations/__init__.py | 283 | Python |
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "WebPersonal.settings")
application = get_wsgi_application()
#application = DjangoWhiteNoise(application) | 30.714286 | 71 | 0.84186 | [
"Apache-2.0"
] | CristianAAT/web-personal | WebPersonal/WebPersonal/wsgi.py | 215 | Python |
import sys
import win32gui
import re
import os
import delete_mark
import keyboard
import win32com.client
import pythoncom
from PyQt5.QtWidgets import QApplication
from PyQt5.QtGui import QImage
import map_insert
import pic_locate
base_dir = os.path.dirname(os.path.abspath(__file__))
app = QApplication(sys.argv)
def JudgeWindowSize(hwnd):
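    # grab the window contents and accept only the resolutions the overlay has been adapted for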
screen = QApplication.primaryScreen()
pix = screen.grabWindow(hwnd).toImage().convertToFormat(QImage.Format.Format_RGBA8888)
print((pix.width(),pix.height()))
if pix.width()==1920 and pix.height()==1080:
return 1
if pix.width()==1366 and pix.height()==768:
return 1
if pix.width()==1280 and pix.height()==720:
return 1
return 0
def OpenSearchBox():
st=os.system(r'python {0}\search_box.py'.format(base_dir))
def ImportConfig():
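    # read the first five lines of config.ini and keep whatever follows the (ASCII or full-width) colon on each line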
config_list=[]
with open('{0}\config.ini'.format(base_dir), 'r', encoding='UTF-8') as f:
temp=f.readlines()
print(temp)
for i in range(5):
gp=re.search(r'(:|:)\s*?(\S.*)\s*?', temp[i])
if gp==None or len(gp.groups())<2:config_list.append('')
else: config_list.append(gp.group(2))
return config_list
if __name__=='__main__':
pythoncom.CoInitialize()
shell = win32com.client.Dispatch("WScript.Shell")
shell.SendKeys('%')
hwnd = win32gui.FindWindow('UnityWndClass', None)
if JudgeWindowSize(hwnd)==0:
        print('分辨率尚未适配')  # "resolution not yet supported"
else:
win32gui.SetForegroundWindow(hwnd)
config=ImportConfig()
if config[0]!='':
keyboard.add_hotkey(config[0],OpenSearchBox,suppress = False)
if config[1]!='':
keyboard.add_hotkey(config[1],delete_mark.DeleteCenterMark,(map_insert.kp2,map_insert.des2),suppress = False)
if config[2]!='':
keyboard.add_hotkey(config[2],delete_mark.DeleteMouseMark,(map_insert.kp2,map_insert.des2),suppress = False)
if config[3]!='':
keyboard.add_hotkey(config[3],delete_mark.GetMarkInfo,(hwnd,map_insert.kp2,map_insert.des2),suppress = False)
if config[4]!='':
keyboard.add_hotkey(config[4],pic_locate.DeleteAllMarks,(hwnd,),suppress = False)
keyboard.wait() | 37.733333 | 121 | 0.658127 | [
"MIT"
] | ChengYang1998/GenshinMapAutoMarkTools | main.py | 2,280 | Python |
from bottle import route, Route, request, default_app, view, HTTPError, response
from redis import StrictRedis
from redis.utils import pipeline
import json
import uwsgi
import os
import logging
import requests
from config import get_config
from worker import get_cache_key, get_wait_key, get_queue_key
from worker import init_redis
application = None
ERROR_RESP = {'archived': False, 'queued': False, 'error': {'msg': 'unknown'}}
def init():
""" Init the application and add routes """
logging.basicConfig(format='%(asctime)s: [%(levelname)s]: %(message)s',
level=logging.DEBUG)
global theconfig
theconfig = get_config()
global rc
rc = init_redis(theconfig)
app = default_app()
return app
@route(['/', '/index.html', '/index.htm'])
@view('index')
def home():
return {'archives': theconfig['archives'],
'default_archive': theconfig.get('default_archive')}
def get_params():
url = request.query.get('url')
archive = request.query.get('archive')
browser_type = request.query.get('browser', 'chrome')
if not url:
raise HTTPError(status=400, body='No url= specified')
if archive not in theconfig['archives']:
raise HTTPError(status=400, body='No archive {0}'.format(archive))
if not url.startswith(('http://', 'https://')):
url = 'http://' + url
return browser_type, archive, url
@route('/archivepage')
def archive_page():
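    # if no cached or queued result exists for this url/archive/browser, push a capture job onto the
    # browser queue and block (up to wait_timeout_secs) for the worker to signal completion;
    # then return either the finished result (with its TTL) or the current queue position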
browser_type, archive, url = get_params()
response_key = get_cache_key(archive, browser_type, url)
wait_key = get_wait_key(archive, browser_type, url)
queue_key = get_queue_key(browser_type)
result = None
if not rc.exists(response_key):
cmd = dict(request.query)
cmd['url'] = url
num = rc.incr('total_urls:' + browser_type)
cmd['num'] = num
cmd = json.dumps(cmd)
with pipeline(rc) as pi:
waiting_str = {'archived': False,
'queued': True,
'num': num}
pi.set(response_key, json.dumps(waiting_str))
pi.rpush(queue_key, cmd)
rc.blpop(wait_key, theconfig['wait_timeout_secs'])
result = rc.get(response_key)
if result:
result = json.loads(result)
if 'queued' in result:
result['queue_pos'] = 0
front = rc.lindex(queue_key, 0)
if front:
front = json.loads(front)
front_num = front.get('num', 0)
# pos == 1 implies this url is next up
# pos <= 0 implies this url was removed from queue and is being processed
pos = result['num'] - front_num + 1
result['queue_pos'] = pos
else:
result['ttl'] = rc.ttl(response_key)
else:
result = ERROR_RESP
return result
@route('/download')
def download():
browser_type, archive, url = get_params()
response_key = get_cache_key(archive, browser_type, url)
result = rc.get(response_key)
if not result:
raise HTTPError(status=404, body='Url Not Archived')
result = json.loads(result)
if not 'download_url' in result:
raise HTTPError(status=404, body='Download Not Available')
headers = {}
session = result.get('download_session')
if session:
headers['Cookie'] = session
r = requests.get(result['download_url'],
headers=headers,
stream=True)
if r.status_code != 200:
raise HTTPError(status=400, body='Invalid Download Result: {0} {1}'.format(r.status_code, r.reason))
pass_headers = ('Content-Disposition', 'Content-Length', 'Content-Type')
for h in pass_headers:
response.set_header(h, r.headers.get(h))
response.body = r.iter_content()
return response
application = init()
| 24.670886 | 108 | 0.611339 | [
"MIT"
] | ikreymer/archivethiswebsite | web/app.py | 3,898 | Python |
"""
Some common functions for interfacing with the
NodeMeister REST API.
"""
import requests
import anyjson
import re
import logging
MISSING_ITEM = '-'
DIFF_MARKER = ">"
try:
logger.debug("importing nodemeisterlib")
except NameError:
FORMAT = "[%(levelname)s %(filename)s:%(lineno)s - %(funcName)20s() ] %(message)s"
logging.basicConfig(level=logging.ERROR, format=FORMAT)
logger = logging.getLogger(__name__)
def red(text):
"""
Shameless hack-up of the 'termcolor' python package
by Konstantin Lepa - <https://pypi.python.org/pypi/termcolor>
    to reduce dependencies and only make red text.
"""
s = '\033[%dm%s\033[0m' % (31, text)
return s
def print_columns(lines, spacer='   ', onlydifferent=False):
"""
Take a list of lines, each being a list with 3 elements
(the three columns to print) and print in 3 columns.
:param lines: list of 3-element lists, each list is a line and
each sub-list are the 3 columns in the line
:type lines: list of lists
:param spacer: spacer between columns, default 3 spaces
    :type spacer: string
:param onlydifferent: only output differing lines
:type onlydifferent: boolean
"""
s = ""
# get the column width
clen = [0, 0, 0]
for l in lines:
if onlydifferent:
if len(l) < 3:
continue
for c in xrange(0, 3):
if len(str(l[c])) > clen[c]:
clen[c] = len(str(l[c]))
    line_spec = "{{0:<{1}s}}{0}{{1:<{2}s}}{0}{{2:<{3}s}}\n".format(spacer, clen[0], clen[1], clen[2])
# print the lines
for l in lines:
if len(l) > 3 and l[3] == True:
s += red(line_spec.format(DIFF_MARKER + l[0], str(l[1]), str(l[2])))
else:
if onlydifferent:
continue
s += line_spec.format(l[0], str(l[1]), str(l[2]))
return s
def pretty_diff_list(title, oA, oB):
"""
    Generate a pretty diff of two lists.
:param title: the title/heading for the line
:type title: string
:param oA: first object
:param oB: second object
:returns: list of lines, each a list of 3 columns
:rtype: list of lists
"""
lines = []
items = set.union(set(oA), set(oB))
for i in sorted(items):
if i in oA and i in oB:
lines.append(['', i, i])
elif i in oA:
lines.append(['', i, MISSING_ITEM, True])
elif i in oB:
lines.append(['', MISSING_ITEM, i, True])
return lines
def pretty_diff_str(title, oA, oB):
"""
    Generate a pretty diff of two strings.
:param title: the title/heading for the line
:type title: string
:param oA: first object
:param oB: second object
:returns: list of lines, each a list of 3 columns
:rtype: list of lists
"""
if oA != oB:
return [[title, oA, oB, True]]
return [[title, oA, oB]]
def pretty_diff_dict(title, oA, oB):
"""
Generate a pretty diff of two dicts.
:param title: the title/heading for the line
:type title: string
:param oA: first object
:param oB: second object
:returns: list of lines, each a list of 3 columns
:rtype: list of lists
"""
lines = [[title, '', '']]
keys = set.union(set(oA.keys()), set(oB.keys()))
for k in sorted(keys):
if k in oA and k in oB:
if oA[k] == oB[k]:
lines.append([k, oA[k], oB[k]])
else:
lines.append([k, oA[k], oB[k], True])
elif k in oA:
lines.append([k, oA[k], MISSING_ITEM, True])
else:
lines.append([k, MISSING_ITEM, oB[k], True])
return lines
def pretty_diff_obj(title, oA, oB):
"""
Generate a pretty diff of two objects (actually just
dict, list or string) of lines suitable for use in pretty_diff_dicts()
This method is a pass-through to
pretty_diff_(dict|string|list)
depending on the input type.
:param title: the title/heading for the line
:type title: string
:param oA: first object
:param oB: second object
:returns: list of lines, each a list of 3 columns
:rtype: list of lists
"""
if type(oA) == type({}) or type(oB) == type({}):
return pretty_diff_dict(title, oA, oB)
elif type(oA) == type("") or type(oB) == type("") or type(oA) == type(u"") or type(oB) == type(u""):
return pretty_diff_str(title, oA, oB)
else:
return pretty_diff_list(title, oA, oB)
return []
def pretty_diff(title, titleA, dictA, titleB, dictB, onlydifferent=False):
"""
Generate a "pretty" printable diff of two Nodes or Groups
containing arbitrarily deep dict, list or string items.
Intended to be used for the "text" dicts in migrate_group()
and migrate_node().
:param title: overall title of the diff
:type title: string
:param titleA: title of the first dict
:type titleA: string
:param dictA: the first dict
:type dictA: dict
:param titleB: title of the second dict
:type titleB: string
:param dictB: the second dict
:type dictB: dict
:param onlydifferent: only output differing lines
:type onlydifferent: boolean
:returns: multi-line string, columnar diff of dicts
:rtype: string
"""
s = "Diff of %s\n" % title
lines = []
lines.append(['', titleA, titleB])
lines.append(['', '-' * len(titleA), '-' * len(titleB)])
lines.append(['name', dictA.get('name', '<none>'), dictB.get('name', '<none>')])
lines.append(['id', dictA.get('id', '<none>'), dictB.get('id', '<none>')])
lines.append(['description', dictA.get('description', '<none>'), dictB.get('description', '<none>')])
dictA.pop('name', None)
dictA.pop('id', None)
dictA.pop('description', None)
dictB.pop('name', None)
dictB.pop('id', None)
dictB.pop('description', None)
lines.append(['', '', ''])
k = set.union(set(dictA.keys()), set(dictB.keys()))
for p in sorted(k):
lines.append([p.capitalize() + ':', '', ''])
lines.extend(pretty_diff_obj('', dictA.get(p), dictB.get(p)))
#lines.append(['', '', ''])
s += print_columns(lines, onlydifferent=onlydifferent)
return s
def get_nm_node_yaml(nm_host, node_name, ssl_verify=False, verbose=False):
"""
Get the raw ENC YAML for a given node
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param node_name: name of the node to get YAML for
:type node_name: string
:param ssl_verify: whether or not to verify SSL certificate, default False
:type ssl_verify: boolean
:rtype: string
:returns: raw YAML string, or None
"""
nm_url = "http://%s/enc/puppet/%s" % (nm_host, node_name)
r = requests.get(nm_url, headers={'Accept': 'text/yaml'}, verify=ssl_verify)
if r.status_code == 200:
return r.content
else:
logger.error("got status code {s} for {u}".format(s=r.status_code, u=nm_url))
return None
def get_dashboard_node_yaml(url, ssl_verify=False, verbose=False):
"""
Given the full URL to a Puppet Dashboard node YAML file,
return the content of the YAML file as a string.
:param url: full URL to Dashboard node yaml
:type url: string
:param ssl_verify: whether or not to verify SSL certificate, default False
:type ssl_verify: boolean
:rtype: string
:returns: raw YAML string, or None
"""
r = requests.get(url, headers={'Accept': 'text/yaml'}, verify=ssl_verify)
if r.status_code == 200:
return r.content
else:
logger.error("got status code {s} for {u}".format(s=r.status_code, u=url))
return None
def get_json(url):
"""
uses requests to GET and return deserialized json
uses anyjson if the Response object doesn't have .json()
:param url: the URL to get
:type url: string
:rtype: dict/mixed or None
:returns: unserialized JSON, or None
"""
r = requests.get(url)
if 'json' in dir(r):
return r.json()
try:
j = anyjson.deserialize(r.content)
return j
except:
logger.error("could not deserialize JSON for {u} (got status code {s})".format(s=r.status_code, u=url))
return None
def get_group_names(nm_host):
"""
Return a dict of groups in the NM instance,
id => name
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:rtype: dict
:returns: NM groups, dict of the form {id<int>: name<string>}
"""
j = get_json("http://%s/enc/groups/" % nm_host)
names = {}
for n in j:
names[n['id']] = n['name']
return names
def get_nm_group_classes(nm_host):
"""
Return a dict of all group classes in NM,
with their id as the dict key.
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:rtype: dict
:returns: NM group classes, dict of the form:
{id<int>: {'classname': <string>, 'classparams': <string or None>, 'group': <int>, 'id': <int>}
"""
r = {}
j = get_json("http://%s/enc/classes/groups/" % nm_host)
for o in j:
r[o['id']] = o
return r
def get_nm_group_params(nm_host):
"""
Return a dict of all group params in NM,
with their id as the dict key.
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:rtype: dict
:returns: NM group params, dict of the form:
{id<int>: {'paramkey': <string>, 'paramvalue': <string or None>, 'group': <int>, 'id': <int>}
"""
r = {}
j = get_json("http://%s/enc/parameters/groups/" % nm_host)
for o in j:
if o['paramvalue'] is not None:
o['paramvalue'] = clean_value(o['paramvalue'])
r[o['id']] = o
return r
def get_nm_group(nm_host, gname=None, gid=None, groupnames=None):
"""
Return a dict of information about a group
in NM, by either name or ID. If gname is specified,
it will be resolved to the id.
groupnames, if specified, is the output dict from get_group_names();
if it is not specified, get_group_names() will be called internally.
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:param gname: name of group to get
:type gname: string
:param gid: ID of group to get, overrides gname
:type gid: int
:param groupnames: output of get_group_names(), to prevent calling it again if we already have it
:type groupnames: dict
:rtype: dict
:returns: unserialized JSON dict representing the specified group, of the form:
{'name': <string>, 'parameters': [<param IDs>], 'classes': [<class IDs>], 'parents': [<group IDs>], 'groups': [<group IDs>], 'id': <int>, 'description': <string>}
"""
if gid is None and gname is None:
raise ValueError("get_nm_group called without gname or gid")
if gid is None:
if groupnames is None:
groupnames = get_group_names(nm_host)
for n in groupnames:
if groupnames[n] == gname:
gid = n
if gid is None:
return {}
j = get_json("http://%s/enc/groups/%d/" % (nm_host, gid))
return j
def interpolate_group(group, classes, params, group_names):
"""
In the dict returned by get_nm_group, replace class
and parameter IDs, and other group IDs, with their
appropriate string or dict representations.
:param group: the Group dict returned by get_nm_group()
:type group: dict
:param classes: the dict of classes returned by get_nm_group_classes()
:type classes: dict
:param params: the dict of parameters returned by get_nm_group_params()
:type params: dict
:param group_names: the dict of group IDs to names returned by get_group_names()
:type group_names: dict
:returns: group dict, with classes and params interpolated
:rtype: dict
"""
g_params = group.get('parameters', {})
params_text = {}
for p in g_params:
foo = params[p]
params_text[foo['paramkey']] = foo['paramvalue']
group['parameters'] = params_text
g_classes = group.get('classes', {})
classes_text = {}
for c in g_classes:
foo = classes[c]
classes_text[foo['classname']] = foo['classparams']
group['classes'] = classes_text
g_parents = group.get('parents', {})
parents_text = []
for p in g_parents:
parents_text.append(group_names[p])
group['parents'] = parents_text
g_groups = group.get('groups', {})
groups_text = []
for g in g_groups:
groups_text.append(group_names[g])
group['groups'] = groups_text
return group
def add_group(nm_host, name, description, parents=None, groups=None, dry_run=False):
"""
add a group to NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param name: name of the new group
:type name: string
:param description: description of the new group
:type description: string
:param parents: parents of this group
:type parents: list of int IDs
:param groups: child groups of this group
:type groups: list of int IDs
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: int ID of the new group on success or False on failure
:rtype: int or False
"""
payload = {'name': name, 'description': description}
if parents is not None:
payload['parents'] = parents
if groups is not None:
payload['groups'] = groups
url = "http://%s/enc/groups/" % nm_host
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return get_nm_group_id(nm_host, name, dry_run=dry_run)
logger.error("ERROR: add_group got status code %d" % status_code)
return False
def get_nm_group_id(nm_host, name, groups=None, dry_run=False):
"""
Get the group ID of a group specified by name
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param name: name of the new group
:type name: string
:param groups: dict of groups as returned by get_group_names()
:type groups: dict
:returns: int ID of the group or False on failure
:rtype: int or False
"""
if dry_run:
return 0
if groups is None:
groups = get_group_names(nm_host)
for n in groups:
if groups[n] == name:
return n
return False
def add_param_to_group(nm_host, gid, pname, pval, dry_run=False):
"""
add a parameter to a group in NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param gid: numeric ID of the group to add param to
:type gid: int
:param pname: parameter name
:type pname: string
:param pval: parameter value
:type pval: string
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: True on success or False on failure
:rtype: boolean
"""
if isinstance(pval, basestring) and (pval.strip() == "" or pval == "" or pval == "''"):
pval = None
payload = {'group': gid, 'paramkey': pname, 'paramvalue': pval}
url = "http://%s/enc/parameters/groups/" % nm_host
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return True
logger.error("ERROR: add_param_to_group got status code %d" % status_code)
return False
def add_class_to_group(nm_host, gid, classname, classparams=None, dry_run=False):
"""
add a class to a group in NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param gid: numeric ID of the group to add param to
:type gid: int
:param classname: class name
:type classname: string
:param classparams: class parameters, default None
:type classparams: string or None
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: True on success or False on failure
:rtype: boolean
"""
payload = {'group': gid, 'classname': classname, 'classparams': classparams}
url = "http://%s/enc/classes/groups/" % nm_host
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return True
logger.error("ERROR: add_class_to_group got status code %d" % status_code)
return False
def get_node_names(nm_host):
"""
Return a dict of nodes in the NM instance,
id => hostname
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:rtype: dict
:returns: NM nodes, dict of the form {id<int>: hostname<string>}
"""
j = get_json("http://%s/enc/nodes/" % nm_host)
names = {}
for n in j:
names[n['id']] = n['hostname']
return names
def get_nm_node_id(nm_host, hostname, nodenames=None, dry_run=False):
"""
Get the node ID of a node specified by hostname
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param hostname: hostname of the node
:type hostname: string
:param nodenames: dict of nodes as returned by get_node_names()
:type nodenames: dict
:returns: int ID of the group or False on failure
:rtype: int or False
"""
if dry_run:
return 0
if nodenames is None:
nodenames = get_node_names(nm_host)
for n in nodenames:
if nodenames[n] == hostname:
return n
logger.error("could not find node ID for {h}".format(h=hostname))
return False
def get_nm_node(nm_host, hostname=None, node_id=None, nodenames=None):
"""
Return a dict of information about a node
in NM, by either name or ID. If nodename is specified,
it will be resolved to the id.
nodenames, if specified, is the output dict from get_node_names();
if it is not specified, get_node_names() will be called internally.
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:param hostname: name of node to get
:type hostname: string
:param node_id: ID of node to get, overrides hostname
:type node_id: int
:param nodenames: output of get_node_names(), to prevent calling it again if we already have it
:type nodenames: dict
:rtype: dict
:returns: unserialized JSON dict representing the specified group, of the form:
{'hostname': <string>, 'parameters': [<param IDs>], 'classes': [<class IDs>], 'parents': [<group IDs>],
'groups': [<group IDs>], 'id': <int>, 'description': <string>}
"""
if node_id is None and hostname is None:
raise ValueError("get_nm_node called without hostname or node_id")
if node_id is None:
if nodenames is None:
nodenames = get_node_names(nm_host)
for n in nodenames:
if nodenames[n] == hostname:
node_id = n
if node_id is None:
logger.error("could not find hode with hostname {h}".format(h=hostname))
return {}
j = get_json("http://%s/enc/nodes/%d/" % (nm_host, node_id))
return j
def get_nm_node_classes(nm_host):
"""
Return a dict of all node classes in NM,
with their id as the dict key.
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:rtype: dict
:returns: NM node classes, dict of the form:
{id<int>: {'classname': <string>, 'classparams': <string or None>, 'node': <int>, 'id': <int>}
"""
r = {}
j = get_json("http://%s/enc/classes/nodes/" % nm_host)
for o in j:
r[o['id']] = o
return r
def get_nm_node_params(nm_host):
"""
Return a dict of all node params in NM,
with their id as the dict key.
:param nm_host: NodeMeister hostname/IP
:type nm_host: string
:rtype: dict
:returns: NM node params, dict of the form:
{id<int>: {'paramkey': <string>, 'paramvalue': <string or None>, 'node': <int>, 'id': <int>}
"""
r = {}
j = get_json("http://%s/enc/parameters/nodes/" % nm_host)
for o in j:
r[o['id']] = o
return r
def add_node(nm_host, hostname, description, groups=None, dry_run=False):
"""
add a node to NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param hostname: hostname of the new node
:type hostname: string
:param description: description of the new node
:type description: string
:param groups: groups that this node is in
:type groups: list of int IDs
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: int ID of the new node on success or False on failure
:rtype: int or False
"""
payload = {'hostname': hostname, 'description': description}
if groups is not None:
payload['groups'] = groups
url = "http://%s/enc/nodes/" % nm_host
logger.debug("adding node {h}".format(h=hostname))
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return get_nm_node_id(nm_host, hostname, dry_run=dry_run)
logger.error("ERROR: add_node got status code %d" % status_code)
return False
def add_param_to_node(nm_host, node_id, pname, pval, dry_run=False):
"""
add a parameter to a node in NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param node_id: numeric ID of the node to add param to
:type node_id: int
:param pname: parameter name
:type pname: string
:param pval: parameter value
:type pval: string
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: True on success or False on failure
:rtype: boolean
"""
    if isinstance(pval, basestring) and (pval.strip() == "" or pval == "" or pval == "''"):
pval = None
payload = {'node': node_id, 'paramkey': pname, 'paramvalue': pval}
url = "http://%s/enc/parameters/nodes/" % nm_host
logger.debug("adding param '{pname}' to node {n} with val: {pval}".format(n=node_id, pname=pname, pval=pval))
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return True
logger.error("ERROR: add_param_to_node got status code %d" % status_code)
return False
def add_class_to_node(nm_host, node_id, classname, classparams=None, dry_run=False):
"""
add a class to a node in NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param node_id: numeric ID of the node to add param to
:type node_id: int
:param classname: class name
:type classname: string
:param classparams: class parameters, default None
:type classparams: string or None
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: True on success or False on failure
:rtype: boolean
"""
payload = {'node': node_id, 'classname': classname, 'classparams': classparams}
url = "http://%s/enc/classes/nodes/" % nm_host
logger.debug("adding class '{cn}' to node {n} with params: {cp}".format(n=node_id, cn=classname, cp=classparams))
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return True
logger.error("ERROR: add_class_to_node got status code %d" % status_code)
return False
def get_name_for_class_exclusion(nm_host, class_exclusion_id, verbose):
"""
Get the excluded class name for a given ClassExclusion ID.
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param class_exclusion_id: numeric ID of the class exclusion
:type class_exclusion_id: int
:returns: string name of class, or False on faliure
:rtype: string or False
"""
r = {}
j = get_json("http://%s/enc/exclusions/classes/" % nm_host)
if j is None:
return False
for o in j:
if o['id'] == class_exclusion_id:
return o['exclusion']
return False
def add_node_class_exclusion(nm_host, node_id, classname, dry_run=False, verbose=False):
"""
add a class exclusion to a node in NodeMeister
:param nm_host: NodeMeister hostname or IP
:type nm_host: string
:param node_id: numeric ID of the node to add param to
:type node_id: int
:param classname: class name to exclude
:type classname: string
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: True on success or False on failure
:rtype: boolean
"""
payload = {'node': node_id, 'exclusion': classname}
url = "http://%s/enc/exclusions/classes/" % nm_host
logger.debug("adding class exclusion for '{cn}' to node {n}".format(n=node_id, cn=classname))
status_code = do_post(url, payload, dry_run=dry_run)
if status_code == 201:
return True
logger.error("ERROR: add_node_class_exclusion got status code %d" % status_code)
return False
def clean_value(v, debug=False):
"""
Strip bad characters off of values
"""
if debug:
print("clean_value '%s'" % v)
if type(v) == type("") or type(v) == type(u""):
v = v.strip('"\\')
return v
def do_post(url, payload, dry_run=False):
"""
Do a POST request with Requests, return the status code.
:param url: URL to POST to
:type nm_host: string
:param payload: the payload data, to be JSON encoded
:type name: dict
:param dry_run: if True, only print what would be done, do not make any changes
:type dry_run: boolean
:returns: HTTP status code from the request
:rtype: int
"""
headers = {'content-type': 'application/json'}
if dry_run:
logger.warning("DRY RUN: do_post to url %s - payload:\n\t%s\n" % (url, payload))
return 201
r = requests.post(url, data=anyjson.serialize(payload), headers=headers)
return r.status_code
def clone_nodemeister_node(nm_host, dst_name, src_name, munge_re, group_replace=None, noop=False, verbose=False):
"""
Clone a node in nodemeister, munging all parameters and class params through munge_re,
a list of lists, each having 2 elements, a regex and a string to replace matches with.
group_replace is a hash of old_group_id => new_group_id to replace when creating the new node
"""
nodes = get_node_names(nm_host)
dst_node_id = get_nm_node_id(nm_host, dst_name, nodenames=nodes)
if dst_node_id is not False:
logger.error("ERROR: node %s already exists in NodeMeister with id %d." % (dst_name, dst_node_id))
return False
src_node = get_nm_node(nm_host, hostname=src_name, nodenames=nodes)
if len(src_node) == 0:
logger.error("ERROR: could not find source node %s" % src_name)
return False
if verbose:
logger.debug("Got source node id: {n}\n{src}".format(n=src_node['id'], src=src_node))
classes = get_nm_node_classes(nm_host)
params = get_nm_node_params(nm_host)
# add to the right groups
groups = []
for g in src_node['groups']:
if group_replace is not None:
if g in group_replace:
if verbose:
logger.debug(" changing group %d to %d (group_replace)" % (g, group_replace[g]))
g = group_replace[g]
groups.append(g)
# TODO - these are going to be difficult because we need to resolve IDs from source to names,
# and then map to the correct IDs for our new node
# add excluded groups
# add excluded params
node_id = add_node(nm_host, dst_name, "imported by %s" % __file__, groups=groups, dry_run=noop)
if node_id is False:
logger.error("ERROR adding node in Nodemeister.")
return False
else:
logger.info("Node added to NodeMeister with id %d" % node_id)
ok = True
# add excluded classes
for c in src_node['excluded_classes']:
c_name = get_name_for_class_exclusion(nm_host, c, verbose=verbose)
if verbose:
logger.debug("excluded class %s (%d)" % (c_name, c))
res = add_node_class_exclusion(nm_host, node_id, c_name, dry_run=noop, verbose=verbose)
if not res:
logger.error("ERROR adding class exclusion of '%s' to node %d" % (c_name, node_id))
ok = False
if verbose:
logger.info("added class_exclusion of '%s' to group %d" % (c_name, node_id))
# add the params
for p in src_node['parameters']:
for (ptn, repl) in munge_re:
foo = re.sub(ptn, repl, src_node['parameters'][p])
if foo != src_node['parameters'][p] and verbose:
logger.debug("Munged value of '%s' from '%s' to '%s'" % (p, src_node['parameters'][p], foo))
src_node['parameters'][p] = foo
res = add_param_to_node(nm_host, node_id, p, src_node['parameters'][p], dry_run=noop)
if not res:
logger.error("ERROR adding param %s with value '%s' to node %d" % (p, src_node['parameters'][p], node_id))
ok = False
if verbose:
logger.info("\tadded param %s with value '%s' to group %d" % (p, src_node['parameters'][p], node_id))
if len(src_node['classes']) > 0:
logger.critical("ERROR: script does not yet migrate classes for nodes.")
ok = False
if ok is False:
return False
return node_id
def clone_nodemeister_group(nm_host, dst_gname, src_gname, munge_re=None, noop=False, verbose=False):
"""
Clone a group in nodemeister, munging all parameters and class params through munge_re,
a list of lists, each having 2 elements, a regex and a string to replace matches with.
"""
group_names = get_group_names(nm_host)
dst_gid = get_nm_group_id(nm_host, dst_gname, groups=group_names)
if dst_gid is not False:
logger.error("ERROR: group %s already exists in NodeMeister with id %d." % (dst_gname, dst_gid))
return False
src_group = get_nm_group(nm_host, gname=src_gname, groupnames=group_names)
if len(src_group) == 0:
logger.error("ERROR: could not find source group %s" % src_gname)
return False
if verbose:
logger.debug("Got source group id: {n}\n{src}".format(n=src_group['id'], src=src_group))
classes = get_nm_group_classes(nm_host)
params = get_nm_group_params(nm_host)
interp_src_group = interpolate_group(src_group, classes, params, group_names)
#if verbose:
# print("\tInterpolated: %s" % interp_src_group)
groups = []
for foo in src_group['groups']:
bar = get_nm_group_id(nm_host, foo, groups=group_names)
if bar:
groups.append(bar)
# ok, try adding the group
gid = add_group(nm_host, dst_gname, "imported by %s" % __file__, groups=groups, dry_run=noop)
if gid is False:
logger.error("ERROR adding group in Nodemeister.")
return False
else:
logger.info("Group added to NodeMeister with id %d" % gid)
ok = True
# add the params
for p in src_group['parameters']:
for (ptn, repl) in munge_re:
foo = re.sub(ptn, repl, src_group['parameters'][p])
if foo != src_group['parameters'][p] and verbose:
logger.debug("Munged value of '%s' from '%s' to '%s'" % (p, src_group['parameters'][p], foo))
src_group['parameters'][p] = foo
res = add_param_to_group(nm_host, gid, p, src_group['parameters'][p], dry_run=noop)
if not res:
logger.error("ERROR adding param %s with value '%s' to group %d" % (p, src_group['parameters'][p], gid))
ok = False
if verbose:
logger.info("added param %s with value '%s' to group %d" % (p, src_group['parameters'][p], gid))
for c in src_group['classes']:
for (ptn, repl) in munge_re:
foo = re.sub(ptn, repl, src_group['classes'][c])
if foo != src_group['classes'][c] and verbose:
logger.debug("Munged value of '%s' from '%s' to '%s'" % (c, src_group['classes'][c], foo))
src_group['classes'][c] = foo
res = add_class_to_group(nm_host, gid, c, src_group['classes'][c], dry_run=noop)
if not res:
logger.error("ERROR adding class %s with value '%s' to group %d" % (c, src_group['classes'][c], gid))
ok = False
if verbose:
logger.info("added class %s with value '%s' to group %d" % (c, src_group['classes'][c], gid))
if ok is False:
logger.critical("cloning group failed.")
return False
return gid
| 35.338411 | 168 | 0.634253 | [
"Apache-2.0"
] | coxmediagroup/nodemeister | contrib/cli_scripts/nodemeisterlib.py | 32,476 | Python |
from __future__ import absolute_import
from datetime import datetime
from django.utils import timezone
from sentry.api.serializers import Serializer, register
from sentry.models import Event, EventError
@register(Event)
class EventSerializer(Serializer):
_reserved_keys = frozenset(['sentry.interfaces.User', 'sdk', 'device'])
def _get_entries(self, event, user, is_public=False):
# XXX(dcramer): These are called entries for future-proofing
interface_list = []
for key, interface in event.interfaces.iteritems():
# we treat user as a special contextual item
if key in self._reserved_keys:
continue
data = interface.get_api_context(is_public=is_public)
# data might not be returned for e.g. a public HTTP repr
if not data:
continue
entry = {
'data': data,
'type': interface.get_alias(),
}
interface_list.append((interface, entry))
interface_list.sort(key=lambda x: x[0].get_display_score(), reverse=True)
return [i[1] for i in interface_list]
def get_attrs(self, item_list, user, is_public=False):
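        # bind node-backed event data up front, then collect per-event user/device/sdk context plus the entry list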
Event.objects.bind_nodes(item_list, 'data')
results = {}
for item in item_list:
user_interface = item.interfaces.get('sentry.interfaces.User')
if user_interface:
user_data = user_interface.to_json()
else:
user_data = None
device_interface = item.interfaces.get('device')
if device_interface:
device_data = device_interface.to_json()
else:
device_data = None
sdk_interface = item.interfaces.get('sdk')
if sdk_interface:
sdk_data = sdk_interface.to_json()
else:
sdk_data = None
results[item] = {
'entries': self._get_entries(item, user, is_public=is_public),
'user': user_data,
'sdk': sdk_data,
'device': device_data,
}
return results
def serialize(self, obj, attrs, user):
errors = []
error_set = set()
for error in obj.data.get('errors', []):
message = EventError.get_message(error)
if message in error_set:
continue
error_set.add(message)
error_result = {
'type': error['type'],
'message': message,
'data': {
k: v for k, v in error.iteritems()
if k != 'type'
},
}
errors.append(error_result)
tags = sorted([
{
'key': k.split('sentry:', 1)[-1],
'value': v
} for k, v in obj.get_tags()
], key=lambda x: x['key'])
received = obj.data.get('received')
if received:
# Sentry at one point attempted to record invalid types here.
# Remove after June 2 2016
try:
received = datetime.utcfromtimestamp(received).replace(
tzinfo=timezone.utc,
)
except TypeError:
received = None
event_type = obj.data.get('type', 'default')
metadata = obj.data.get('metadata') or {
'title': obj.message_short,
}
# TODO(dcramer): move release serialization here
d = {
'id': str(obj.id),
'groupID': obj.group.id,
'eventID': str(obj.event_id),
'size': obj.size,
'entries': attrs['entries'],
# See GH-3248
'message': obj.get_legacy_message(),
'user': attrs['user'],
'sdk': attrs['sdk'],
'device': attrs['device'],
'context': obj.data.get('extra', {}),
'packages': obj.data.get('modules', {}),
'type': event_type,
'metadata': metadata,
'tags': tags,
'platform': obj.platform,
'dateCreated': obj.datetime,
'dateReceived': received,
'errors': errors,
}
return d
class SharedEventSerializer(EventSerializer):
def get_attrs(self, item_list, user):
return super(SharedEventSerializer, self).get_attrs(
item_list, user, is_public=True
)
def serialize(self, obj, attrs, user):
result = super(SharedEventSerializer, self).serialize(obj, attrs, user)
del result['context']
del result['user']
del result['tags']
return result
| 32.868056 | 81 | 0.530319 | [
"BSD-3-Clause"
] | E-LLP/sentry | src/sentry/api/serializers/models/event.py | 4,733 | Python |
import os
import sys
from envyaml import EnvYAML
# Adding the repository root to the sys path so exports work properly
sys.path.append(os.path.abspath(os.path.join(__file__, "..", "..", "..")))
from examples.common.processes import GUIProcess, ImageCapture, YoloImageProcessor # noqa: E402
from rembrain_robot_framework import RobotDispatcher # noqa: E402
def run_dispatcher():
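    # map each process name listed in the YAML config onto its implementing class before handing them to RobotDispatcher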
process_map = {
"gui": GUIProcess,
"image_capture": ImageCapture,
"processor": YoloImageProcessor,
}
config = EnvYAML(os.path.join(os.path.dirname(__file__), "config", "processes_config.yaml"))
processes = {p: {"process_class": process_map[p]} for p in config["processes"]}
robot_dispatcher = RobotDispatcher(config, processes, in_cluster=False)
robot_dispatcher.start_processes()
robot_dispatcher.run(robot_dispatcher.shared_objects["exit_flag"])
robot_dispatcher.stop_logging()
if __name__ == "__main__":
run_dispatcher()
| 31.387097 | 96 | 0.727646 | [
"MIT"
] | francisso/rembrain_robotframework | examples/local/main.py | 973 | Python |
# Purpose: API utilities
#
# Notes: API credentials must be enabled on Veracode account and placed in ~/.veracode/credentials like
#
# [default]
# veracode_api_key_id = <YOUR_API_KEY_ID>
# veracode_api_key_secret = <YOUR_API_KEY_SECRET>
#
# and file permission set appropriately (chmod 600)
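#
# Minimal usage sketch (assumes VeracodeAPI is imported from this module and the credentials file above is in place):
#
#   api = VeracodeAPI()
#   for app in api.get_apps():
#       print(app)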
import requests
import logging
from requests.adapters import HTTPAdapter
from typing import List
from veracode_api_signing.exceptions import VeracodeAPISigningException
from veracode_api_signing.plugin_requests import RequestsAuthPluginVeracodeHMAC
from .constants import Constants
from .exceptions import VeracodeAPIError
from .applications import Applications, Sandboxes, CustomFields
from .findings import Findings, SummaryReport
from .policy import Policies
from .sca import ComponentActivity, Workspaces
from .collections import Collections
from .identity import Users, Teams, BusinessUnits, APICredentials, Roles
from .healthcheck import Healthcheck
from .dynamic import Analyses, Scans, Occurrences, Configuration, CodeGroups, ScanCapacitySummary, ScanOccurrences, ScannerVariables, DynUtils
from .xmlapi import XMLAPI
class VeracodeAPI:
def __init__(self, proxies=None):
self.baseurl = 'https://analysiscenter.veracode.com/api'
requests.Session().mount(self.baseurl, HTTPAdapter(max_retries=3))
self.proxies = proxies
self.retry_seconds = 120
self.connect_error_msg = "Connection Error"
#xml apis
def get_app_list(self):
return XMLAPI().get_app_list()
def get_app_info(self, app_id):
return XMLAPI().get_app_info(app_id)
def get_sandbox_list(self, app_id):
return XMLAPI().get_sandbox_list(app_id)
def get_build_list(self, app_id, sandbox_id=None):
return XMLAPI().get_build_list(app_id, sandbox_id)
def get_build_info(self, app_id, build_id=None, sandbox_id=None):
return XMLAPI().get_build_info(app_id,build_id,sandbox_id)
def get_detailed_report(self, build_id):
return XMLAPI().get_detailed_report(build_id)
def set_mitigation_info(self,build_id,flaw_id_list,action,comment):
return XMLAPI().set_mitigation_info(build_id,flaw_id_list,action,comment)
def generate_archer(self,payload):
return XMLAPI().generate_archer(payload)
def download_archer(self, token=None):
return XMLAPI().download_archer(token)
# rest apis
## Healthcheck APIs
def healthcheck(self):
return Healthcheck().healthcheck()
def status(self):
return Healthcheck().status()
## Application and Sandbox APIs
def get_apps(self):
return Applications().get_all()
def get_app (self,guid=None,legacy_id=None):
return Applications().get(guid,legacy_id)
def get_app_by_name (self,appname):
return Applications().get_by_name(appname)
def create_app(self,app_name,business_criticality, business_unit=None, teams=[]):
return Applications().create(app_name,business_criticality,business_unit,teams)
def delete_app (self,guid):
return Applications().delete(guid)
def get_custom_fields (self):
return CustomFields().get_all()
def get_app_sandboxes (self,guid):
return Sandboxes().get_all(guid)
def create_sandbox (self, app, name, auto_recreate=False, custom_fields=[]):
return Sandboxes().create(app,name,auto_recreate,custom_fields)
def update_sandbox (self, app, sandbox, name, auto_recreate=False, custom_fields=[]):
return Sandboxes().update(app,sandbox,name,auto_recreate,custom_fields)
def delete_sandbox (self, app, sandbox):
return Sandboxes().delete(app,sandbox)
# Policy APIs
def get_policies (self):
return Policies().get_all()
def get_policy (self,guid):
return Policies().get(guid)
def create_policy(self, name, description, vendor_policy=False, finding_rules=[], scan_frequency_rules=[], grace_periods={}):
return Policies().create(name, description, vendor_policy, finding_rules, scan_frequency_rules, grace_periods)
def delete_policy (self,guid):
return Policies().delete(guid)
def update_policy(self, guid, name, description, vendor_policy=False, finding_rules=[], scan_frequency_rules=[], grace_periods={}):
return Policies().update(guid, name, description, vendor_policy, finding_rules, scan_frequency_rules, grace_periods)
# Findings and Reporting APIs
def get_findings(self,app,scantype='STATIC',annot='TRUE',request_params=None,sandbox=None):
return Findings().get_findings(app,scantype,annot,request_params,sandbox)
def get_static_flaw_info(self,app,issueid,sandbox=None):
return Findings().get_static_flaw_info(app,issueid,sandbox)
def get_dynamic_flaw_info(self,app,issueid):
return Findings().get_dynamic_flaw_info(app,issueid)
def get_summary_report(self,app,sandbox=None):
return SummaryReport().get_summary_report(app,sandbox)
def add_annotation(self,app,issue_list,comment,action,sandbox=None):
return Findings().add_annotation(app,issue_list,comment,action,sandbox)
def match_findings(self,origin_finding,potential_matches,approved_findings_only=True):
return Findings().match(origin_finding,potential_matches,approved_findings_only)
## Collections APIs
def get_collections(self):
return Collections().get_all()
def get_collections_by_name(self,collection_name):
return Collections().get_by_name(collection_name)
def get_collections_by_business_unit(self,business_unit_name):
return Collections().get_by_business_unit(business_unit_name)
def get_collections_statistics(self):
return Collections().get_statistics()
def get_collection(self,guid):
return Collections().get(guid)
def get_collection_assets(self,guid):
return Collections().get_assets(guid)
def create_collection(self,name,description="",tags='',business_unit_guid=None,custom_fields=[],assets=[]):
return Collections().create(name,description,tags,business_unit_guid,custom_fields,assets)
def update_collection(self,guid,name,description="",tags="",business_unit_guid=None,custom_fields=[],assets=[]):
        return Collections().update(guid,name,description,tags,business_unit_guid,custom_fields,assets)
def delete_collection(self,guid):
return Collections().delete(guid)
## Identity APIs
def get_users(self):
return Users().get_all()
def get_user_self (self):
return Users().get_self()
def get_user(self,user_guid):
return Users().get(user_guid)
def get_user_by_name(self,username):
return Users().get_by_name(username)
def get_user_by_search(self, search_term=None, api_id=None, role_id=None, login_status=None, saml_user=None, team_id=None, detailed=False, user_type=None, request_params=None):
return Users().get_user_search(search_term,api_id,role_id,login_status,saml_user,team_id,detailed,user_type,request_params)
def create_user (self,email,firstname,lastname,username=None,type="HUMAN",roles=[],teams=[],mfa=False):
return Users().create(email,firstname,lastname,username,type,roles,teams,mfa=mfa)
def update_user_roles (self,user_guid,roles):
return Users().update_roles(user_guid,roles)
def update_user (self,user_guid,changes):
return Users().update(user_guid,changes)
def update_user_email_address (self,user_guid,email_address,ignore_verification=False):
return Users().update_email_address(user_guid,email_address,ignore_verification)
def send_password_reset (self,user_legacy_id):
return Users().reset_password(user_legacy_id)
def disable_user (self,user_guid):
return Users().disable(user_guid)
def delete_user (self,user_guid):
return Users().delete(user_guid)
def get_teams (self, all_for_org=False):
return Teams().get_all()
def create_team (self, team_name, business_unit=None, members=[]):
return Teams().create(team_name,business_unit,members)
def update_team (self, team_guid, team_name="", business_unit=None, members=[]):
return Teams().update(team_guid,team_name,business_unit,members)
def delete_team (self, team_guid):
return Teams().delete(team_guid)
def get_business_units (self):
return BusinessUnits().get_all()
def get_business_unit (self, guid):
return BusinessUnits().get(guid)
def create_business_unit (self, name, teams=[]):
return BusinessUnits().create(name,teams)
def update_business_unit (self, guid, name='', teams=[]):
return BusinessUnits().update(guid,name,teams)
def delete_business_unit (self, guid):
return BusinessUnits().delete(guid)
def get_creds (self,api_id=None):
if api_id != None:
return APICredentials().get(api_id)
else:
return APICredentials().get_self()
def renew_creds (self):
return APICredentials().renew()
def revoke_creds (self, api_id):
return APICredentials().revoke(api_id)
def get_roles (self):
return Roles().get_all()
## SCA APIs - note must be human user to use these, not API user
def get_workspaces(self):
return Workspaces().get_all()
def get_workspace_by_name(self,name):
return Workspaces().get_by_name(name)
def create_workspace(self,name):
return Workspaces().create(name)
def add_workspace_team(self,workspace_guid,team_id):
return Workspaces().add_team(workspace_guid,team_id)
def delete_workspace(self,workspace_guid):
return Workspaces().delete(workspace_guid)
def get_projects(self,workspace_guid):
return Workspaces().get_projects(workspace_guid)
def get_project(self,workspace_guid,project_guid):
return Workspaces().get_project(workspace_guid,project_guid)
def get_project_issues(self,workspace_guid,project_guid):
return Workspaces().get_project_issues(workspace_guid,project_guid)
def get_project_libraries(self,workspace_guid,project_guid):
return Workspaces().get_project_libraries(workspace_guid,project_guid)
def get_agents(self,workspace_guid):
return Workspaces().get_agents(workspace_guid)
def get_agent(self,workspace_guid,agent_guid):
return Workspaces().get_agent(workspace_guid,agent_guid)
def create_agent(self,workspace_guid,name,agent_type='CLI'):
return Workspaces().create_agent(workspace_guid,name,agent_type)
def get_agent_tokens(self,workspace_guid,agent_guid):
return Workspaces().get_agent_tokens(workspace_guid,agent_guid)
def get_agent_token(self,workspace_guid,agent_guid,token_id):
return Workspaces().get_agent_token(workspace_guid,agent_guid,token_id)
def regenerate_agent_token(self,workspace_guid,agent_guid):
return Workspaces().regenerate_agent_token(workspace_guid,agent_guid)
def revoke_agent_token(self,workspace_guid,agent_guid,token_id):
return Workspaces().revoke_agent_token(workspace_guid,agent_guid,token_id)
def get_issues(self,workspace_guid):
return Workspaces().get_issues(workspace_guid)
def get_issue(self,issue_id):
return Workspaces().get_issues(issue_id)
def get_libraries(self,workspace_guid,unmatched=False):
return Workspaces().get_libraries(workspace_guid, unmatched)
def get_library(self,library_id):
return Workspaces().get_library(library_id)
def get_vulnerability(self,vulnerability_id):
return Workspaces().get_vulnerability(vulnerability_id)
def get_license(self,license_id):
return Workspaces().get_license(license_id)
def get_sca_events(self,date_gte=None,event_group=None,event_type=None):
return Workspaces().get_events(date_gte,event_group,event_type)
def get_sca_scan(self,scan_id):
return Workspaces().get_scan(scan_id)
def get_component_activity(self,component_id):
return ComponentActivity().get(component_id)
#dynamic APIs
def get_analyses(self):
return Analyses().get_all()
def get_analyses_by_name(self,name):
return Analyses().get_by_name(analysis_name=name)
def get_analyses_by_target_url(self,url):
return Analyses().get_by_target_url(target_url=url)
def get_analyses_by_search_term(self,search_term):
return Analyses().get_by_search_term(search_term=search_term)
def get_analysis(self,analysis_id):
return Analyses().get(guid=analysis_id)
def get_analysis_audits(self,analysis_id):
return Analyses().get_audits(guid=analysis_id)
def get_analysis_scans(self,analysis_id):
return Analyses().get_scans(guid=analysis_id)
def get_analysis_scanner_variables(self,analysis_id):
return Analyses().get_scanner_variables(guid=analysis_id)
def create_analysis(self,name,scans,schedule_frequency='ONCE',business_unit_guid=None,email=None,owner=None):
return Analyses().create(name,scans,schedule_frequency,business_unit_guid,email,owner)
def update_analysis(self,guid,name,scans,schedule_frequency='ONCE',business_unit_guid=None,email=None,owner=None):
return Analyses().update(guid,name,scans,schedule_frequency,business_unit_guid,email,owner)
def update_analysis_scanner_variable(self,analysis_guid,scanner_variable_guid,reference_key,value,description):
return Analyses().update_scanner_variable(analysis_guid,scanner_variable_guid,reference_key,value,description)
def delete_analysis_scanner_variable(self,analysis_guid,scanner_variable_guid):
return Analyses().delete_scanner_variable(analysis_guid,scanner_variable_guid)
def delete_analysis(self,analysis_guid):
return Analyses().delete(guid=analysis_guid)
def get_dyn_scan(self,scan_guid):
return Scans().get(guid=scan_guid)
def get_dyn_scan_audits(self,scan_guid):
return Scans().get_audits(guid=scan_guid)
def get_dyn_scan_config(self,scan_guid):
return Scans().get_configuration(guid=scan_guid)
def update_dyn_scan(self,scan_guid,scan):
return Scans().update(guid=scan_guid,scan=scan)
def delete_dyn_scan(self,scan_guid):
return Scans().delete(guid=scan_guid)
def get_scan_scanner_variables(self,scan_id):
return Scans().get_scanner_variables(guid=scan_id)
def update_scan_scanner_variable(self,scan_guid,scanner_variable_guid,reference_key,value,description):
return Scans().update_scanner_variable(scan_guid,scanner_variable_guid,reference_key,value,description)
def delete_scan_scanner_variable(self,scan_guid,scanner_variable_guid):
return Scans().delete_scanner_variable(scan_guid,scanner_variable_guid)
def get_analysis_occurrences(self):
return Occurrences().get_all()
def get_analysis_occurrence(self,occurrence_guid):
return Occurrences().get(guid=occurrence_guid)
def stop_analysis_occurrence(self,occurrence_guid,save_or_delete):
return Occurrences().stop(guid=occurrence_guid,save_or_delete=save_or_delete)
def get_scan_occurrences(self,occurrence_guid):
return Occurrences().get_scan_occurrences(guid=occurrence_guid)
def get_scan_occurrence(self,scan_occ_guid):
return ScanOccurrences().get(guid=scan_occ_guid)
def stop_scan_occurrence(self,scan_occ_guid,save_or_delete):
return ScanOccurrences().stop(guid=scan_occ_guid, save_or_delete=save_or_delete)
def get_scan_occurrence_configuration(self,scan_occ_guid):
return ScanOccurrences().get_configuration(guid=scan_occ_guid)
def get_scan_occurrence_verification_report(self,scan_occ_guid):
return ScanOccurrences().get_verification_report(guid=scan_occ_guid)
def get_scan_occurrence_notes_report(self,scan_occ_guid):
return ScanOccurrences().get_scan_notes_report(guid=scan_occ_guid)
def get_scan_occurrence_screenshots(self,scan_occ_guid):
return ScanOccurrences().get_screenshots(guid=scan_occ_guid)
def get_codegroups(self):
return CodeGroups().get_all()
def get_codegroup(self,name):
return CodeGroups().get(name=name)
def get_dynamic_configuration(self):
return Configuration().get()
def get_dynamic_scan_capacity_summary(self):
return ScanCapacitySummary().get()
def get_global_scanner_variables(self):
return ScannerVariables().get_all()
def get_global_scanner_variable(self,guid):
return ScannerVariables().get(guid)
def create_global_scanner_variable(self,reference_key,value,description):
return ScannerVariables().create(reference_key,value,description)
def update_global_scanner_variable(self,guid,reference_key,value,description):
return ScannerVariables().update(guid,reference_key,value,description)
def delete_global_scanner_variable(self,guid):
return ScannerVariables().delete(guid)
def dyn_setup_user_agent(self,custom_header,type):
return DynUtils().setup_user_agent(custom_header,type)
def dyn_setup_custom_host(self,host_name,ip_address):
return DynUtils().setup_custom_host(host_name,ip_address)
def dyn_setup_blocklist(self, urls:List):
return DynUtils().setup_blocklist(urls)
def dyn_setup_url(self,url,directory_restriction_type='DIRECTORY_AND_SUBDIRECTORY',http_and_https=True):
return DynUtils().setup_url(url,directory_restriction_type,http_and_https)
def dyn_setup_scan_setting(self,blocklist_configs:list,custom_hosts:List, user_agent:None):
return DynUtils().setup_scan_setting(blocklist_configs,custom_hosts,user_agent)
def dyn_setup_scan_contact_info(self,email,first_and_last_name,telephone):
return DynUtils().setup_scan_contact_info(email,first_and_last_name,telephone)
def dyn_setup_crawl_script(self,script_body,script_type='SELENIUM'):
return DynUtils().setup_crawl_script(script_body,script_type)
def dyn_setup_crawl_configuration(self,scripts:List,disabled=False):
return DynUtils().setup_crawl_configuration(scripts,disabled)
def dyn_setup_login_logout_script(self,script_body,script_type='SELENIUM'):
return DynUtils().setup_login_logout_script(script_body,script_type)
def dyn_setup_auth(self,authtype,username,password,domain=None,base64_pkcs12=None,cert_name=None, login_script_data=None, logout_script_data=None):
return DynUtils().setup_auth(authtype,username,password,domain,base64_pkcs12,cert_name,login_script_data,logout_script_data)
def dyn_setup_auth_config(self,authentication_node:dict):
return DynUtils().setup_auth_config(authentication_node)
def dyn_setup_scan_config_request(self, url, allowed_hosts:List, auth_config=None, crawl_config=None, scan_setting=None):
return DynUtils().setup_scan_config_request(url,allowed_hosts,auth_config,crawl_config,scan_setting)
def dyn_setup_scan(self, scan_config_request, scan_contact_info=None, linked_app_guid=None):
return DynUtils().setup_scan(scan_config_request,scan_contact_info, linked_app_guid) | 39.184584 | 180 | 0.747904 | [
"MIT"
] | DaYuM/veracode-api-py | veracode_api_py/api.py | 19,318 | Python |
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2021 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version="1.0" encoding="UTF-8"?><xml>
<id/>
<version>1</version>
<name>TS_WANMANAGER_DSLWANoE_FixedMode_ActLink_P_1-1_WAN_Pri-Sec</name>
<primitive_test_id/>
<primitive_test_name>wanmanager_DoNothing</primitive_test_name>
<primitive_test_version>1</primitive_test_version>
<status>FREE</status>
<synopsis>To check if the DSL line is active with FIXED_MODE policy, WAN Type and priorities being (1,1) (Primary,Secondary) for DSL and WANOE respectively</synopsis>
<groups_id/>
<execution_time>40</execution_time>
<long_duration>false</long_duration>
<advanced_script>false</advanced_script>
<remarks/>
<skip>false</skip>
<box_types>
<box_type>Broadband</box_type>
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
</rdk_versions>
<test_cases>
<test_case_id>TC_WANMANAGER_55</test_case_id>
<test_objective>This test case is to check if the DSL line is active with FIXED_MODE policy, WAN Type and priorities being (1,1) (Primary,Secondary) for DSL and WANOE respectively</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband</test_setup>
<pre_requisite>1.Ccsp Components should be in a running state else invoke cosa_start.sh manually that includes all the ccsp components and TDK Component
2.TDK Agent should be in running state or invoke it through StartTdk.sh script
3.WAN Manager should be enabled
4.Both DSL WAN and WANOE WAN connections should be available</pre_requisite>
<api_or_interface_used>None</api_or_interface_used>
<input_parameters>Device.X_RDK_WanManager.Policy
Device.X_RDK_WanManager.CPEInterface.1.Wan.Type
Device.X_RDK_WanManager.CPEInterface.2.Wan.Type
Device.X_RDK_WanManager.CPEInterface.1.Wan.Priority
Device.X_RDK_WanManager.CPEInterface.2.Wan.Priority
Device.X_RDK_WanManager.CPEInterface.1.Wan.ActiveLink
Device.X_RDK_WanManager.CPEInterface.2.Wan.ActiveLink </input_parameters>
<automation_approch>1.Load the Module
2.Get the current WAN Priority and WAN Types for DSL and WANOE interfaces
3.Make the priority and WAN Type unequal for further set operations to be success
4.Get the current WAN policy, set the policy to FIXED_MODE if not already in that policy
5.Set the WAN Type and priorities as (1,1) (Primary, Secondary) for DSL and WANOE respectively
6.Get the active link status for DSL and WANOE
7.With the current configurations DSL Line is expected to be active
8.Revert the set values
9.Unload the module</automation_approch>
<expected_output>With Fixed Mode policy Wan Type and priorities being (1,1) (Primary, Secondary) for DSL and WANOE respectively - DSL Line is expected to be active </expected_output>
<priority>High</priority>
<test_stub_interface>WAN_MANAGER</test_stub_interface>
<test_script>TS_WANMANAGER_DSLWANoE_FixedMode_ActLink_P_1-1_WAN_Pri-Sec</test_script>
<skipped>No</skipped>
<release_version>M90</release_version>
<remarks>None</remarks>
</test_cases>
</xml>
'''
# tdklib library, which provides a wrapper for tdk testcase script
import tdklib;
from tdkbVariables import *;
from time import sleep;
from WanManager_Utility import *;
obj = tdklib.TDKScriptingLibrary("tdkbtr181","RDKB");
obj1 = tdklib.TDKScriptingLibrary("sysutil","1");
#IP and Port of box, no need to change.
#This will be replaced with the corresponding Box IP and port while executing the script.
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_WANMANAGER_DSLWANoE_FixedMode_ActLink_P_1-1_WAN_Pri-Sec');
obj1.configureTestCase(ip,port,'TS_WANMANAGER_DSLWANoE_FixedMode_ActLink_P_1-1_WAN_Pri-Sec');
#Get the result of connection with test component and DUT
loadmodulestatus =obj.getLoadModuleResult();
loadmodulestatus1 =obj1.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %loadmodulestatus;
print "[LIB LOAD STATUS] : %s" %loadmodulestatus1;
if "SUCCESS" in (loadmodulestatus.upper() and loadmodulestatus1.upper()):
#Set the result status of execution
obj.setLoadModuleStatus("SUCCESS");
obj1.setLoadModuleStatus("SUCCESS");
revertwantype =0;
revertpriority =0;
expectedresult="SUCCESS";
tdkTestObj = obj.createTestStep('TDKB_TR181Stub_Get');
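    #Based on how defaultTypePriority is indexed in the revert steps at the end of this
    #script, the list holds the WAN Types of CPEInterface.1 (DSL) and CPEInterface.2
    #(WANOE) at indices 0-1, followed by their Priorities at indices 2-3.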
defaultTypePriority,actualresult = GetCurrentWanTypeAndPriority(tdkTestObj);
if expectedresult in actualresult:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 1: Get the current WAN Type,Priority values for DSL and WANOE";
print "EXPECTED RESULT 1: Should get the current WAN Type,Priority values for DSL and WANOE"
print "ACTUAL RESULT 1 :The current WAN Type,Priority for DSL and WANOE are %s:"%defaultTypePriority;
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
step = 2;
status, policy_initial = get_policy(tdkTestObj, step);
if status == 0:
tdkTestObj_Get = obj.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj_Set = obj.createTestStep('TDKB_TR181Stub_Set');
print "***Checking if WAN types are equal and making them Unequal***";
revertwantype,default,actualresult = MakeWANTypeUnEqual(tdkTestObj_Get,tdkTestObj_Set);
if expectedresult in actualresult:
tdkTestObj.setResultStatus("SUCCESS");
print "***Checking if WAN priorities are equal and making them Unequal***";
revertpriority,default,actualresult = MakePriorityUnEqual(tdkTestObj_Get,tdkTestObj_Set);
if expectedresult in actualresult:
tdkTestObj.setResultStatus("SUCCESS");
#Set the Wan Manager Policy to FIXED_MODE
new_policy = "FIXED_MODE"
expectedresult="SUCCESS";
policyStatus =1;
revert = 0
if new_policy != policy_initial:
print "Setting the wanmanager policy to :%s"%new_policy
set_policy(new_policy, policy_initial, obj1, revert);
#Get the WANMANAGER POLICY and cross check with the Set value
step = step + 1;
status, policy = get_policy(tdkTestObj, step);
if status == 0:
revert = 1;
if policy == new_policy:
tdkTestObj.setResultStatus("SUCCESS");
print "The wanmanager policy is set successfully";
tdkTestObj = obj1.createTestStep('ExecuteCmd');
obj1.initiateReboot();
sleep(300);
else:
policyStatus =0;
tdkTestObj.setResultStatus("FAILURE");
print "The wanmanager policy is not set successfully";
else:
policyStatus =0;
tdkTestObj.setResultStatus("FAILURE");
print "Failed to get wanmanager policy after set ";
if policyStatus == 1:
print "The current WAN Manager Policy is %s" %new_policy;
wanDSL = "Primary";
wanWANOE = "Secondary";
priDSL = "1";
priWANOE ="1";
actualresult = SetWANTypethenPriority(tdkTestObj_Set,wanDSL,wanWANOE,priDSL,priWANOE);
revertwantype =1;
revertpriority =1;
if expectedresult in actualresult:
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 3: Set the (WANtype,Priority)for DSL(%s,%s) and WANOE(%s,%s)" %(wanDSL,priDSL,wanWANOE,priWANOE);
print "EXPECTED RESULT 3:Set operation is expected to be successful";
print "ACTUAL RESULT 3:set operations are successful";
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
tdkTestObj = obj.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj.addParameter("ParamName","Device.X_RDK_WanManager.CPEInterface.1.Wan.ActiveLink");
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult1 = tdkTestObj.getResult();
activeDSL = tdkTestObj.getResultDetails().strip().replace("\\n", "");
tdkTestObj = obj.createTestStep('TDKB_TR181Stub_Get');
tdkTestObj.addParameter("ParamName","Device.X_RDK_WanManager.CPEInterface.2.Wan.ActiveLink");
#Execute the test case in DUT
tdkTestObj.executeTestCase(expectedresult);
actualresult2 = tdkTestObj.getResult();
activeWANOE = tdkTestObj.getResultDetails().strip().replace("\\n", "");
                            if expectedresult in actualresult1 and expectedresult in actualresult2:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 4: Get the Active link status of DSL and WANOE";
print "EXPECTED RESULT 4: Active link status of DSL and WANOE should be fetched successfully";
print "ACTUAL RESULT 4: Get operation succeeded";
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
if activeDSL == "true" and activeWANOE == "false":
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP 5: Get the Active link status of DSL and WANOE";
print "EXPECTED RESULT 5: Active link status of DSL is expected to be true and WANOE as false";
print "ACTUAL RESULT 5: DSL status :%s, WANOE status : %s" %(activeDSL,activeWANOE);
#Get the result of execution
print "[TEST EXECUTION RESULT] : SUCCESS";
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 5: Get the Active link status of DSL and WANOE";
print "EXPECTED RESULT 5:Active link status of DSL is expected to be true and WANOE as false";
print "ACTUAL RESULT 5: DSL status :%s, WANOE status : %s" %(activeDSL,activeWANOE);
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 4: Get the Active link status of DSL and WANOE";
print "EXPECTED RESULT 4: Active link status of DSL is expected to be true and WANOE as false";
print "ACTUAL RESULT 4: Get operation failed ";
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
if revert == 1:
set_policy(new_policy, policy_initial, obj1, revert);
else:
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 3: Set the (WANtype,Priority)for DSL(%s,%s) and WANOE(%s,%s)"%(wanDSL,priDSL,wanWANOE,priWANOE);
print "EXPECTED RESULT 3:Set operation is expected to be successful";
print "ACTUAL RESULT 3 :set operations failed";
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
else:
print "set operation of WAN Policy failed";
else:
tdkTestObj.setResultStatus("FAILURE");
print "Unable to make WAN priorities Un-equal"
else:
tdkTestObj.setResultStatus("FAILURE");
print "Unable to make WAN Types Un-equal"
else:
tdkTestObj.setResultStatus("FAILURE");
print "The current policy is not the expected policy";
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP 1: Get the default WAN Type,Priority values for DSL and WANOE";
print "EXPECTED RESULT 1: Should get the default WAN Type,Priority values for DSL and WANOE"
print "ACTUAL RESULT 1 :The default WAN Type,Priority for DSL and WANOE are %s:"%defaultTypePriority;
#Get the result of execution
print "[TEST EXECUTION RESULT] : FAILURE";
#Revert operations
revertflag =1;
if revertpriority ==1:
print "Reverting priority to defaults";
paramList = ["Device.X_RDK_WanManager.CPEInterface.1.Wan.Priority","Device.X_RDK_WanManager.CPEInterface.2.Wan.Priority"];
tdkTestObj = obj.createTestStep('TDKB_TR181Stub_Set');
index = 2;
for item in paramList:
tdkTestObj.addParameter("ParamName",item);
tdkTestObj.addParameter("ParamValue",defaultTypePriority[index]);
tdkTestObj.addParameter("Type","int");
expectedresult= "SUCCESS";
#Execute testcase on DUT
tdkTestObj.executeTestCase(expectedresult);
result = tdkTestObj.getResult();
Setresult = tdkTestObj.getResultDetails();
index =index +1;
if expectedresult in result:
tdkTestObj.setResultStatus("SUCCESS");
else:
revertflag =0;
print "Revert operation failed for WAN priority";
tdkTestObj.setResultStatus("FAILURE");
break;
if revertwantype == 1:
print "Reverting WAN Type to defaults";
paramList = ["Device.X_RDK_WanManager.CPEInterface.1.Wan.Type","Device.X_RDK_WanManager.CPEInterface.2.Wan.Type"];
tdkTestObj = obj.createTestStep('TDKB_TR181Stub_Set');
index = 0;
for item in paramList:
tdkTestObj.addParameter("ParamName",item);
tdkTestObj.addParameter("ParamValue",defaultTypePriority[index]);
tdkTestObj.addParameter("Type","string");
expectedresult= "SUCCESS";
#Execute testcase on DUT
tdkTestObj.executeTestCase(expectedresult);
result = tdkTestObj.getResult();
Setresult = tdkTestObj.getResultDetails();
index =index +1;
if expectedresult in result:
tdkTestObj.setResultStatus("SUCCESS");
else:
revertflag =0;
print "Revert operation failed for WAN Type";
tdkTestObj.setResultStatus("FAILURE");
break;
#printing the final revert status
if revertflag == 1:
print "Revert operation successful for WAN Type and WAN priority";
else:
print "Revert operation failed for either WAN Type or WAN priority";
obj.unloadModule("tdkbtr181");
obj1.unloadModule("sysutil");
else:
print "Failed to load module";
obj.setLoadModuleStatus("FAILURE");
obj1.setLoadModuleStatus("FAILURE");
| 53.936102 | 199 | 0.603838 | [
"Apache-2.0"
] | rdkcmf/rdkb-tools-tdkb | testscripts/RDKB/component/WAN_MANAGER/TS_WANMANAGER_DSLWANoE_FixedMode_ActLink_P_1-1_WAN_Pri-Sec.py | 16,882 | Python |
from django.shortcuts import render,redirect
from .models import Image,Location,Category
# Create your views here.
def intro(request):
images = Image.objects.all()
return render(request, 'intro.html',{'images':images})
def search_results(request):
    if 'image' in request.GET and request.GET["image"]:
        search_term = request.GET.get("image")
        searched_images = Image.search_by_cate(search_term)
        message = f"{search_term}"
        return render(request,'search.html',{"message":message,"images":searched_images})
    # Always return a response, even when no search term was supplied.
    return render(request,'search.html')
| 38.928571 | 89 | 0.713761 | [
"MIT"
] | IsaacMurage-dev/Art-Gallery | snaps/views.py | 545 | Python |
class Solution:
def removeDuplicates(self, S: str) -> str:
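        # Scan the string with index i, comparing each character with its
        # predecessor. When a duplicate pair is found, splice it out and step
        # back one position so a newly adjacent pair is also detected
        # (e.g. "abba" -> "aa" -> ""); otherwise advance to the next index.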
i = 1
while i < len(S):
if i <= 0:
i += 1
continue
if S[i] == S[i - 1]:
S = S[:i - 1] + S[i + 1:]
i = i - 1
else:
i = i + 1
return S
slu = Solution()
print(slu.removeDuplicates("abbaca"))
| 21.833333 | 46 | 0.358779 | [
"Apache-2.0"
] | kefirzhang/algorithms | leetcode/python/easy/p1047_removeDuplicates.py | 393 | Python |
"""Assorted utilities shared between parts of apitools."""
import collections
import httplib
import os
import types
import urllib2
from apitools.base.py import exceptions
__all__ = [
'DetectGae',
'DetectGce',
]
def DetectGae():
"""Determine whether or not we're running on GAE.
This is based on:
https://developers.google.com/appengine/docs/python/#The_Environment
Returns:
True iff we're running on GAE.
"""
server_software = os.environ.get('SERVER_SOFTWARE', '')
return (server_software.startswith('Development/') or
server_software.startswith('Google App Engine/'))
def DetectGce():
"""Determine whether or not we're running on GCE.
This is based on:
https://developers.google.com/compute/docs/instances#dmi
Returns:
True iff we're running on a GCE instance.
"""
try:
o = urllib2.urlopen('http://metadata.google.internal')
except urllib2.URLError:
return False
return o.getcode() == httplib.OK
def NormalizeScopes(scope_spec):
"""Normalize scope_spec to a set of strings."""
if isinstance(scope_spec, types.StringTypes):
return set(scope_spec.split(' '))
elif isinstance(scope_spec, collections.Iterable):
return set(scope_spec)
raise exceptions.TypecheckError(
'NormalizeScopes expected string or iterable, found %s' % (
type(scope_spec),))
def Typecheck(arg, arg_type, msg=None):
if not isinstance(arg, arg_type):
if msg is None:
if isinstance(arg_type, tuple):
msg = 'Type of arg is "%s", not one of %r' % (type(arg), arg_type)
else:
msg = 'Type of arg is "%s", not "%s"' % (type(arg), arg_type)
raise exceptions.TypecheckError(msg)
return arg
| 25.477612 | 74 | 0.68717 | [
"Apache-2.0"
] | Technology-Hatchery/google-cloud-sdk | .install/.backup/lib/apitools/base/py/util.py | 1,707 | Python |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
r"""
Batch acquisition functions using the reparameterization trick in combination
with (quasi) Monte-Carlo sampling. See [Rezende2014reparam]_ and
[Wilson2017reparam]_
.. [Rezende2014reparam]
D. J. Rezende, S. Mohamed, and D. Wierstra. Stochastic backpropagation and
approximate inference in deep generative models. ICML 2014.
.. [Wilson2017reparam]
J. T. Wilson, R. Moriconi, F. Hutter, and M. P. Deisenroth.
The reparameterization trick for acquisition functions. ArXiv 2017.
"""
import math
from abc import ABC, abstractmethod
from typing import Optional, Union
import torch
from torch import Tensor
from ..exceptions.errors import UnsupportedError
from ..models.model import Model
from ..sampling.samplers import MCSampler, SobolQMCNormalSampler
from ..utils.transforms import (
concatenate_pending_points,
match_batch_shape,
t_batch_mode_transform,
)
from .acquisition import AcquisitionFunction
from .objective import IdentityMCObjective, MCAcquisitionObjective
from .utils import prune_inferior_points
class MCAcquisitionFunction(AcquisitionFunction, ABC):
r"""Abstract base class for Monte-Carlo based batch acquisition functions."""
def __init__(
self,
model: Model,
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
) -> None:
r"""Constructor for the MCAcquisitionFunction base class.
Args:
model: A fitted model.
sampler: The sampler used to draw base samples. Defaults to
`SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`.
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
            X_pending: A `m x d`-dim Tensor of `m` design points that have
                been submitted for function evaluation but have not yet been
                evaluated.
"""
super().__init__(model=model)
if sampler is None:
sampler = SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)
self.add_module("sampler", sampler)
if objective is None:
objective = IdentityMCObjective()
elif not isinstance(objective, MCAcquisitionObjective):
raise UnsupportedError(
"Only objectives of type MCAcquisitionObjective are supported for "
"MC acquisition functions."
)
self.add_module("objective", objective)
self.set_X_pending(X_pending)
@abstractmethod
def forward(self, X: Tensor) -> Tensor:
r"""Takes in a `(b) x q x d` X Tensor of `(b)` t-batches with `q` `d`-dim
design points each, and returns a one-dimensional Tensor with
`(b)` elements. Should utilize the result of set_X_pending as needed
to account for pending function evaluations.
"""
pass # pragma: no cover
class qExpectedImprovement(MCAcquisitionFunction):
r"""MC-based batch Expected Improvement.
This computes qEI by
(1) sampling the joint posterior over q points
(2) evaluating the improvement over the current best for each sample
(3) maximizing over q
(4) averaging over the samples
`qEI(X) = E(max(max Y - best_f, 0)), Y ~ f(X), where X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> best_f = train_Y.max()[0]
>>> sampler = SobolQMCNormalSampler(1000)
>>> qEI = qExpectedImprovement(model, best_f, sampler)
>>> qei = qEI(test_X)
"""
def __init__(
self,
model: Model,
best_f: Union[float, Tensor],
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
) -> None:
r"""q-Expected Improvement.
Args:
model: A fitted model.
best_f: The best objective value observed so far (assumed noiseless).
sampler: The sampler used to draw base samples. Defaults to
                `SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
            X_pending: A `m x d`-dim Tensor of `m` design points that have
                been submitted for function evaluation but have not yet been
                evaluated. Concatenated into X upon forward call. Copied and
                set to have no gradient.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
if not torch.is_tensor(best_f):
best_f = torch.tensor(float(best_f))
self.register_buffer("best_f", best_f)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qExpectedImprovement on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Expected Improvement values at the given
design points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
obj = (obj - self.best_f).clamp_min(0)
q_ei = obj.max(dim=-1)[0].mean(dim=0)
return q_ei
class qNoisyExpectedImprovement(MCAcquisitionFunction):
r"""MC-based batch Noisy Expected Improvement.
This function does not assume a `best_f` is known (which would require
noiseless observations). Instead, it uses samples from the joint posterior
over the `q` test points and previously observed points. The improvement
over previously observed points is computed for each sample and averaged.
`qNEI(X) = E(max(max Y - max Y_baseline, 0))`, where
`(Y, Y_baseline) ~ f((X, X_baseline)), X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1000)
>>> qNEI = qNoisyExpectedImprovement(model, train_X, sampler)
>>> qnei = qNEI(test_X)
"""
def __init__(
self,
model: Model,
X_baseline: Tensor,
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
prune_baseline: bool = False,
) -> None:
r"""q-Noisy Expected Improvement.
Args:
model: A fitted model.
X_baseline: A `r x d`-dim Tensor of `r` design points that have
already been observed. These points are considered as the
potential best design point.
sampler: The sampler used to draw base samples. Defaults to
                `SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`.
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
            X_pending: A `m x d`-dim Tensor of `m` design points that have
                been submitted for function evaluation but have not yet been
                evaluated. Concatenated into X upon forward call. Copied and
                set to have no gradient.
prune_baseline: If True, remove points in `X_baseline` that are
highly unlikely to be the best point. This can significantly
improve performance and is generally recommended. In order to
customize pruning parameters, instead manually call
`botorch.acquisition.utils.prune_inferior_points` on `X_baseline`
before instantiating the acquisition function.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
if prune_baseline:
X_baseline = prune_inferior_points(
model=model, X=X_baseline, objective=objective
)
self.register_buffer("X_baseline", X_baseline)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qNoisyExpectedImprovement on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Noisy Expected Improvement values at the given
design points `X`.
"""
q = X.shape[-2]
X_full = torch.cat([X, match_batch_shape(self.X_baseline, X)], dim=-2)
# TODO (T41248036): Implement more efficient way to compute posterior
# over both training and test points in GPyTorch
posterior = self.model.posterior(X_full)
samples = self.sampler(posterior)
obj = self.objective(samples)
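        # Along the last (q) dimension the first `q` entries of `obj` correspond
        # to the candidate points X and the remaining entries to X_baseline; the
        # per-sample improvement is the difference of their maxima.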
diffs = obj[:, :, :q].max(dim=-1)[0] - obj[:, :, q:].max(dim=-1)[0]
return diffs.clamp_min(0).mean(dim=0)
class qProbabilityOfImprovement(MCAcquisitionFunction):
r"""MC-based batch Probability of Improvement.
Estimates the probability of improvement over the current best observed
value by sampling from the joint posterior distribution of the q-batch.
    MC-based estimates of a probability involve taking the expectation of an
    indicator function; to support auto-differentiation, the indicator is
replaced with a sigmoid function with temperature parameter `tau`.
`qPI(X) = P(max Y >= best_f), Y ~ f(X), X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> best_f = train_Y.max()[0]
>>> sampler = SobolQMCNormalSampler(1000)
>>> qPI = qProbabilityOfImprovement(model, best_f, sampler)
>>> qpi = qPI(test_X)
"""
def __init__(
self,
model: Model,
best_f: Union[float, Tensor],
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
tau: float = 1e-3,
) -> None:
r"""q-Probability of Improvement.
Args:
model: A fitted model.
best_f: The best objective value observed so far (assumed noiseless).
sampler: The sampler used to draw base samples. Defaults to
                `SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
            X_pending: A `m x d`-dim Tensor of `m` design points that have
                been submitted for function evaluation but have not yet been
                evaluated. Concatenated into X upon forward call. Copied and
                set to have no gradient.
tau: The temperature parameter used in the sigmoid approximation
of the step function. Smaller values yield more accurate
                approximations of the function, but result in gradient
estimates with higher variance.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
if not torch.is_tensor(best_f):
best_f = torch.tensor(float(best_f))
self.register_buffer("best_f", best_f)
if not torch.is_tensor(tau):
tau = torch.tensor(float(tau))
self.register_buffer("tau", tau)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qProbabilityOfImprovement on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Probability of Improvement values at the given
design points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
max_obj = obj.max(dim=-1)[0]
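        # Smooth approximation of the indicator 1{max_obj >= best_f}: a sigmoid
        # with temperature `tau`, averaged over the MC samples.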
val = torch.sigmoid((max_obj - self.best_f) / self.tau).mean(dim=0)
return val
class qSimpleRegret(MCAcquisitionFunction):
r"""MC-based batch Simple Regret.
Samples from the joint posterior over the q-batch and computes the simple
regret.
`qSR(X) = E(max Y), Y ~ f(X), X = (x_1,...,x_q)`
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1000)
>>> qSR = qSimpleRegret(model, sampler)
>>> qsr = qSR(test_X)
"""
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qSimpleRegret on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Simple Regret values at the given design
points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
val = obj.max(dim=-1)[0].mean(dim=0)
return val
class qUpperConfidenceBound(MCAcquisitionFunction):
r"""MC-based batch Upper Confidence Bound.
Uses a reparameterization to extend UCB to qUCB for q > 1 (See Appendix A
of [Wilson2017reparam].)
`qUCB = E(max(mu + |Y_tilde - mu|))`, where `Y_tilde ~ N(mu, beta pi/2 Sigma)`
and `f(X)` has distribution `N(mu, Sigma)`.
Example:
>>> model = SingleTaskGP(train_X, train_Y)
>>> sampler = SobolQMCNormalSampler(1000)
>>> qUCB = qUpperConfidenceBound(model, 0.1, sampler)
>>> qucb = qUCB(test_X)
"""
def __init__(
self,
model: Model,
beta: float,
sampler: Optional[MCSampler] = None,
objective: Optional[MCAcquisitionObjective] = None,
X_pending: Optional[Tensor] = None,
) -> None:
r"""q-Upper Confidence Bound.
Args:
model: A fitted model.
beta: Controls tradeoff between mean and standard deviation in UCB.
sampler: The sampler used to draw base samples. Defaults to
                `SobolQMCNormalSampler(num_samples=512, collapse_batch_dims=True)`
objective: The MCAcquisitionObjective under which the samples are
evaluated. Defaults to `IdentityMCObjective()`.
            X_pending: A `m x d`-dim Tensor of `m` design points that have
                been submitted for function evaluation but have not yet been
                evaluated. Concatenated into X upon forward call. Copied and
                set to have no gradient.
"""
super().__init__(
model=model, sampler=sampler, objective=objective, X_pending=X_pending
)
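        # For Gaussian Y, E[|Y - mean|] = sigma * sqrt(2 / pi), so scaling by
        # sqrt(beta * pi / 2) makes E[mean + beta_prime * |Y - mean|] equal to
        # mean + sqrt(beta) * sigma, i.e. the analytic UCB value.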
self.beta_prime = math.sqrt(beta * math.pi / 2)
@concatenate_pending_points
@t_batch_mode_transform()
def forward(self, X: Tensor) -> Tensor:
r"""Evaluate qUpperConfidenceBound on the candidate set `X`.
Args:
X: A `(b) x q x d`-dim Tensor of `(b)` t-batches with `q` `d`-dim
design points each.
Returns:
A `(b)`-dim Tensor of Upper Confidence Bound values at the given
design points `X`.
"""
posterior = self.model.posterior(X)
samples = self.sampler(posterior)
obj = self.objective(samples)
mean = obj.mean(dim=0)
ucb_samples = mean + self.beta_prime * (obj - mean).abs()
return ucb_samples.max(dim=-1)[0].mean(dim=0)
| 39.197561 | 86 | 0.62759 | [
"MIT"
] | BradyBromley/botorch | botorch/acquisition/monte_carlo.py | 16,071 | Python |
from .fhirbase import fhirbase
class Annotation(fhirbase):
"""
A text note which also contains information about who made the
statement and when.
Args:
authorReference: The individual responsible for making the annotation.
authorString: The individual responsible for making the annotation.
time: Indicates when this particular annotation was made.
text: The text of the annotation.
"""
__name__ = 'Annotation'
def __init__(self, dict_values=None):
self.authorReference = None
# reference to Reference: identifier
self.authorString = None
# type: str
self.time = None
# type: str
self.text = None
# type: str
self.object_id = None
# unique identifier for object class
if dict_values:
self.set_attributes(dict_values)
def get_relationships(self):
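        # Declares how this class maps to related FHIR entities: authorReference
        # points to a Reference resource, joined on the Reference's identifier.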
return [
{'parent_entity': 'Reference',
'parent_variable': 'identifier',
'child_entity': 'Annotation',
'child_variable': 'authorReference'},
]
| 25.2 | 78 | 0.614638 | [
"MIT"
] | D3-AI/Cardea | cardea/fhir/Annotation.py | 1,134 | Python |
import abc
import json
import logging
import os
from typing import Any, Tuple, Union
import numpy as np
import tifffile
import tiledb
import zarr
from slaid.commons import Mask, BasicSlide
from slaid.commons.base import Polygon
from slaid.commons.ecvl import BasicSlide as EcvlSlide
logger = logging.getLogger(__file__)
class Renderer(abc.ABC):
@abc.abstractmethod
def render(
self,
array: np.ndarray,
filename: str,
):
pass
class TiffRenderer(Renderer):
def __init__(self,
tile_size: Tuple[int, int] = (256, 256),
rgb: bool = True,
bigtiff=True):
self.tile_size = tile_size
self.channels = 4 if rgb else 2
self.rgb = rgb
self.bigtiff = bigtiff
def _tiles(self, data: np.ndarray) -> np.ndarray:
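        # Walk the mask in tile_size steps, zero-padding tiles at the right and
        # bottom edges. Each yielded tile is uint8 with the mask value scaled to
        # 0-255 in channel 0, and the last (alpha) channel set to opaque wherever
        # that value exceeds ~10% of the maximum.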
for y in range(0, data.shape[0], self.tile_size[0]):
for x in range(0, data.shape[1], self.tile_size[1]):
tile = data[y:y + self.tile_size[0], x:x + self.tile_size[1]]
if tile.shape[:2] != self.tile_size:
pad = (
(0, self.tile_size[0] - tile.shape[0]),
(0, self.tile_size[1] - tile.shape[1]),
)
tile = np.pad(tile, pad, 'constant')
final_tile = np.zeros(
(tile.shape[0], tile.shape[1], self.channels),
dtype='uint8')
final_tile[:, :, 0] = tile * 255
final_tile[final_tile[:, :, 0] > 255 / 10,
self.channels - 1] = 255
yield final_tile
def render(self, array: np.ndarray, filename: str):
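        # Stream the generated tiles into a tiled TIFF (BigTIFF by default),
        # using an RGB(A) photometric layout when rgb=True and greyscale plus an
        # associated alpha sample ('minisblack') otherwise.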
with tifffile.TiffWriter(filename, bigtiff=self.bigtiff) as tif:
tif.save(self._tiles(array),
dtype='uint8',
shape=(array.shape[0], array.shape[1], self.channels),
tile=self.tile_size,
photometric='rgb' if self.rgb else 'minisblack',
extrasamples=('ASSOCALPHA', ))
class BaseJSONEncoder(abc.ABC):
@abc.abstractproperty
def target(self):
pass
def encode(self, obj: Any):
pass
class NumpyArrayJSONEncoder(BaseJSONEncoder):
@property
def target(self):
return np.ndarray
def encode(self, array: np.ndarray):
return array.tolist()
class PolygonJSONEncoder(BaseJSONEncoder):
@property
def target(self):
return Polygon
def encode(self, obj: Polygon):
return obj.coords
class Int64JSONEncoder(BaseJSONEncoder):
@property
def target(self):
return np.int64
def encode(self, int_: np.int64):
return int(int_)
# from https://github.com/hmallen/numpyencoder
def convert_numpy_types(obj):
if isinstance(obj, (np.int_, np.intc, np.intp, np.int8, np.int16, np.int32,
np.int64, np.uint8, np.uint16, np.uint32, np.uint64)):
return int(obj)
elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)):
return float(obj)
elif isinstance(obj, (np.complex_, np.complex64, np.complex128)):
return {'real': obj.real, 'imag': obj.imag}
elif isinstance(obj, (np.ndarray, )):
return obj.tolist()
elif isinstance(obj, (np.bool_)):
return bool(obj)
elif isinstance(obj, (np.void)):
return None
return obj
class JSONEncoder(json.JSONEncoder):
encoders = [NumpyArrayJSONEncoder(), PolygonJSONEncoder()]
def default(self, obj):
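        # Try each registered encoder in turn; anything unhandled falls back to
        # json.JSONEncoder.default, which raises TypeError for unsupported types.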
encoded = None
for encoder in self.encoders:
if isinstance(obj, encoder.target):
encoded = encoder.encode(obj)
break
if encoded is None:
encoded = super().default(obj)
return encoded
class VectorialRenderer(Renderer):
def render(self,
slide: BasicSlide,
filename: str,
one_file_per_patch: bool = False):
if one_file_per_patch:
raise NotImplementedError()
with open(filename, 'w') as json_file:
json.dump(slide.patches, json_file, cls=JSONEncoder)
def to_json(obj: Any, filename: str = None) -> Union[str, None]:
if filename is not None:
with open(filename, 'w') as f:
json.dump(obj, f, cls=JSONEncoder)
else:
return json.dumps(obj, cls=JSONEncoder)
| 28.062893 | 79 | 0.575751 | [
"MIT"
] | mdrio/slaid | slaid/renderers.py | 4,462 | Python |